def initialize_pandoc():
    """Detect the installed pandoc version and pick a PDF engine option.

    Sets the module globals PANDOC_OLD, PANDOC_ENGINE and
    PANDOC_INITIALIZED.  Idempotent: returns immediately if already
    initialized.
    """
    global PANDOC_OLD
    global PANDOC_ENGINE
    global PANDOC_INITIALIZED
    if PANDOC_INITIALIZED:
        return
    PANDOC_VERSION = get_pandoc_version()
    if PANDOC_VERSION.startswith('1'):
        # pandoc 1.x used --latex-engine; 2.x renamed it to --pdf-engine
        PANDOC_OLD = True
        PANDOC_ENGINE = '--latex-engine=' + daconfig.get('pandoc engine', 'pdflatex')
    else:
        PANDOC_OLD = False
        try:
            # lualatex is usable only if the binary runs and the
            # luatexbase style file is installed.
            subprocess.check_output(['lualatex', '--help'], stderr=subprocess.STDOUT)
            assert os.path.isfile('/usr/share/texlive/texmf-dist/tex/luatex/luatexbase/luatexbase.sty')
            lualatex_supported = True
        except (subprocess.CalledProcessError, OSError, AssertionError):
            # Narrowed from a bare except: a failed launch, a non-zero
            # exit, or a missing .sty file all mean "not supported";
            # anything else should surface.
            lualatex_supported = False
        if lualatex_supported:
            PANDOC_ENGINE = '--pdf-engine=' + daconfig.get('pandoc engine', 'lualatex')
        else:
            PANDOC_ENGINE = '--pdf-engine=' + daconfig.get('pandoc engine', 'pdflatex')
    PANDOC_INITIALIZED = True
def create_app():
    """Create and configure the Flask application.

    Wires up SQLAlchemy, CSRF protection, Babel, optional ProxyFix (when
    running behind an HTTPS load balancer) and optional CORS.

    Returns a tuple (app, csrf, babel).
    """
    app = Flask(__name__)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    # Imported here (not at module level) so that configuration is read
    # at app-creation time.
    from docassemble.base.config import daconfig
    import docassemble.webapp.database
    import docassemble.webapp.db_object
    alchemy_connect_string = docassemble.webapp.database.alchemy_connection_string()
    app.config['SQLALCHEMY_DATABASE_URI'] = alchemy_connect_string
    if alchemy_connect_string.startswith('postgres'):
        # PostgreSQL-specific connection arguments (e.g. SSL options)
        app.config['SQLALCHEMY_ENGINE_OPTIONS'] = dict(connect_args=docassemble.webapp.database.connect_args())
    app.secret_key = daconfig.get('secretkey', '38ihfiFehfoU34mcq_4clirglw3g4o87')
    # NOTE(review): this key was already set above; the second
    # assignment appears redundant.
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db = docassemble.webapp.db_object.init_flask()
    db.init_app(app)
    csrf = CSRFProtect()
    csrf.init_app(app)
    babel = Babel()
    babel.init_app(app)
    if daconfig.get('behind https load balancer', False):
        # Older werkzeug versions do not accept the x_proto/x_host
        # keyword arguments.
        if proxyfix_version >= 15:
            app.wsgi_app = ProxyFix(app.wsgi_app, x_proto=1, x_host=1)
        else:
            app.wsgi_app = ProxyFix(app.wsgi_app)
    if 'cross site domains' in daconfig:
        CORS(app, origins=daconfig['cross site domains'], supports_credentials=True)
    return app, csrf, babel
def create_app():
    """Create and configure the Flask application (older variant).

    Wires up SQLAlchemy, CSRF protection, Babel and optional ProxyFix.

    Returns a tuple (app, csrf, babel).
    """
    app = Flask(__name__)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    from docassemble.base.config import daconfig
    import docassemble.webapp.database
    import docassemble.webapp.db_object
    # NOTE(review): connect_string appears unused below — verify before
    # removing.
    connect_string = docassemble.webapp.database.connection_string()
    alchemy_connect_string = docassemble.webapp.database.alchemy_connection_string()
    app.config['SQLALCHEMY_DATABASE_URI'] = alchemy_connect_string
    app.secret_key = daconfig.get('secretkey', '38ihfiFehfoU34mcq_4clirglw3g4o87')
    # NOTE(review): already set above; this second assignment is
    # redundant.
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db = docassemble.webapp.db_object.init_flask()
    db.init_app(app)
    csrf = CSRFProtect()
    csrf.init_app(app)
    babel = Babel()
    babel.init_app(app)
    if daconfig.get('behind https load balancer', False):
        # Older werkzeug versions do not accept the x_proto keyword.
        if proxyfix_version >= 15:
            app.wsgi_app = ProxyFix(app.wsgi_app, x_proto=1)
        else:
            app.wsgi_app = ProxyFix(app.wsgi_app)
    return app, csrf, babel
def init(self, *pargs, **kwargs): super(MSGraphConnectionObject, self).init(*pargs, **kwargs) # Default to using Docassemble configuration to retrieve credentials to connect to Microsoft Graph # if not hasattr(self, 'tenant_id'): tenant_id = daconfig.get('microsoft graph', {}).get('tenant id') else: tenant_id = self.tenant_id if not hasattr(self, 'client_id'): client_id = daconfig.get('microsoft graph', {}).get('client id') else: client_id = self.client_id if not hasattr(self, 'client_secret'): client_secret = daconfig.get( 'microsoft graph', {}).get('client secret') else: client_secret = self.client_secret token_url = "https://login.microsoftonline.com/" + tenant_id + "/oauth2/v2.0/token" token_data = { "client_id": client_id, "client_secret": client_secret, "scope": "https://graph.microsoft.com/.default", "grant_type": "client_credentials" } r = requests.post(token_url, data=token_data) self.token = r.json()['access_token'] self.authorization_header = { "Authorization": "Bearer " + self.token }
def populate_tables():
    """Seed the database with default accounts, roles and package rows.

    Creates/fetches the admin and cron users (defaults overridable via
    the 'default admin account' / 'default cron account' config
    directives and DA_ADMIN_* environment variables), assigns the
    'user' role to role-less users, records package dependencies, and
    converts the core docassemble git packages to pip packages.
    """
    start_time = time.time()
    sys.stderr.write("populate_tables: starting\n")
    # Instantiating UserManager registers it with the app; the instance
    # itself is not used further here.
    user_manager = UserManager(SQLAlchemyAdapter(db, UserModel, UserAuthClass=UserAuthModel), app)
    admin_defaults = daconfig.get('default admin account', dict())
    if 'email' not in admin_defaults:
        admin_defaults['email'] = os.getenv('DA_ADMIN_EMAIL', '*****@*****.**')
    if 'nickname' not in admin_defaults:
        admin_defaults['nickname'] = 'admin'
    if 'first_name' not in admin_defaults:
        admin_defaults['first_name'] = word('System')
    if 'last_name' not in admin_defaults:
        admin_defaults['last_name'] = word('Administrator')
    if 'password' not in admin_defaults:
        admin_defaults['password'] = os.getenv('DA_ADMIN_PASSWORD', 'password')
    cron_defaults = daconfig.get('default cron account', {'nickname': 'cron', 'email': '*****@*****.**', 'first_name': 'Cron', 'last_name': 'User'})
    # The cron account must never be able to log in interactively.
    cron_defaults['active'] = False
    user_role = get_role(db, 'user')
    admin_role = get_role(db, 'admin')
    cron_role = get_role(db, 'cron')
    customer_role = get_role(db, 'customer')
    developer_role = get_role(db, 'developer')
    advocate_role = get_role(db, 'advocate')
    trainer_role = get_role(db, 'trainer')
    # Ensure every existing user has at least the basic 'user' role.
    for user in UserModel.query.all():
        if len(user.roles) == 0:
            user.roles.append(user_role)
    db.session.commit()
    admin = get_user(db, admin_role, admin_defaults)
    cron = get_user(db, cron_role, cron_defaults)
    if admin.confirmed_at is None:
        admin.confirmed_at = datetime.datetime.now()
    if cron.confirmed_at is None:
        cron.confirmed_at = datetime.datetime.now()
    db.session.commit()
    add_dependencies(admin.id)
    # Core docassemble packages are managed by pip, not git.
    git_packages = Package.query.filter_by(type='git')
    for package in git_packages:
        if package.name in ['docassemble', 'docassemble.base', 'docassemble.webapp', 'docassemble.demo']:
            package.giturl = None
            package.gitsubdir = None
            package.type = 'pip'
            if daconfig.get('stable version', False):
                # Pin the stable series when requested.
                package.limitation = '<1.1.0'
    db.session.commit()
    sys.stderr.write("populate_tables: ending after " + str(time.time() - start_time) + "\n")
    return
def populate_tables():
    """Seed the database with default accounts, roles and package rows
    (older variant).

    Creates/fetches the admin and cron users (defaults overridable via
    the 'default admin account' / 'default cron account' config
    directives), records package dependencies, and converts the core
    docassemble git packages to pip packages.
    """
    # Instantiating UserManager registers it with the app; the instance
    # itself is not used further here.
    user_manager = UserManager(SQLAlchemyAdapter(db, UserModel, UserAuthClass=UserAuthModel), app)
    admin_defaults = daconfig.get('default admin account', dict())
    if 'email' not in admin_defaults:
        admin_defaults['email'] = '*****@*****.**'
    if 'nickname' not in admin_defaults:
        admin_defaults['nickname'] = 'admin'
    if 'first_name' not in admin_defaults:
        admin_defaults['first_name'] = word('System')
    if 'last_name' not in admin_defaults:
        admin_defaults['last_name'] = word('Administrator')
    cron_defaults = daconfig.get('default cron account', {'nickname': 'cron', 'email': '*****@*****.**', 'first_name': 'Cron', 'last_name': 'User'})
    # The cron account must never be able to log in interactively.
    cron_defaults['active'] = False
    user_role = get_role(db, 'user')
    admin_role = get_role(db, 'admin')
    cron_role = get_role(db, 'cron')
    customer_role = get_role(db, 'customer')
    developer_role = get_role(db, 'developer')
    advocate_role = get_role(db, 'advocate')
    trainer_role = get_role(db, 'trainer')
    admin = get_user(db, admin_role, admin_defaults)
    cron = get_user(db, cron_role, cron_defaults)
    if admin.confirmed_at is None:
        admin.confirmed_at = datetime.datetime.now()
    if cron.confirmed_at is None:
        cron.confirmed_at = datetime.datetime.now()
    db.session.commit()
    add_dependencies(admin.id)
    # Core docassemble packages are managed by pip, not git.
    git_packages = Package.query.filter_by(type='git')
    for package in git_packages:
        if package.name in ['docassemble', 'docassemble.base', 'docassemble.webapp', 'docassemble.demo']:
            package.giturl = None
            package.gitsubdir = None
            package.type = 'pip'
    db.session.commit()
    # Superseded package-registration logic, kept for reference:
    # docassemble_git_url = daconfig.get('docassemble git url', 'https://github.com/jhpyle/docassemble')
    # installed_packages = get_installed_distributions()
    # existing_packages = [package.name for package in Package.query.all()]
    # for package in installed_packages:
    #     if package.key in existing_packages:
    #         continue
    #     package_auth = PackageAuth(user_id=admin.id)
    #     if package.key in ['docassemble', 'docassemble.base', 'docassemble.webapp', 'docassemble.demo']:
    #         package_entry = Package(name=package.key, package_auth=package_auth, giturl=docassemble_git_url, packageversion=package.version, gitsubdir=re.sub(r'\.', '_', package.key), type='git', core=True)
    #     else:
    #         package_entry = Package(name=package.key, package_auth=package_auth, packageversion=package.version, type='pip', core=True)
    #     db.session.add(package_auth)
    #     db.session.add(package_entry)
    return
def url_for(self, **kwargs):
    """Return a URL at which this stored file can be retrieved.

    Keyword arguments:
        ext -- file extension override (leading dot stripped)
        filename -- filename override (defaults to self.filename)
        page -- page number for per-page PNG renderings
        size -- 'page' or 'screen' rendering size
        display_filename -- filename to present to the downloader

    When cloud storage is configured, returns a presigned URL valid for
    one hour; otherwise builds a URL under the configured file server
    root.  Returns 'about:blank' when the file cannot be resolved.
    """
    if 'ext' in kwargs and kwargs['ext'] is not None:
        extn = kwargs['ext']
        extn = re.sub(r'^\.', '', extn)
    else:
        extn = None
    filename = kwargs.get('filename', self.filename)
    if cloud is not None:
        keyname = str(self.section) + '/' + str(self.file_number) + '/' + str(filename)
        page = kwargs.get('page', None)
        if page:
            size = kwargs.get('size', 'page')
            page = re.sub(r'[^0-9]', '', page)
            # NOTE(review): 'screen-'/'page-' is appended directly to the
            # filename with no separator — confirm this matches the key
            # layout used when the renderings are stored.
            if size == 'screen':
                keyname += 'screen-' + str(page) + '.png'
            else:
                keyname += 'page-' + str(page) + '.png'
        elif extn:
            keyname += '.' + extn
        key = cloud.get_key(keyname)
        if key.exists():
            # Presigned URL valid for one hour.
            if 'display_filename' in kwargs:
                return (key.generate_url(3600, display_filename=kwargs['display_filename']))
            else:
                return (key.generate_url(3600))
        else:
            return ('about:blank')
    else:
        if extn is None:
            extn = ''
        else:
            extn = '.' + extn
        root = daconfig.get('root', '/')
        fileroot = daconfig.get('fileserver', root)
        if 'display_filename' in kwargs:
            filename = kwargs['display_filename']
        if self.section == 'files':
            if 'page' in kwargs and kwargs['page']:
                # URL for a single rendered page image.
                page = re.sub(r'[^0-9]', '', str(kwargs['page']))
                size = kwargs.get('size', 'page')
                url = fileroot + 'uploadedpage'
                if size == 'screen':
                    url += 'screen'
                url += '/' + str(self.file_number) + '/' + str(page)
            else:
                # Only append the extension when the filename lacks one.
                if re.search(r'\.', str(filename)):
                    url = fileroot + 'uploadedfile/' + str(self.file_number) + '/' + str(filename)
                elif extn != '':
                    url = fileroot + 'uploadedfile/' + str(self.file_number) + '/' + str(filename) + extn
                else:
                    url = fileroot + 'uploadedfile/' + str(self.file_number)
        else:
            # Only the 'files' section is servable without cloud storage.
            url = 'about:blank'
        return (url)
def ls_submit_online_intake(params, task=None):
    """Looks in config for legal server key, subkeys servername, username,
    and password then calls _ls_submit_online_intake with those values.
    """
    # Read the 'legal server' config section once instead of three times.
    ls_config = daconfig.get('legal server', {})
    servername = ls_config.get('servername')
    username = ls_config.get('username')
    password = ls_config.get('password')
    return _ls_submit_online_intake(servername, username, password, params, task=task)
def create_app():
    """Create the Flask app, a raw SQLAlchemy engine, and a SocketIO server.

    Unlike the Flask-SQLAlchemy variants, this builds a plain SQLAlchemy
    engine and decorates it with the attributes model code expects
    (Column, Integer, relationship, ...), then configures SocketIO with
    CORS origins from the 'cross site domains' or 'url root' directives.

    Returns a tuple (app, db, socketio).
    """
    app = Flask(__name__)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    from docassemble.base.config import daconfig
    import docassemble.webapp.database
    # NOTE(review): connect_string appears unused below — verify before
    # removing.
    connect_string = docassemble.webapp.database.connection_string()
    alchemy_connect_string = docassemble.webapp.database.alchemy_connection_string()
    #app.config['SQLALCHEMY_DATABASE_URI'] = alchemy_connect_string
    app.secret_key = daconfig.get('secretkey', '38ihfiFehfoU34mcq_4clirglw3g4o87')
    #app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    #db = SQLAlchemy(app)
    if alchemy_connect_string.startswith('postgresql'):
        # PostgreSQL needs driver-specific connection arguments.
        connect_args = docassemble.webapp.database.connect_args()
        db = sqlalchemy.create_engine(alchemy_connect_string, connect_args=connect_args, pool_pre_ping=docassemble.webapp.database.pool_pre_ping)
    else:
        db = sqlalchemy.create_engine(alchemy_connect_string, pool_pre_ping=docassemble.webapp.database.pool_pre_ping)
    Base = declarative_base()
    Base.metadata.bind = db
    #app.wsgi_app = ProxyFix(app.wsgi_app)
    # Graft Flask-SQLAlchemy-style attributes onto the plain engine so
    # that model modules written against Flask-SQLAlchemy still work.
    db.Model = Base
    db.Column = sqlalchemy.Column
    db.Integer = sqlalchemy.Integer
    db.String = sqlalchemy.String
    db.Index = sqlalchemy.Index
    db.Boolean = sqlalchemy.Boolean
    db.Text = sqlalchemy.Text
    db.DateTime = sqlalchemy.DateTime
    db.func = sqlalchemy.func
    db.relationship = relationship
    db.backref = backref
    db.ForeignKey = sqlalchemy.ForeignKey
    docassemble.webapp.db_object.db = db
    #import flask_login
    docassemble.webapp.db_object.UserMixin = object
    # Allow cross-site SocketIO connections only from configured domains,
    # else fall back to the site's own URL root.
    if 'cross site domains' in daconfig and isinstance(daconfig['cross site domains'], list) and len(daconfig['cross site domains']) > 0:
        origins = daconfig['cross site domains']
    else:
        origins = [daconfig.get('url root', '*')]
    socketio = SocketIO(app, async_mode='eventlet', verify=False, logger=True, engineio_logger=True, cors_allowed_origins=origins)
    return app, db, socketio
def populate_tables():
    """Seed the database with default accounts, roles and package rows
    (middle variant).

    Creates/fetches the admin and cron users, assigns the 'user' role to
    role-less users, records package dependencies, and converts the core
    docassemble git packages to pip packages.
    """
    # Instantiating UserManager registers it with the app; the instance
    # itself is not used further here.
    user_manager = UserManager(SQLAlchemyAdapter(db, UserModel, UserAuthClass=UserAuthModel), app)
    admin_defaults = daconfig.get('default admin account', dict())
    if 'email' not in admin_defaults:
        admin_defaults['email'] = '*****@*****.**'
    if 'nickname' not in admin_defaults:
        admin_defaults['nickname'] = 'admin'
    if 'first_name' not in admin_defaults:
        admin_defaults['first_name'] = word('System')
    if 'last_name' not in admin_defaults:
        admin_defaults['last_name'] = word('Administrator')
    cron_defaults = daconfig.get('default cron account', {'nickname': 'cron', 'email': '*****@*****.**', 'first_name': 'Cron', 'last_name': 'User'})
    # The cron account must never be able to log in interactively.
    cron_defaults['active'] = False
    user_role = get_role(db, 'user')
    admin_role = get_role(db, 'admin')
    cron_role = get_role(db, 'cron')
    customer_role = get_role(db, 'customer')
    developer_role = get_role(db, 'developer')
    advocate_role = get_role(db, 'advocate')
    trainer_role = get_role(db, 'trainer')
    # Ensure every existing user has at least the basic 'user' role.
    for user in UserModel.query.all():
        if len(user.roles) == 0:
            user.roles.append(user_role)
    db.session.commit()
    admin = get_user(db, admin_role, admin_defaults)
    cron = get_user(db, cron_role, cron_defaults)
    if admin.confirmed_at is None:
        admin.confirmed_at = datetime.datetime.now()
    if cron.confirmed_at is None:
        cron.confirmed_at = datetime.datetime.now()
    db.session.commit()
    add_dependencies(admin.id)
    # Core docassemble packages are managed by pip, not git.
    git_packages = Package.query.filter_by(type='git')
    for package in git_packages:
        if package.name in ['docassemble', 'docassemble.base', 'docassemble.webapp', 'docassemble.demo']:
            package.giturl = None
            package.gitsubdir = None
            package.type = 'pip'
    db.session.commit()
    # Superseded package-registration logic, kept for reference:
    # docassemble_git_url = daconfig.get('docassemble git url', 'https://github.com/jhpyle/docassemble')
    # installed_packages = get_installed_distributions()
    # existing_packages = [package.name for package in Package.query.all()]
    # for package in installed_packages:
    #     if package.key in existing_packages:
    #         continue
    #     package_auth = PackageAuth(user_id=admin.id)
    #     if package.key in ['docassemble', 'docassemble.base', 'docassemble.webapp', 'docassemble.demo']:
    #         package_entry = Package(name=package.key, package_auth=package_auth, giturl=docassemble_git_url, packageversion=package.version, gitsubdir=re.sub(r'\.', '_', package.key), type='git', core=True)
    #     else:
    #         package_entry = Package(name=package.key, package_auth=package_auth, packageversion=package.version, type='pip', core=True)
    #     db.session.add(package_auth)
    #     db.session.add(package_entry)
    return
def main():
    """Entry point for the package-update script.

    In 'initialize' mode, updates version records, seeds dependencies if
    no active package exists, checks for updates and prunes inactive
    hosts.  Otherwise just checks for updates and either asks supervisor
    to run the 'reset' program (web/celery/all roles) or touches the
    WSGI file to trigger an application reload.  Exits the process.
    """
    #import docassemble.webapp.database
    start_time = time.time()
    from docassemble.webapp.app_object import app
    with app.app_context():
        from docassemble.webapp.db_object import db
        from docassemble.webapp.packages.models import Package
        from sqlalchemy import select
        #app.config['SQLALCHEMY_DATABASE_URI'] = docassemble.webapp.database.alchemy_connection_string()
        if mode == 'initialize':
            sys.stderr.write("update: updating with mode initialize after " + str(time.time() - start_time) + " seconds\n")
            update_versions(start_time=start_time)
            any_package = db.session.execute(select(Package).filter_by(active=True)).first()
            if any_package is None:
                # Fresh database: register dependencies under user id 1.
                add_dependencies(1, start_time=start_time)
                update_versions(start_time=start_time)
            check_for_updates(start_time=start_time, invalidate_cache=False)
            remove_inactive_hosts(start_time=start_time)
        else:
            sys.stderr.write("update: updating with mode check_for_updates after " + str(time.time() - start_time) + " seconds\n")
            check_for_updates(start_time=start_time)
            if USING_SUPERVISOR:
                SUPERVISORCTL = daconfig.get('supervisorctl', 'supervisorctl')
                container_role = ':' + os.environ.get('CONTAINERROLE', '') + ':'
                if re.search(r':(web|celery|all):', container_role):
                    # Only web/celery containers need the reset program.
                    sys.stderr.write("update: sending reset signal after " + str(time.time() - start_time) + " seconds\n")
                    args = [SUPERVISORCTL, '-s', 'http://localhost:9001', 'start', 'reset']
                    subprocess.run(args, check=False)
                else:
                    sys.stderr.write("update: not sending reset signal because not web or celery after " + str(time.time() - start_time) + " seconds\n")
            else:
                # No supervisor: touching the WSGI file makes the web
                # server reload the application.
                sys.stderr.write("update: touched wsgi file after " + str(time.time() - start_time) + " seconds\n")
                wsgi_file = daconfig.get('webapp', '/usr/share/docassemble/webapp/docassemble.wsgi')
                if os.path.isfile(wsgi_file):
                    with open(wsgi_file, 'a', encoding='utf-8'):
                        os.utime(wsgi_file, None)
        db.engine.dispose()
    sys.exit(0)
def temp_url_for(self, **kwargs):
    """Return a short-lived URL for retrieving this file.

    Keyword arguments:
        filename -- filename override (defaults to self.filename)
        seconds -- lifetime of the URL (default 30)
        display_filename, inline, content_type -- passed through to the
            cloud provider when cloud URLs are enabled
        _external -- force an absolute URL (defaults to True inside a
            JS-embedded session)

    When cloud URLs are enabled, returns a presigned cloud URL (or
    'about:blank' if the key is missing).  Otherwise registers a
    one-time code in redis and returns a /tempfile/ URL served by the
    web app.
    """
    filename = kwargs.get('filename', self.filename)
    seconds = kwargs.get('seconds', None)
    # isinstance() instead of type()-identity checks, matching the
    # newer implementation of this method.
    if isinstance(seconds, float):
        seconds = int(seconds)
    if not isinstance(seconds, int):
        seconds = 30
    if cloud is not None and daconfig.get('use cloud urls', False):
        keyname = str(self.section) + '/' + str(self.file_number) + '/' + path_to_key(filename)
        key = cloud.get_key(keyname)
        if key.does_exist:
            return key.generate_url(seconds, display_filename=kwargs.get('display_filename', None), inline=kwargs.get('inline', None), content_type=kwargs.get('content_type', None))
        sys.stderr.write("key " + str(keyname) + " did not exist\n")
        return 'about:blank'
    r = docassemble.base.functions.server.server_redis
    # Loop until we find a code not already registered (setnx is atomic).
    while True:
        code = random_alphanumeric(32)
        keyname = 'da:tempfile:' + code
        if r.setnx(keyname, str(self.section) + '^' + str(self.file_number)):
            r.expire(keyname, seconds)
            break
    use_external = kwargs.get('_external', True if 'jsembed' in docassemble.base.functions.this_thread.misc else False)
    from flask import url_for
    url = url_for('rootindex', _external=use_external).rstrip('/')
    url += '/tempfile/' + code + '/' + path_to_key(kwargs.get('display_filename', filename))
    return url
def main():
    """Entry point for the container restart script (newer variant).

    On 'all'/'cron' containers, optionally runs create_tables (skipped
    when the da:skip_create_tables redis flag is set) and clears the
    cron restart flag.  Re-downloads the configuration from cloud
    storage when available, then touches the WSGI file so the web
    server reloads the application.  Exits the process.
    """
    container_role = ':' + os.environ.get('CONTAINERROLE', '') + ':'
    if ':all:' in container_role or ':cron:' in container_role:
        (redis_host, redis_port, redis_username, redis_password, redis_offset, redis_cli, ssl_opts) = parse_redis_uri()
        r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_offset, password=redis_password, username=redis_username, **ssl_opts)
        if r.get('da:skip_create_tables'):
            # One-shot flag: consume it and skip table creation.
            sys.stderr.write("restart: skipping create_tables\n")
            r.delete('da:skip_create_tables')
        else:
            import docassemble.webapp.create_tables
            docassemble.webapp.create_tables.main()
        if ':cron:' in container_role:
            r.delete('da:cron_restart')
    webapp_path = daconfig.get('webapp', '/usr/share/docassemble/webapp/docassemble.wsgi')
    cloud = get_cloud()
    if cloud is not None:
        # Prefer the cloud copy of the configuration when one exists.
        key = cloud.get_key('config.yml')
        if key.does_exist:
            key.get_contents_to_filename(daconfig['config file'])
            sys.stderr.write("Wrote config file based on copy on cloud\n")
    wsgi_file = webapp_path
    if os.path.isfile(wsgi_file):
        # Touching the WSGI file makes the web server reload the app.
        with open(wsgi_file, 'a', encoding='utf-8'):
            os.utime(wsgi_file, None)
    sys.stderr.write("Restarted.\n")
    sys.exit(0)
def get_home_page_dict():
    """Map installed docassemble package names to their home-page URLs.

    Scans the configured site-packages directory for docassemble.*
    entries, reads each one's METADATA file, and returns a dict of
    lower-cased package name -> Home-page URL (trailing slash removed),
    or None when no Home-page line precedes a found Name.
    """
    from docassemble.base.config import daconfig
    PACKAGE_DIRECTORY = daconfig.get('packages', '/usr/share/docassemble/local' + str(sys.version_info.major) + '.' + str(sys.version_info.minor))
    FULL_PACKAGE_DIRECTORY = os.path.join(PACKAGE_DIRECTORY, 'lib', 'python' + str(sys.version_info.major) + '.' + str(sys.version_info.minor), 'site-packages')
    home_page = dict()
    for d in os.listdir(FULL_PACKAGE_DIRECTORY):
        if not d.startswith('docassemble.'):
            continue
        # BUG FIX: the METADATA path must be rooted in the site-packages
        # directory; the bare entry name resolved relative to the CWD.
        metadata_path = os.path.join(FULL_PACKAGE_DIRECTORY, d, 'METADATA')
        if os.path.isfile(metadata_path):
            name = None
            url = None
            with open(metadata_path, 'r', encoding='utf-8') as fp:
                for line in fp:
                    if line.startswith('Name: '):
                        # BUG FIX: strip the trailing newline so the
                        # dict key is the clean package name.
                        name = line[6:].strip()
                    elif line.startswith('Home-page: '):
                        # BUG FIX: strip the newline before removing any
                        # trailing slash.
                        url = line[11:].strip().rstrip('/')
                        break
            if name:
                home_page[name.lower()] = url
    return home_page
def da_unique_email_validator(form, field):
    """Registration-form validator ensuring the e-mail address is unused.

    When LDAP login is fully configured (enable, base dn, bind email and
    bind password all present), also checks the LDAP directory and
    raises ValidationError if the address is found there.  LDAP errors
    are deliberately ignored (best-effort check).  When 'confirm
    registration' is enabled, uniqueness is not enforced locally;
    otherwise defers to the stock unique_email_validator.
    """
    if daconfig['ldap login'].get('enable', False) and daconfig['ldap login'].get('base dn', None) is not None and daconfig['ldap login'].get('bind email', None) is not None and daconfig['ldap login'].get('bind password', None) is not None:
        ldap_server = daconfig['ldap login'].get('server', 'localhost').strip()
        base_dn = daconfig['ldap login']['base dn'].strip()
        search_filter = daconfig['ldap login'].get('search pattern', "mail=%s") % (form.email.data, )
        connect = ldap.initialize('ldap://' + ldap_server)
        try:
            connect.simple_bind_s(daconfig['ldap login']['bind email'], daconfig['ldap login']['bind password'])
            # ValidationError raised here propagates to the caller; only
            # LDAP failures are swallowed below.
            if len(connect.search_s(base_dn, ldap.SCOPE_SUBTREE, search_filter)) > 0:
                raise ValidationError(word("This Email is already in use. Please try another one."))
        except ldap.LDAPError:
            # Best-effort: an unreachable/misconfigured directory does
            # not block registration.
            pass
    if daconfig.get('confirm registration', False):
        return True
    return unique_email_validator(form, field)
def create_app():
    """Create the Flask app, a raw SQLAlchemy engine, and a SocketIO
    server (oldest variant).

    Builds a plain SQLAlchemy engine, decorates it with the attributes
    model code expects (Column, Integer, relationship, ...), and
    configures SocketIO with the site's URL root as the allowed origin.

    Returns a tuple (app, db, socketio).
    """
    app = Flask(__name__)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    from docassemble.base.config import daconfig
    import docassemble.webapp.database
    # NOTE(review): connect_string appears unused below — verify before
    # removing.
    connect_string = docassemble.webapp.database.connection_string()
    alchemy_connect_string = docassemble.webapp.database.alchemy_connection_string()
    #app.config['SQLALCHEMY_DATABASE_URI'] = alchemy_connect_string
    app.secret_key = daconfig.get('secretkey', '38ihfiFehfoU34mcq_4clirglw3g4o87')
    #app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    #db = SQLAlchemy(app)
    db = sqlalchemy.create_engine(alchemy_connect_string)
    Base = declarative_base()
    Base.metadata.bind = db
    #app.wsgi_app = ProxyFix(app.wsgi_app)
    # Graft Flask-SQLAlchemy-style attributes onto the plain engine so
    # that model modules written against Flask-SQLAlchemy still work.
    db.Model = Base
    db.Column = sqlalchemy.Column
    db.Integer = sqlalchemy.Integer
    db.String = sqlalchemy.String
    db.Index = sqlalchemy.Index
    db.Boolean = sqlalchemy.Boolean
    db.Text = sqlalchemy.Text
    db.DateTime = sqlalchemy.DateTime
    db.func = sqlalchemy.func
    db.relationship = relationship
    db.backref = backref
    db.ForeignKey = sqlalchemy.ForeignKey
    docassemble.webapp.db_object.db = db
    #import flask_login
    docassemble.webapp.db_object.UserMixin = object
    socketio = SocketIO(app, async_mode='eventlet', verify=False, cors_allowed_origins=[daconfig.get('url root', '*')])
    return app, db, socketio
def main():
    """Entry point for the container restart script (older variant).

    On 'all'/'cron' containers, runs create_tables; on 'cron'
    containers, clears the cron restart flag in redis.  Re-downloads the
    configuration from cloud storage when available, then touches the
    WSGI file so the web server reloads the application.  Exits the
    process.
    """
    from docassemble.base.config import daconfig
    container_role = ':' + os.environ.get('CONTAINERROLE', '') + ':'
    if ':all:' in container_role or ':cron:' in container_role:
        import docassemble.webapp.create_tables
        docassemble.webapp.create_tables.main()
        if ':cron:' in container_role:
            import redis
            (redis_host, redis_port, redis_password, redis_offset, redis_cli) = docassemble.base.config.parse_redis_uri()
            r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_offset, password=redis_password)
            r.delete('da:cron_restart')
    webapp_path = daconfig.get('webapp', '/usr/share/docassemble/webapp/docassemble.wsgi')
    import docassemble.webapp.cloud
    cloud = docassemble.webapp.cloud.get_cloud()
    if cloud is not None:
        # Prefer the cloud copy of the configuration when one exists.
        key = cloud.get_key('config.yml')
        if key.does_exist:
            key.get_contents_to_filename(daconfig['config file'])
            sys.stderr.write("Wrote config file based on copy on cloud\n")
    wsgi_file = webapp_path
    if os.path.isfile(wsgi_file):
        # Touching the WSGI file makes the web server reload the app.
        # Fixed: open with an explicit encoding, consistent with the
        # other restart scripts in this codebase.
        with open(wsgi_file, 'a', encoding='utf-8'):
            os.utime(wsgi_file, None)
    sys.stderr.write("Restarted.\n")
    sys.exit(0)
def temp_url_for(self, **kwargs):
    """Return a short-lived URL for retrieving this file (newer variant).

    Keyword arguments:
        filename -- filename override (defaults to self.filename)
        seconds -- lifetime of the URL (default 30)
        _attachment -- serve as a download rather than inline
        display_filename, content_type -- passed through to the cloud
            provider when cloud URLs are enabled
        _external -- force an absolute URL (defaults to True inside a
            JS-embedded session)

    When cloud URLs are enabled, returns a presigned cloud URL (or
    'about:blank' if the key is missing).  Otherwise registers a
    one-time code in redis and returns a /tempfile/ or
    /tempfiledownload/ URL served by the web app.
    """
    if kwargs.get('_attachment', False):
        suffix = 'download'
    else:
        suffix = ''
    filename = kwargs.get('filename', self.filename)
    seconds = kwargs.get('seconds', None)
    if isinstance(seconds, float):
        seconds = int(seconds)
    if not isinstance(seconds, int):
        seconds = 30
    if cloud is not None and daconfig.get('use cloud urls', False):
        keyname = str(self.section) + '/' + str(self.file_number) + '/' + path_to_key(filename)
        key = cloud.get_key(keyname)
        inline = not bool(kwargs.get('_attachment', False))
        if key.does_exist:
            return key.generate_url(seconds, display_filename=kwargs.get('display_filename', None), inline=inline, content_type=kwargs.get('content_type', None))
        sys.stderr.write("key " + str(keyname) + " did not exist\n")
        return 'about:blank'
    r = docassemble.base.functions.server.server_redis
    # Loop until we find a code not already registered (setnx is atomic).
    while True:
        code = random_alphanumeric(32)
        keyname = 'da:tempfile:' + code
        if r.setnx(keyname, str(self.section) + '^' + str(self.file_number)):
            r.expire(keyname, seconds)
            break
    use_external = kwargs.get('_external', bool('jsembed' in docassemble.base.functions.this_thread.misc))
    url = url_for('rootindex', _external=use_external).rstrip('/')
    url += '/tempfile' + suffix + '/' + code + '/' + path_to_key(kwargs.get('display_filename', filename))
    return url
def main():
    """Run alembic migrations and create any missing database tables.

    When 'use alembic' is enabled, stamps a fresh database at head or
    upgrades an existing one, then calls db.create_all() with two
    retries (transient startup races), seeds the tables, and disposes
    of the engine.
    """
    with app.app_context():
        if daconfig.get('use alembic', True):
            packagedir = pkg_resources.resource_filename(pkg_resources.Requirement.parse('docassemble.webapp'), 'docassemble/webapp')
            if not os.path.isdir(packagedir):
                sys.exit("path for running alembic could not be found")
            from alembic.config import Config
            from alembic import command
            alembic_cfg = Config(os.path.join(packagedir, 'alembic.ini'))
            alembic_cfg.set_main_option("sqlalchemy.url", alchemy_connection_string())
            alembic_cfg.set_main_option("script_location", os.path.join(packagedir, 'alembic'))
            if not db.engine.has_table(dbtableprefix + 'alembic_version'):
                # Fresh database: mark it as already at the latest
                # revision so future upgrades start from head.
                command.stamp(alembic_cfg, "head")
            if db.engine.has_table(dbtableprefix + 'user'):
                # Existing database: apply pending migrations.
                command.upgrade(alembic_cfg, "head")
        #db.drop_all()
        # Retry table creation up to three times; narrowed from bare
        # except so KeyboardInterrupt/SystemExit are not swallowed.
        try:
            sys.stderr.write("Trying to create tables\n")
            db.create_all()
        except Exception:
            sys.stderr.write("Error trying to create tables; trying a second time.\n")
            try:
                db.create_all()
            except Exception:
                sys.stderr.write("Error trying to create tables; trying a third time.\n")
                db.create_all()
        populate_tables()
        db.engine.dispose()
def add_dependencies(user_id):
    """Register installed Python distributions as Package rows.

    For each installed distribution not already present among active
    Package rows, deletes any stale row, then creates a Package (core
    docassemble packages as git packages pointing at the configured
    docassemble git URL; GitHub-hosted dependencies as git; everything
    else as pip) owned by user_id, and records an Install row for this
    host.
    """
    #sys.stderr.write('add_dependencies: user_id is ' + str(user_id) + "\n")
    sys.stderr.write("add_dependencies: starting\n")
    from docassemble.base.config import hostname, daconfig
    docassemble_git_url = daconfig.get('docassemble git url', 'https://github.com/jhpyle/docassemble')
    # Keep only the newest row per package name (ordered by id desc).
    package_by_name = dict()
    for package in Package.query.filter_by(active=True).order_by(Package.name, Package.id.desc()).all():
        if package.name in package_by_name:
            continue
        package_by_name[package.name] = package
    installed_packages = get_installed_distributions()
    for package in installed_packages:
        if package.key in package_by_name:
            continue
        pip_info = get_pip_info(package.key)
        sys.stderr.write("Home page of " + str(package.key) + " is " + str(pip_info['Home-page']) + "\n")
        # Remove any stale (inactive) rows for this name before
        # inserting a fresh one.
        Package.query.filter_by(name=package.key).delete()
        db.session.commit()
        package_auth = PackageAuth(user_id=user_id)
        if package.key in ['docassemble', 'docassemble.base', 'docassemble.webapp', 'docassemble.demo']:
            # Core packages live in subdirectories of the docassemble
            # monorepo (dots become dashes in the subdirectory name).
            package_entry = Package(name=package.key, package_auth=package_auth, giturl=docassemble_git_url, packageversion=package.version, gitsubdir=re.sub(r'\.', '-', package.key), type='git', core=True)
        else:
            if pip_info['Home-page'] is not None and re.search(r'/github.com/', pip_info['Home-page']):
                package_entry = Package(name=package.key, package_auth=package_auth, type='git', giturl=pip_info['Home-page'], packageversion=package.version, dependency=True)
            else:
                package_entry = Package(name=package.key, package_auth=package_auth, type='pip', packageversion=package.version, dependency=True)
        db.session.add(package_auth)
        db.session.add(package_entry)
        db.session.commit()
        install = Install(hostname=hostname, packageversion=package_entry.packageversion, version=package_entry.version, package_id=package_entry.id)
        db.session.add(install)
        db.session.commit()
    sys.stderr.write("add_dependencies: ending\n")
    return
def main():
    """Touch the WSGI file so the web server reloads the application,
    then exit."""
    wsgi_file = daconfig.get('webapp', '/usr/share/docassemble/webapp/docassemble.wsgi')
    if os.path.isfile(wsgi_file):
        # Opening for append and updating the mtime is enough to make
        # the web server notice the change.
        with open(wsgi_file, 'a', encoding='utf-8'):
            os.utime(wsgi_file, None)
    logmessage("Restarted WSGI.\n")
    sys.exit(0)
def install_package(package):
    """Install one Package record with pip.

    Supports 'zip' (uploaded archive), 'git' (with optional
    subdirectory) and 'pip' (with optional version limitation) package
    types.

    Returns a tuple (returnval, logfilecontents) where returnval is the
    pip exit status (0 on success) and logfilecontents is the command
    line plus pip's log output.
    """
    sys.stderr.write("install_package: " + package.name + "\n")
    if package.type == 'zip' and package.upload is None:
        return 0, ''
    from docassemble.base.config import daconfig
    PACKAGE_DIRECTORY = daconfig.get('packages', '/usr/share/docassemble/local')
    logfilecontents = ''
    #pip.utils.logging._log_state = threading.local()
    #pip.utils.logging._log_state.indentation = 0
    pip_log = tempfile.NamedTemporaryFile()
    temp_dir = tempfile.mkdtemp()
    if package.type == 'zip' and package.upload is not None:
        saved_file = SavedFile(package.upload, extension='zip', fix=True)
        commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--log-file=' + pip_log.name, '--upgrade', saved_file.path + '.zip']
    elif package.type == 'git' and package.giturl is not None:
        if package.gitsubdir is not None:
            commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, 'git+' + str(package.giturl) + '.git#egg=' + package.name + '&subdirectory=' + str(package.gitsubdir)]
        else:
            commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, 'git+' + str(package.giturl) + '.git#egg=' + package.name]
    elif package.type == 'pip':
        if package.limitation is None:
            limit = ""
        else:
            limit = str(package.limitation)
        commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, package.name + limit]
    else:
        sys.stderr.write("Wrong package type\n")
        return 1, 'Unable to recognize package type: ' + package.name
    sys.stderr.write("install_package: running " + " ".join(commands) + "\n")
    logfilecontents += " ".join(commands) + "\n"
    # BUG FIX: subprocess.call() returns the exit status and never
    # raises CalledProcessError, so the old try/except always reported
    # success.  Use the return code directly; an OSError (pip missing)
    # still yields a non-zero result.
    try:
        returnval = subprocess.call(commands)
    except OSError:
        returnval = 1
    sys.stderr.flush()
    sys.stdout.flush()
    time.sleep(1)
    # BUG FIX: the log was opened in text mode ('rU') and then .decode()
    # was called on the resulting str, which raises on Python 3.
    with open(pip_log.name, 'r', encoding='utf-8', errors='replace') as x:
        logfilecontents += x.read()
    pip_log.close()
    try:
        sys.stderr.write(logfilecontents + "\n")
    except Exception:
        # Best-effort logging only; never let it mask the result.
        pass
    sys.stderr.flush()
    sys.stdout.flush()
    time.sleep(1)
    sys.stderr.write('returnval is: ' + str(returnval) + "\n")
    sys.stderr.write('install_package: done' + "\n")
    shutil.rmtree(temp_dir)
    return returnval, logfilecontents
def main():
    """Touch the WSGI file so the web server reloads the application,
    then exit."""
    from docassemble.base.config import daconfig
    webapp_path = daconfig.get('webapp', '/usr/share/docassemble/webapp/docassemble.wsgi')
    wsgi_file = webapp_path
    if os.path.isfile(wsgi_file):
        # Fixed: open with an explicit encoding, consistent with the
        # other restart scripts in this codebase.
        with open(wsgi_file, 'a', encoding='utf-8'):
            os.utime(wsgi_file, None)
    sys.stderr.write("Restarted WSGI.\n")
    sys.exit(0)
def cloudconvert_to_pdf(in_format, from_file, to_file, pdfa, password):
    """Convert a document to PDF using the CloudConvert v2 jobs API.

    Parameters:
        in_format -- input format identifier (e.g. 'docx')
        from_file -- path of the file to upload
        to_file -- path where the resulting PDF is written
        pdfa -- whether to produce PDF/A output
        password -- password for the source document, if any

    Raises Exception when job creation, upload, waiting or export fails.
    Requires the 'cloudconvert secret' configuration directive.
    """
    headers = {"Authorization": "Bearer " + daconfig.get('cloudconvert secret').strip()}
    data = {
        "tasks": {
            "import-1": {
                "operation": "import/upload"
            },
            "task-1": {
                "operation": "convert",
                "input_format": in_format,
                "output_format": "pdf",
                "engine": "office",
                "input": [
                    "import-1"
                ],
                "optimize_print": True,
                "pdf_a": pdfa,
                "filename": "myoutput.docx"
            },
            "export-1": {
                "operation": "export/url",
                "input": [
                    "task-1"
                ],
                "inline": False,
                "archive_multiple_files": False
            }
        }
    }
    if password:
        data['tasks']['task-1']['password'] = password
    r = requests.post("https://api.cloudconvert.com/v2/jobs", json=data, headers=headers)
    resp = r.json()
    if 'data' not in resp:
        logmessage("cloudconvert_to_pdf: create job returned " + repr(r.text))
        raise Exception("cloudconvert_to_pdf: failed to create job")
    uploaded = False
    for task in resp['data']['tasks']:
        if task['name'] == 'import-1':
            # BUG FIX: the uploaded file handle was opened inline and
            # never closed; use a context manager so it is released.
            with open(from_file, 'rb') as upload_fp:
                r = requests.post(task['result']['form']['url'], data=task['result']['form']['parameters'], files={'file': upload_fp})
            uploaded = True
    if not uploaded:
        raise Exception("cloudconvert_to_pdf: failed to upload")
    # Block (up to 60s) until the conversion job finishes.
    r = requests.get("https://api.cloudconvert.com/v2/jobs/%s/wait" % (resp['data']['id'],), headers=headers, timeout=60)
    wait_resp = r.json()
    if 'data' not in wait_resp:
        logmessage("cloudconvert_to_pdf: wait returned " + repr(r.text))
        raise Exception("Failed to wait on job")
    ok = False
    for task in wait_resp['data']['tasks']:
        if task['operation'] == "export/url":
            for file_result in task['result']['files']:
                urllib.request.urlretrieve(file_result['url'], to_file)
                ok = True
    if not ok:
        raise Exception("cloudconvert failed")
def cloud_custom(provider, config):
    """Return (and memoize in cloud_cache) a cloud object for a custom provider.

    :param provider: name of the cloud provider
    :param config: name of the configuration directive holding its settings
    :returns: the cloud object, or None if the configuration is missing/invalid
    """
    config_id = str(provider) + str(config)
    if config_id in cloud_cache:
        return cloud_cache[config_id]
    the_config = daconfig.get(config, None)
    # isinstance() is the idiomatic type test and also rejects None,
    # so the separate None comparison is unnecessary.
    if not isinstance(the_config, dict):
        logmessage("cloud_custom: invalid cloud configuration")
        return None
    cloud_cache[config_id] = docassemble.webapp.cloud.get_custom_cloud(provider, the_config)
    return cloud_cache[config_id]
def install_package(package):
    """Install a docassemble package (zip upload, git URL, or PyPI name) with pip.

    :param package: object with attributes name, type, upload, giturl, limitation
    :returns: tuple (returncode, logfilecontents); returncode 0 means success
    """
    sys.stderr.write("install_package: " + package.name + "\n")
    if package.type == 'zip' and package.upload is None:
        return 0, ''
    sys.stderr.write('install_package: ' + package.name + "\n")
    from docassemble.base.config import daconfig
    PACKAGE_DIRECTORY = daconfig.get('packages', '/usr/share/docassemble/local')
    logfilecontents = ''
    pip_log = tempfile.NamedTemporaryFile()
    temp_dir = tempfile.mkdtemp()
    if package.type == 'zip' and package.upload is not None:
        saved_file = SavedFile(package.upload, extension='zip', fix=True)
        commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--log-file=' + pip_log.name, '--upgrade', saved_file.path + '.zip']
    elif package.type == 'git' and package.giturl is not None:
        commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, 'git+' + package.giturl + '.git#egg=' + package.name]
    elif package.type == 'pip':
        limit = '' if package.limitation is None else str(package.limitation)
        commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, package.name + limit]
    else:
        sys.stderr.write("Wrong package type\n")
        return 1, 'Unable to recognize package type: ' + package.name
    sys.stderr.write("install_package: running " + " ".join(commands) + "\n")
    logfilecontents += " ".join(commands) + "\n"
    # BUG FIX: subprocess.call() returns the exit status and never raises
    # CalledProcessError (that is check_call), so the old try/except always
    # reported success.  Capture the return code directly.
    returnval = subprocess.call(commands)
    # BUG FIX: the old code opened the log in text mode ('rU') and then called
    # .decode('utf8') on the resulting str, an AttributeError on Python 3
    # ('rU' itself is removed in 3.11).  Read with an explicit encoding.
    with open(pip_log.name, 'r', encoding='utf-8', errors='replace') as x:
        logfilecontents += x.read()
    # Closing the NamedTemporaryFile also deletes it.
    pip_log.close()
    sys.stderr.write(logfilecontents + "\n")
    sys.stderr.write('install_package: done' + "\n")
    shutil.rmtree(temp_dir)
    return returnval, logfilecontents
def initialize_pandoc():
    """Detect the pandoc version and pick the PDF engine flag, once per process.

    Sets the module globals PANDOC_OLD, PANDOC_ENGINE, and PANDOC_INITIALIZED.
    """
    # BUG FIX: the global declarations must come before any use of these
    # names; reading PANDOC_INITIALIZED before `global PANDOC_INITIALIZED`
    # is a SyntaxError under Python 3.
    global PANDOC_OLD
    global PANDOC_ENGINE
    global PANDOC_INITIALIZED
    if PANDOC_INITIALIZED:
        return
    PANDOC_VERSION = get_pandoc_version()
    if PANDOC_VERSION.startswith('1'):
        # pandoc 1.x used --latex-engine; 2.x renamed it to --pdf-engine.
        PANDOC_OLD = True
        PANDOC_ENGINE = '--latex-engine=' + daconfig.get('pandoc engine', 'pdflatex')
    else:
        PANDOC_OLD = False
        try:
            # Probe for a working lualatex plus the luatexbase style file.
            subprocess.check_output(['lualatex', '--help'], stderr=subprocess.STDOUT)
            assert os.path.isfile('/usr/share/texlive/texmf-dist/tex/luatex/luatexbase/luatexbase.sty')
            lualatex_supported = True
        except Exception:
            lualatex_supported = False
        if lualatex_supported:
            PANDOC_ENGINE = '--pdf-engine=' + daconfig.get('pandoc engine', 'lualatex')
        else:
            PANDOC_ENGINE = '--pdf-engine=' + daconfig.get('pandoc engine', 'pdflatex')
    PANDOC_INITIALIZED = True
def create_app():
    """Create the Flask app, database handle, and eventlet SocketIO server.

    :returns: tuple (app, db, socketio)
    """
    app = Flask(__name__)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    from docassemble.base.config import daconfig
    import docassemble.webapp.database
    # Removed an unused `connect_string` local and a duplicate assignment of
    # SQLALCHEMY_TRACK_MODIFICATIONS.
    app.config['SQLALCHEMY_DATABASE_URI'] = docassemble.webapp.database.alchemy_connection_string()
    app.secret_key = daconfig.get('secretkey', '38ihfiFehfoU34mcq_4clirglw3g4o87')
    db = SQLAlchemy(app)
    socketio = SocketIO(app, async_mode='eventlet', verify=False)
    return app, db, socketio
def create_app():
    """Create the Flask app with CSRF protection, Babel, and optional ProxyFix.

    :returns: tuple (app, csrf, babel)
    """
    app = Flask(__name__)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    from docassemble.base.config import daconfig
    import docassemble.webapp.database
    import docassemble.webapp.db_object
    # Removed an unused `connect_string` local and a duplicate assignment of
    # SQLALCHEMY_TRACK_MODIFICATIONS.
    app.config['SQLALCHEMY_DATABASE_URI'] = docassemble.webapp.database.alchemy_connection_string()
    app.secret_key = daconfig.get('secretkey', '38ihfiFehfoU34mcq_4clirglw3g4o87')
    db = docassemble.webapp.db_object.init_flask()
    db.init_app(app)
    csrf = CSRFProtect()
    csrf.init_app(app)
    babel = Babel()
    babel.init_app(app)
    if daconfig.get('behind https load balancer', False):
        # Newer Werkzeug ProxyFix takes per-header counts; older takes none.
        if proxyfix_version >= 15:
            app.wsgi_app = ProxyFix(app.wsgi_app, x_proto=1, x_host=1)
        else:
            app.wsgi_app = ProxyFix(app.wsgi_app)
    return app, csrf, babel
def concatenate_files(path_list, pdfa=False, password=None):
    """Concatenate a list of files into a single PDF using pdftk.

    Images and word-processing files are first converted to PDF; PDFs are
    passed through; other types are skipped.

    :param path_list: paths of input files, in order
    :param pdfa: convert the final PDF to PDF/A when True
    :param password: if given, passed to replicate_js_and_calculations
    :returns: path of the combined PDF
    :raises DAError: if no input file is usable or pdftk fails
    """
    pdf_file = tempfile.NamedTemporaryFile(prefix="datemp", mode="wb", suffix=".pdf", delete=False)
    subprocess_arguments = [PDFTK_PATH]
    new_path_list = list()
    for path in path_list:
        mimetype, encoding = mimetypes.guess_type(path)
        # BUG FIX: guess_type() returns (None, None) for unrecognized names,
        # and None.startswith() raised AttributeError.  Skip unknown types
        # the same way other non-convertible types are skipped.
        if mimetype is None:
            logmessage("concatenate_files: skipping file of unknown type " + path)
            continue
        if mimetype.startswith('image'):
            new_pdf_file = tempfile.NamedTemporaryFile(prefix="datemp", mode="wb", suffix=".pdf", delete=False)
            args = [daconfig.get('imagemagick', 'convert'), path, new_pdf_file.name]
            try:
                result = subprocess.run(args, timeout=60).returncode
            except subprocess.TimeoutExpired:
                logmessage("concatenate_files: convert took too long")
                result = 1
            if result != 0:
                logmessage("failed to convert image to PDF: " + " ".join(args))
                continue
            new_path_list.append(new_pdf_file.name)
        elif mimetype in ('application/rtf', 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', 'application/msword', 'application/vnd.oasis.opendocument.text'):
            new_pdf_file = tempfile.NamedTemporaryFile(prefix="datemp", mode="wb", suffix=".pdf", delete=False)
            # Map the mimetype to the extension word_to_pdf expects.
            if mimetype == 'application/rtf':
                ext = 'rtf'
            elif mimetype == 'application/vnd.openxmlformats-officedocument.wordprocessingml.document':
                ext = 'docx'
            elif mimetype == 'application/msword':
                ext = 'doc'
            elif mimetype == 'application/vnd.oasis.opendocument.text':
                ext = 'odt'
            word_to_pdf(path, ext, new_pdf_file.name, pdfa=False)
            new_path_list.append(new_pdf_file.name)
        elif mimetype == 'application/pdf':
            new_path_list.append(path)
    if len(new_path_list) == 0:
        raise DAError("concatenate_files: no valid files to concatenate")
    subprocess_arguments.extend(new_path_list)
    subprocess_arguments.extend(['cat', 'output', pdf_file.name])
    try:
        result = subprocess.run(subprocess_arguments, timeout=60).returncode
    except subprocess.TimeoutExpired:
        result = 1
        logmessage("concatenate_files: call to cat took too long")
    if result != 0:
        logmessage("Failed to concatenate PDF files")
        raise DAError("Call to pdftk failed for concatenation where arguments were " + " ".join(subprocess_arguments))
    if pdfa:
        pdf_to_pdfa(pdf_file.name)
    # Copy Javascript/calculation fields from the first input into the result.
    replicate_js_and_calculations(new_path_list[0], pdf_file.name, password)
    return pdf_file.name
def main():
    """Create/upgrade the database tables, optionally running alembic migrations."""
    # Run inside the Flask application context so the SQLAlchemy `db`
    # object is bound to the application.
    with app.app_context():
        if daconfig.get('use alembic', True):
            # Locate the installed docassemble.webapp package directory,
            # which contains alembic.ini and the migration scripts.
            packagedir = pkg_resources.resource_filename(pkg_resources.Requirement.parse('docassemble.webapp'), 'docassemble/webapp')
            if not os.path.isdir(packagedir):
                sys.exit("path for running alembic could not be found")
            from alembic.config import Config
            from alembic import command
            alembic_cfg = Config(os.path.join(packagedir, 'alembic.ini'))
            alembic_cfg.set_main_option("sqlalchemy.url", alchemy_connection_string())
            alembic_cfg.set_main_option("script_location", os.path.join(packagedir, 'alembic'))
            if not db.engine.has_table(dbtableprefix + 'alembic_version'):
                # No version table yet: mark the database as already at head
                # so alembic does not replay old migrations.
                command.stamp(alembic_cfg, "head")
            if db.engine.has_table(dbtableprefix + 'user'):
                # Existing installation: apply any pending migrations.
                command.upgrade(alembic_cfg, "head")
        #db.drop_all()
        # create_all() is a no-op for tables that already exist.
        db.create_all()
        populate_tables()
        db.engine.dispose()
def get_installed_distributions():
    """Return installed pip packages as objects with `key` and `version` attributes.

    Parses the output of `pip freeze`, keeping only `name==version` lines.
    """
    sys.stderr.write("get_installed_distributions: starting\n")
    results = list()
    try:
        output = subprocess.check_output(['pip', 'freeze'])
    except subprocess.CalledProcessError as err:
        output = err.output
    # BUG FIX: check_output() returns bytes under Python 3; decode before
    # splitting, otherwise the str operations below fail.
    if isinstance(output, bytes):
        output = output.decode('utf-8')
    for line in output.split('\n'):
        a = line.split("==")
        if len(a) == 2:
            results.append(Object(key=a[0], version=a[1]))
    sys.stderr.write("get_installed_distributions: ending\n")
    return results
def main():
    """Run startup maintenance: fix DB tables on all/cron containers, refresh
    the configuration from cloud storage, and touch the WSGI file to restart
    the web application.  Always exits the process with 0."""
    from docassemble.base.config import daconfig
    role = os.environ.get('CONTAINERROLE', None)
    # Database maintenance runs only on containers playing the all/cron role.
    if role and re.search(r':(all|cron):', role):
        import docassemble.webapp.fix_postgresql_tables
        docassemble.webapp.fix_postgresql_tables.main()
        import docassemble.webapp.create_tables
        docassemble.webapp.create_tables.main()
    wsgi_file = daconfig.get('webapp', '/usr/share/docassemble/webapp/docassemble.wsgi')
    import docassemble.webapp.cloud
    the_cloud = docassemble.webapp.cloud.get_cloud()
    if the_cloud is not None:
        # If a config.yml exists in cloud storage, make it the local config.
        config_key = the_cloud.get_key('config.yml')
        if config_key.does_exist:
            config_key.get_contents_to_filename(daconfig['config file'])
            sys.stderr.write("Wrote config file based on copy on cloud\n")
    if os.path.isfile(wsgi_file):
        # Touching the WSGI file makes the web server reload the app.
        with open(wsgi_file, 'a'):
            os.utime(wsgi_file, None)
        sys.stderr.write("Restarted.\n")
    sys.exit(0)
def main():
    """Startup maintenance variant without the restart log message: fix DB
    tables on all/cron containers, pull config.yml from cloud storage, and
    touch the WSGI file.  Always exits the process with 0."""
    from docassemble.base.config import daconfig
    role = os.environ.get('CONTAINERROLE', None)
    # Only containers with the all/cron role run database maintenance.
    if role and re.search(r':(all|cron):', role):
        import docassemble.webapp.fix_postgresql_tables
        docassemble.webapp.fix_postgresql_tables.main()
        import docassemble.webapp.create_tables
        docassemble.webapp.create_tables.main()
    wsgi_file = daconfig.get('webapp', '/usr/share/docassemble/webapp/docassemble.wsgi')
    import docassemble.webapp.cloud
    the_cloud = docassemble.webapp.cloud.get_cloud()
    if the_cloud is not None:
        config_key = the_cloud.get_key('config.yml')
        if config_key.does_exist:
            config_key.get_contents_to_filename(daconfig['config file'])
            sys.stderr.write("Wrote config file based on copy on cloud\n")
    if os.path.isfile(wsgi_file):
        # Touching the WSGI file makes the web server reload the app.
        with open(wsgi_file, 'a'):
            os.utime(wsgi_file, None)
    sys.exit(0)
def connect_args(db_config):
    """Build the psycopg2 connect_args dict for a database configuration key.

    For PostgreSQL configurations, honors 'ssl mode' and resolves the
    'ssl cert' / 'ssl key' / 'ssl root cert' file names against the web
    server certificate directory.  Returns an empty dict for non-PostgreSQL
    prefixes.

    :param db_config: name of the database section in the configuration
    :raises Exception: if the section is missing, not a dict, or lacks 'name'
    """
    section = daconfig.get(db_config, None)
    if not isinstance(section, dict) or 'name' not in section:
        raise Exception("connect_args: missing or invalid configuration for " + db_config)
    args = {}
    prefix = section.get('prefix', 'postgresql+psycopg2://')
    # SSL parameters only apply to PostgreSQL connections.
    if not prefix.startswith('postgres'):
        return args
    mode = section.get('ssl mode', None)
    if mode in ('disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full'):
        args['sslmode'] = mode
    cert_dir = daconfig.get('web server certificate directory', '/var/www/.certs')
    for config_key, pg_param in (('ssl cert', 'sslcert'), ('ssl key', 'sslkey'), ('ssl root cert', 'sslrootcert')):
        cert_name = section.get(config_key, None)
        if not isinstance(cert_name, str):
            continue
        cert_path = os.path.join(cert_dir, cert_name)
        # Only pass the parameter when the file actually exists.
        if os.path.isfile(cert_path):
            args[pg_param] = cert_path
    return args
import redis
import re
from docassemble.base.config import daconfig

# The 'redis' directive looks like 'redis://host' or 'redis://host:port';
# default to localhost when unset.
configured = daconfig.get('redis', None)
if configured is None:
    configured = 'redis://localhost'
redis_host = re.sub(r'^redis://', '', configured.strip())
# Split off an explicit ':port' suffix; fall back to the standard port.
m = re.search(r':([0-9]+)$', redis_host)
if m:
    redis_port = m.group(1)
    redis_host = redis_host[:m.start()]
else:
    redis_port = '6379'
# 'redis database offset' shifts the three logical databases used below.
redis_offset = daconfig.get('redis database offset', 0)
r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_offset)
r_store = redis.StrictRedis(host=redis_host, port=redis_port, db=1 + redis_offset)
r_user = redis.StrictRedis(host=redis_host, port=redis_port, db=2 + redis_offset)
for line in output.split('\n'): #sys.stderr.write("Found line " + str(line) + "\n") a = line.split(": ") if len(a) == 2: #sys.stderr.write("Found " + a[0] + " which was " + a[1] + "\n") results[a[0]] = a[1] for key in ['Name', 'Home-page', 'Version']: if key not in results: results[key] = None return results if __name__ == "__main__": #import docassemble.webapp.database with app.app_context(): #app.config['SQLALCHEMY_DATABASE_URI'] = docassemble.webapp.database.alchemy_connection_string() update_versions() any_package = Package.query.filter_by(active=True).first() if any_package is None: add_dependencies(1) update_versions() check_for_updates(doing_startup=True) remove_inactive_hosts() from docassemble.base.config import daconfig sys.stderr.write("update: touched wsgi file" + "\n") wsgi_file = daconfig.get('webapp', '/usr/share/docassemble/webapp/docassemble.wsgi') if os.path.isfile(wsgi_file): with open(wsgi_file, 'a'): os.utime(wsgi_file, None) db.engine.dispose() sys.exit(0)
def install_package(package):
    """Install a docassemble package (zip upload, git URL, or PyPI name) with pip.

    Honors the redis flag da:updatepackage:use_pip_cache to disable pip's
    download cache.

    :param package: object with attributes name, type, upload, giturl,
        gitbranch, gitsubdir, limitation
    :returns: tuple (returncode, logfilecontents); returncode 0 means success
    """
    sys.stderr.write("install_package: " + package.name + "\n")
    if package.type == 'zip' and package.upload is None:
        return 0, ''
    sys.stderr.write('install_package: ' + package.name + "\n")
    from docassemble.base.config import daconfig
    PACKAGE_DIRECTORY = daconfig.get('packages', '/usr/share/docassemble/local')
    logfilecontents = ''
    pip_log = tempfile.NamedTemporaryFile()
    temp_dir = tempfile.mkdtemp()
    # The flag's absence means "use the cache"; a zero value disables it.
    use_pip_cache = r.get('da:updatepackage:use_pip_cache')
    disable_pip_cache = use_pip_cache is not None and not int(use_pip_cache)
    # Common command prefix; each branch appends its specific arguments.
    commands = ['pip', 'install']
    if disable_pip_cache:
        commands.append('--no-cache-dir')
    if package.type == 'zip' and package.upload is not None:
        saved_file = SavedFile(package.upload, extension='zip', fix=True)
        commands.extend(['--quiet', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--log-file=' + pip_log.name, '--upgrade', saved_file.path + '.zip'])
    elif package.type == 'git' and package.giturl is not None:
        branchpart = '@' + str(package.gitbranch) if package.gitbranch is not None else ''
        url = 'git+' + str(package.giturl) + '.git' + branchpart + '#egg=' + package.name
        if package.gitsubdir is not None:
            url += '&subdirectory=' + str(package.gitsubdir)
        commands.extend(['--quiet', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, url])
    elif package.type == 'pip':
        limit = '' if package.limitation is None else str(package.limitation)
        commands.extend(['--quiet', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, package.name + limit])
    else:
        sys.stderr.write("Wrong package type\n")
        return 1, 'Unable to recognize package type: ' + package.name
    sys.stderr.write("install_package: running " + " ".join(commands) + "\n")
    logfilecontents += " ".join(commands) + "\n"
    # BUG FIX: subprocess.call() returns the exit status and never raises
    # CalledProcessError (that is check_call), so returnval was always 0 and
    # the except branch was dead.  Capture the return code directly.
    returnval = subprocess.call(commands)
    sys.stderr.flush()
    sys.stdout.flush()
    time.sleep(4)
    # BUG FIX: 'rU' text mode yields str, so .decode('utf8') raised
    # AttributeError on Python 3 ('rU' itself is removed in 3.11);
    # read with an explicit encoding instead.
    with open(pip_log.name, 'r', encoding='utf-8', errors='replace') as x:
        logfilecontents += x.read()
    pip_log.close()
    try:
        sys.stderr.write(logfilecontents + "\n")
    except:
        # Best-effort logging; never let a logging failure break the install.
        pass
    sys.stderr.flush()
    sys.stdout.flush()
    time.sleep(4)
    sys.stderr.write('returnval is: ' + str(returnval) + "\n")
    sys.stderr.write('install_package: done' + "\n")
    shutil.rmtree(temp_dir)
    return returnval, logfilecontents
from docassemble.webapp.users.forms import UserProfileForm, EditUserProfileForm, PhoneUserProfileForm, MyRegisterForm, MyInviteForm, NewPrivilegeForm, UserAddForm from docassemble.webapp.users.models import UserAuthModel, UserModel, Role, MyUserInvitation #import docassemble.webapp.daredis from docassemble.base.functions import word, debug_status, get_default_timezone from docassemble.base.logger import logmessage from docassemble.base.config import daconfig from docassemble.base.generate_key import random_alphanumeric from sqlalchemy import or_, and_ import random import string import pytz import datetime import re HTTP_TO_HTTPS = daconfig.get('behind https load balancer', False) @app.route('/privilegelist', methods=['GET', 'POST']) @login_required @roles_required('admin') def privilege_list(): output = """\ <table class="table"> <thead> <tr> <th scope="col">""" + word("Privilege") + """</th> <th scope="col">""" + word("Action") + """</th> </tr> </thead> <tbody> """
import tempfile import mimetypes import zipfile import datetime import subprocess import time from docassemble.base.logger import logmessage from docassemble.base.error import DAError from docassemble.base.config import daconfig import docassemble.webapp.cloud import docassemble.base.functions from docassemble.base.generate_key import random_alphanumeric cloud = docassemble.webapp.cloud.get_cloud() UPLOAD_DIRECTORY = daconfig.get('uploads', '/usr/share/docassemble/files') class SavedFile(object): def __init__(self, file_number, extension=None, fix=False, section='files', filename='file'): file_number = int(file_number) section = str(section) if section not in docassemble.base.functions.this_thread.saved_files: docassemble.base.functions.this_thread.saved_files[section] = dict() if file_number in docassemble.base.functions.this_thread.saved_files[section]: # sys.stderr.write("SavedFile: using cache for " + section + '/' + str(file_number) + "\n") sf = docassemble.base.functions.this_thread.saved_files[section][file_number] for attribute in ['file_number', 'fixed', 'section', 'filename', 'extension', 'directory', 'path', 'modtimes', 'keydict']: if hasattr(sf, attribute): setattr(self, attribute, getattr(sf, attribute)) self.extension = extension self.filename = filename
def url_for(self, **kwargs):
    """Return a URL at which this saved file can be accessed.

    When cloud storage is configured (and the request is not for a page
    image of a 'files'-section file), returns a time-limited signed cloud
    URL; otherwise builds a local uploadedfile/uploadedpage URL.

    Recognized keyword arguments: ext, filename, page, size ('page' or
    'screen'), display_filename, _external.
    """
    if 'ext' in kwargs and kwargs['ext'] is not None:
        extn = kwargs['ext']
        extn = re.sub(r'^\.', '', extn)
    else:
        extn = None
    filename = kwargs.get('filename', self.filename)
    use_external = kwargs.get('_external', False)
    if cloud is not None and not (self.section == 'files' and 'page' in kwargs and kwargs['page']):
        keyname = str(self.section) + '/' + str(self.file_number) + '/' + str(filename)
        page = kwargs.get('page', None)
        if page:
            size = kwargs.get('size', 'page')
            page = re.sub(r'[^0-9]', '', str(page))
            if size == 'screen':
                keyname += 'screen-' + str(page) + '.png'
            else:
                keyname += 'page-' + str(page) + '.png'
        elif extn:
            keyname += '.' + extn
        key = cloud.get_key(keyname)
        if key.does_exist:
            # Signed URL valid for one hour.
            if 'display_filename' in kwargs:
                return key.generate_url(3600, display_filename=kwargs['display_filename'])
            return key.generate_url(3600)
        #why not serve right from uploadedpage in this case?
        sys.stderr.write("key " + str(keyname) + " did not exist\n")
        return 'about:blank'
    if extn is None:
        extn = ''
    else:
        extn = '.' + extn
    root = daconfig.get('root', '/')
    fileroot = daconfig.get('fileserver', root)
    if 'display_filename' in kwargs:
        filename = kwargs['display_filename']
    if self.section == 'files':
        if 'page' in kwargs and kwargs['page']:
            page = re.sub(r'[^0-9]', '', str(kwargs['page']))
            size = kwargs.get('size', 'page')
            url = fileroot + 'uploadedpage'
            if size == 'screen':
                url += 'screen'
            url += '/' + str(self.file_number) + '/' + str(page)
        else:
            if re.search(r'\.', str(filename)):
                url = fileroot + 'uploadedfile/' + str(self.file_number) + '/' + str(filename)
            elif extn != '':
                url = fileroot + 'uploadedfile/' + str(self.file_number) + '/' + str(filename) + extn
            else:
                url = fileroot + 'uploadedfile/' + str(self.file_number)
    else:
        # BUG FIX: `section` was an undefined local name here (NameError on
        # this error path); the instance attribute was intended.
        sys.stderr.write("section " + self.section + " was wrong\n")
        url = 'about:blank'
    if use_external and url.startswith('/'):
        url = docassemble.base.functions.get_url_root() + url
    return url
def convertapi_to_pdf(from_file, to_file):
    """Convert from_file to PDF via the ConvertAPI service and save to to_file."""
    # The API secret is read from the site configuration on each call.
    convertapi.api_secret = daconfig.get('convertapi secret')
    conversion = convertapi.convert('pdf', {'File': from_file})
    conversion.file.save(to_file)
# Celery configuration module: Celery reads these module-level names.
from docassemble.base.config import daconfig
# Pickle serialization allows task arguments to be arbitrary Python objects.
task_serializer = 'pickle'
accept_content = ['pickle']
result_serializer = 'pickle'
timezone = daconfig.get('timezone', 'America/New_York')
enable_utc = True
# Only cap worker concurrency when it is explicitly configured.
if 'celery processes' in daconfig:
    worker_concurrency = daconfig['celery processes']
import docassemble.base.parse import re import os import sys from flask import session, current_app, has_request_context, url_for from flask_mail import Mail as FlaskMail, Message from flask_wtf.csrf import generate_csrf from flask_login import current_user import docassemble.webapp.worker from docassemble.webapp.mailgun_mail import Mail as MailgunMail #sys.stderr.write("I am in backend\n") import docassemble.webapp.setup DEBUG = daconfig.get('debug', False) #docassemble.base.parse.debug = DEBUG from docassemble.webapp.file_access import get_info_from_file_number, get_info_from_file_reference, reference_exists, url_if_exists from docassemble.webapp.file_number import get_new_file_number import time def elapsed(name_of_function): def elapse_decorator(func): def time_func(*pargs, **kwargs): time_start = time.time() result = func(*pargs, **kwargs) sys.stderr.write(name_of_function + ': ' + unicode(time.time() - time_start) + "\n") return result return time_func
def main():
    """Add any columns missing from the PostgreSQL schema, per the schema file.

    Compares information_schema.columns against the packaged db-schema.txt
    (or a configured override) and issues ALTER TABLE ... ADD COLUMN for
    anything missing.  Skips entirely when the database is not PostgreSQL
    or when alembic manages migrations.
    """
    dbconfig = daconfig.get('db', dict())
    db_prefix = dbconfig.get('prefix', 'postgresql+psycopg2://')
    if db_prefix != 'postgresql+psycopg2://':
        sys.stderr.write("fix_postgresql_tables: skipping because configured database is not PostgreSQL.\n")
        return
    db_name = dbconfig.get('name', None)
    db_host = dbconfig.get('host', None)
    db_user = dbconfig.get('user', None)
    db_password = dbconfig.get('password', None)
    db_port = dbconfig.get('port', None)
    db_table_prefix = dbconfig.get('table prefix', None)
    schema_file = dbconfig.get('schema file', None)
    # For each setting absent from the configuration, fall back to the
    # corresponding environment variable, then to a built-in default.
    if db_name is None:
        db_name = os.getenv('DBNAME', '')
        if db_name == '':
            db_name = 'docassemble'
    if db_host is None:
        db_host = os.getenv('DBHOST', '')
        if db_host == '':
            db_host = 'localhost'
    if db_user is None:
        db_user = os.getenv('DBUSER', '')
        if db_user == '':
            db_user = '******'
    if db_password is None:
        db_password = os.getenv('DBPASSWORD', '')
        if db_password == '':
            db_password = '******'
    if db_port is None:
        db_port = os.getenv('DBPORT', '')
        if db_port == '':
            db_port = '5432'
    if db_table_prefix is None:
        db_table_prefix = os.getenv('DBTABLEPREFIX', '')
    if schema_file is None:
        schema_file = os.getenv('DBSCHEMAFILE', None)
    if not (schema_file and os.path.isfile(schema_file)):
        # Fall back to the schema description shipped with docassemble.webapp.
        schema_file = pkg_resources.resource_filename(pkg_resources.Requirement.parse('docassemble.webapp'), "docassemble/webapp/data/db-schema.txt")
    conn = psycopg2.connect(database=db_name, user=db_user, password=db_password, host=db_host, port=db_port)
    cur = conn.cursor()
    try:
        cur.execute("select table_name, column_name, data_type, character_maximum_length, column_default from information_schema.columns where table_schema='public'")
    except:
        sys.exit("failed to read existing columns from database")
    # existing_columns maps table name -> column name -> type/size/default.
    existing_columns = dict()
    rows = cur.fetchall()
    for col in rows:
        if col[0] not in existing_columns:
            existing_columns[col[0]] = dict()
        existing_columns[col[0]][col[1]] = {'type': col[2], 'size': col[3], 'default': col[4]}
    if 'alembic_version' in existing_columns and daconfig.get('use alembic', True):
        # Alembic owns schema migrations; do not make ad-hoc changes.
        sys.stderr.write("fix_postgresql_tables: skipping because alembic is in use.\n")
        return
    desired_columns = dict()
    # NOTE(review): 'rU' mode was removed in Python 3.11 — this open() call
    # would need to change to plain 'r' on newer interpreters.
    with open(schema_file, 'rU') as f:
        for line in f:
            read_in(line.rstrip(), desired_columns)
    commands = list()
    # Legacy cleanup: an old 'shortener' table without the companion 'email'
    # table is from an obsolete schema and is dropped.
    if db_table_prefix + 'shortener' in existing_columns and db_table_prefix + 'email' not in existing_columns:
        commands.append("drop table if exists " + db_table_prefix + "shortener;")
    for table_name in desired_columns:
        if db_table_prefix + table_name in existing_columns:
            for column_name in desired_columns[table_name]:
                if column_name not in existing_columns[db_table_prefix + table_name]:
                    # Build an ALTER TABLE ... ADD COLUMN for the missing column.
                    output = "alter table \"" + db_table_prefix + table_name + "\" add column \"" + column_name + "\" " + desired_columns[table_name][column_name]['type']
                    if desired_columns[table_name][column_name]['size']:
                        output += "(" + desired_columns[table_name][column_name]['size'] + ")"
                    if desired_columns[table_name][column_name]['default']:
                        output += " default " + desired_columns[table_name][column_name]['default']
                    output += ";"
                    commands.append(output)
    if len(commands):
        for command in commands:
            try:
                cur.execute(command)
            except:
                sys.exit("Failed to run: " + command)
        conn.commit()
    cur.close()
    conn.close()
def word_to_pdf(in_file, in_format, out_file, pdfa=False, password=None, update_refs=False, tagged=False):
    """Convert a word-processing file to PDF, retrying up to five times.

    Uses ConvertAPI when a 'convertapi secret' is configured; otherwise
    runs LibreOffice (via a macro for PDF/A or tagged output).  Returns
    True on success, False on failure.
    """
    tempdir = tempfile.mkdtemp()
    from_file = os.path.join(tempdir, "file." + in_format)
    to_file = os.path.join(tempdir, "file.pdf")
    shutil.copyfile(in_file, from_file)
    tries = 0
    # The conversion method selects which LibreOffice macro variant to run.
    if pdfa:
        method = 'pdfa'
    elif tagged:
        method = 'tagged'
    else:
        method = 'default'
    while tries < 5:
        use_libreoffice = True
        if update_refs:
            if daconfig.get('convertapi secret', None) is not None:
                # Refresh cross-references before handing off to ConvertAPI.
                update_references(from_file)
                try:
                    convertapi_to_pdf(from_file, to_file)
                    result = 0
                except:
                    logmessage("Call to convertapi failed")
                    result = 1
                use_libreoffice = False
            else:
                # The macro's third argument (True) asks it to update references.
                subprocess_arguments = [LIBREOFFICE_PATH, '--headless', '--invisible', 'macro:///Standard.Module1.ConvertToPdf(' + from_file + ',' + to_file + ',True,' + method + ')']
        elif daconfig.get('convertapi secret', None) is not None:
            try:
                convertapi_to_pdf(from_file, to_file)
                result = 0
            except:
                logmessage("Call to convertapi failed")
                result = 1
            use_libreoffice = False
        else:
            if method == 'default':
                # Plain conversion can use LibreOffice's --convert-to directly.
                subprocess_arguments = [LIBREOFFICE_PATH, '--headless', '--convert-to', 'pdf', from_file]
            else:
                subprocess_arguments = [LIBREOFFICE_PATH, '--headless', '--invisible', 'macro:///Standard.Module1.ConvertToPdf(' + from_file + ',' + to_file + ',False,' + method + ')']
        if use_libreoffice:
            initialize_libreoffice()
            #logmessage("Trying libreoffice with " + repr(subprocess_arguments))
            p = subprocess.Popen(subprocess_arguments, cwd=tempdir)
            result = p.wait()
        # Success is judged by the output file existing, not the exit code.
        if os.path.isfile(to_file):
            break
        result = 1
        tries += 1
        # Back off with jitter before retrying.
        time.sleep(2 + tries*random.random())
        if use_libreoffice:
            logmessage("Retrying libreoffice with " + repr(subprocess_arguments))
        else:
            logmessage("Retrying convertapi")
        continue
    if result == 0:
        if password:
            pdf_encrypt(to_file, password)
        shutil.copyfile(to_file, out_file)
    if tempdir is not None:
        shutil.rmtree(tempdir)
    if result != 0:
        return False
    return True
import shutil import sys import re import time import random from docassemble.base.config import daconfig from docassemble.base.logger import logmessage from docassemble.base.pdfa import pdf_to_pdfa from docassemble.base.pdftk import pdf_encrypt, PDFTK_PATH, replicate_js_and_calculations from io import open import mimetypes from subprocess import call, check_output import convertapi style_find = re.compile(r'{\s*(\\s([1-9])[^\}]+)\\sbasedon[^\}]+heading ([0-9])', flags=re.DOTALL) PANDOC_PATH = daconfig.get('pandoc', 'pandoc') def convertapi_to_pdf(from_file, to_file): convertapi.api_secret = daconfig.get('convertapi secret') result = convertapi.convert('pdf', { 'File': from_file }) result.file.save(to_file) def get_pandoc_version(): p = subprocess.Popen( [PANDOC_PATH, '--version'], stdin=subprocess.PIPE, stdout=subprocess.PIPE ) version_content = p.communicate()[0].decode('utf-8') version_content = re.sub(r'\n.*', '', version_content) version_content = re.sub(r'^pandoc ', '', version_content)
import redis
import re
from docassemble.base.config import daconfig

# The 'redis' directive may carry a redis:// scheme prefix; strip it and
# default to localhost when the directive is absent.
configured = daconfig.get('redis', None)
if configured is None:
    configured = 'redis://localhost'
redis_host = re.sub(r'^redis://', '', configured)
# Three connections to separate logical databases: general (0), store (1),
# and user (2).
r = redis.StrictRedis(host=redis_host, db=0)
r_store = redis.StrictRedis(host=redis_host, db=1)
r_user = redis.StrictRedis(host=redis_host, db=2)
indexno = 0 for locale in daconfig['other os locales']: print('OTHERLOCALES[' + str(indexno) + ']=' + repr(str(locale))) indexno += 1 else: other_locales_variable = os.getenv('OTHERLOCALES', None) if other_locales_variable is not None and other_locales_variable != 'null': print('declare -a OTHERLOCALES') print('export OTHERLOCALES') indexno = 0 for locale in map(lambda x: x.strip(), separator.split(other_locales_variable)): print('OTHERLOCALES[' + str(indexno) + ']=' + repr(str(locale))) indexno += 1 max_content_length = daconfig.get('maximum content length', 16 * 1024 * 1024) if isinstance(max_content_length, (int, type(None))): if max_content_length is None or max_content_length <= 0: print('DAMAXCONTENTLENGTH=0') else: print('DAMAXCONTENTLENGTH=' + str(max_content_length)) else: print('DAMAXCONTENTLENGTH=' + str(16 * 1024 * 1024)) if 'debian packages' in daconfig and type( daconfig['debian packages']) is list: print('declare -a PACKAGES') print('export PACKAGES') indexno = 0 for package in daconfig['debian packages']: print('PACKAGES[' + str(indexno) + ']=' + repr(str(package))) indexno += 1
from docassemble.webapp.app_object import app
from docassemble.base.config import daconfig
import docassemble.webapp.database

# Branding and navigation-link settings, all driven by the site configuration.
app.config['APP_NAME'] = daconfig.get('appname', 'docassemble')
# The brand name falls back to the application name when not set separately.
app.config['BRAND_NAME'] = daconfig.get('brandname', daconfig.get('appname', 'docassemble'))
app.config['SHOW_PROFILE'] = True if daconfig.get('show profile link', True) else False
app.config['SHOW_MY_INTERVIEWS'] = True if daconfig.get('show interviews link', True) else False
# The dispatch link is shown only when interviews are registered under
# 'dispatch' and the link is explicitly enabled.
app.config['SHOW_DISPATCH'] = True if len(daconfig['dispatch']) and daconfig.get('show dispatch link', False) else False
# Outgoing mail settings, taken from the 'mail' configuration section.
app.config['MAIL_USERNAME'] = daconfig['mail'].get('username', None)
app.config['MAIL_PASSWORD'] = daconfig['mail'].get('password', None)
app.config['MAIL_DEFAULT_SENDER'] = daconfig['mail'].get('default sender', None)
app.config['MAIL_SERVER'] = daconfig['mail'].get('server', 'localhost')
app.config['MAIL_PORT'] = daconfig['mail'].get('port', 25)
app.config['MAIL_USE_SSL'] = daconfig['mail'].get('use ssl', False)
app.config['MAIL_USE_TLS'] = daconfig['mail'].get('use tls', True)
#app.config['ADMINS'] = [daconfig.get('admin address', None)]
app.config['APP_SYSTEM_ERROR_SUBJECT_LINE'] = app.config['APP_NAME'] + " system error"
app.config['APPLICATION_ROOT'] = daconfig.get('root', '/')
app.config['CSRF_ENABLED'] = False
# Two-factor authentication feature flags from the
# 'two factor authentication' configuration section.
if daconfig['two factor authentication'].get('enable', True):
    app.config['USE_MFA'] = True
else:
    app.config['USE_MFA'] = False
if daconfig['two factor authentication'].get('allow sms', True):
    app.config['MFA_ALLOW_SMS'] = True
else:
    app.config['MFA_ALLOW_SMS'] = False
if daconfig['two factor authentication'].get('allow app', True):
    app.config['MFA_ALLOW_APP'] = True
else:
    app.config['MFA_ALLOW_APP'] = False