@classmethod
def load_all_models(cls):
    """ Helper function to load all models """

    # `current` is web2py's thread-local environment (from gluon import current)
    s3 = current.response.s3
    if s3.all_models_loaded:
        # Already loaded
        return

    models = current.models

    # Load models
    if models is not None:
        for name in models.__dict__:
            if type(models.__dict__[name]).__name__ == "module":
                cls.load(name)

    # Define importer tables
    from s3import import S3Importer, S3ImportJob
    S3Importer.define_upload_table()
    S3ImportJob.define_job_table()
    S3ImportJob.define_item_table()

    # Don't do this again within the current request cycle
    s3.all_models_loaded = True

    return

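The variants in this listing all follow the same pattern: iterate the module objects registered under current.models and hand each name to cls.load(). A minimal usage sketch follows, assuming the method lives on a registry class named S3Model (the class name and the import path are assumptions for illustration, not confirmed by the snippets):

from gluon import current

# Assumption: S3Model is the registry class that owns load_all_models();
# in Sahana Eden the class lives in modules/s3/s3model.py.
from s3 import S3Model

def list_all_tables():
    # Ensure every model module has been executed and its tables defined
    # before iterating the complete table list of the database.
    S3Model.load_all_models()
    db = current.db
    return {tn: db[tn].fields for tn in db.tables}
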
@classmethod
def load_all_models(cls):
    """ Helper function to load all models """

    s3 = current.response.s3
    if s3.all_models_loaded:
        # Already loaded
        return
    s3.load_all_models = True

    models = current.models

    # Load models
    if models is not None:
        for name in models.__dict__:
            if type(models.__dict__[name]).__name__ == "module":
                cls.load(name)

    # Define importer tables
    from s3import import S3Importer, S3ImportJob
    S3Importer.define_upload_table()
    S3ImportJob.define_job_table()
    S3ImportJob.define_item_table()

    # Don't do this again within the current request cycle
    s3.load_all_models = False
    s3.all_models_loaded = True

@classmethod
def load_all_models(cls):
    """ Helper function to load all models """

    s3 = current.response.s3
    if s3.all_models_loaded:
        # Already loaded
        return
    s3.load_all_models = True

    models = current.models

    # Load models
    if models is not None:
        for name in models.__dict__:
            if type(models.__dict__[name]).__name__ == "module":
                cls.load(name)

    # Define importer tables
    from s3import import S3Importer, S3ImportJob
    S3Importer.define_upload_table()
    S3ImportJob.define_job_table()
    S3ImportJob.define_item_table()

    # Define sessions table
    if current.deployment_settings.get_base_session_db():
        # Copied from https://github.com/web2py/web2py/blob/master/gluon/globals.py#L895
        # Not DRY, but no easy way to make it so
        current.db.define_table("web2py_session",
                                Field("locked", "boolean", default=False),
                                Field("client_ip", length=64),
                                Field("created_datetime", "datetime",
                                      default=current.request.now),
                                Field("modified_datetime", "datetime"),
                                Field("unique_key", length=64),
                                Field("session_data", "blob"),
                                )

    # Don't do this again within the current request cycle
    s3.load_all_models = False
    s3.all_models_loaded = True

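This variant additionally defines the web2py_session table when sessions are configured to live in the database. For context, the standard web2py way to enable database-backed sessions is session.connect(); a minimal sketch, assuming the usual web2py model environment (session, request, response and db are injected by the framework, and the exact table name web2py creates depends on the web2py version and application name):

# In a web2py model file, e.g. models/db.py (placement is an assumption).
# session, request, response and db are provided by the web2py environment.
session.connect(request, response, db=db)
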
def load_all_models(self):
    """ Load all models """

    config = self.config
    db = current.db

    tables = [tn for tn in config if "load" in config[tn]]
    for tablename in tables:
        if tablename not in db:
            self.load(tablename)

    # Also load importer tables
    from s3import import S3Importer, S3ImportJob
    S3Importer.define_upload_table()
    S3ImportJob.define_job_table()
    S3ImportJob.define_item_table()

@classmethod
def load_all_models(cls):
    """ Helper function to load all models """

    models = current.models

    # Load models
    if models is not None:
        for name in models.__dict__:
            if type(models.__dict__[name]).__name__ == "module":
                cls.load(name)

    # Define importer tables
    from s3import import S3Importer, S3ImportJob
    S3Importer.define_upload_table()
    S3ImportJob.define_job_table()
    S3ImportJob.define_item_table()

    return

import contextlib

import sxclient

# parse_command_line, setup_logging, S3Importer and error_exit are
# defined elsewhere in the same module.

def main():
    args = parse_command_line()
    setup_logging(args.verbose)
    try:
        cluster = sxclient.Cluster(args.cluster_name,
                                   args.cluster_address,
                                   args.is_secure,
                                   args.verify,
                                   args.port)
        user_data = sxclient.UserData.from_key_path(args.key_path)
        with contextlib.closing(sxclient.SXController(cluster, user_data)) as sx:
            s3importer = S3Importer(volume_size=args.volume_size,
                                    volume_owner=args.owner,
                                    volume_replica=args.replica_count,
                                    sx=sx,
                                    volume_prefix=args.volume_prefix,
                                    subdir=args.subdir,
                                    worker_num=args.workers)
            s3importer.import_all()
    except (Exception, KeyboardInterrupt) as exc:
        error_exit(exc)

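For context, a hedged sketch of what the parse_command_line() helper might look like, with attribute names inferred from what main() reads; every option name and default below is an assumption for illustration, not the tool's actual CLI:

import argparse

def parse_command_line():
    # Hypothetical flag names; only the resulting attribute names
    # (cluster_name, cluster_address, is_secure, ...) are taken from main().
    parser = argparse.ArgumentParser(
        description="Import S3 buckets into an SX cluster")
    parser.add_argument("cluster_name")
    parser.add_argument("--cluster-address", dest="cluster_address")
    parser.add_argument("--port", type=int)
    parser.add_argument("--no-ssl", dest="is_secure", action="store_false")
    parser.add_argument("--no-verify", dest="verify", action="store_false")
    parser.add_argument("--key-path", dest="key_path", required=True)
    parser.add_argument("--volume-size", dest="volume_size")
    parser.add_argument("--owner")
    parser.add_argument("--replica-count", dest="replica_count",
                        type=int, default=1)
    parser.add_argument("--volume-prefix", dest="volume_prefix", default="")
    parser.add_argument("--subdir", default="")
    parser.add_argument("--workers", type=int, default=4)
    parser.add_argument("-v", "--verbose", action="store_true")
    return parser.parse_args()
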