def create_production(build_dir, backups, script_dir): """Put the staging version to production hosted at register.geostandaarden.nl """ print "Building production..." logging.info("Building production...") deploy = OSFS('..') if deploy.exists(backups) == False: deploy.makedir(backups) deploy.copydir('%s/%s' % (script_dir, build_dir), 'register-new', overwrite=True) if deploy.exists('register') == True: # server refuses to recursively remove register/staging # hence we excplicitly remove symbolic link to staging try: deploy.remove('register/staging/staging') except ResourceNotFoundError: print "Warning, register/staging/staging not found..." try: deploy.removedir('register/staging') except ResourceNotFoundError: print "Warning, register/staging not found..." backup_dir = time.strftime('%Y-%m-%d-%H-%M-%S') # if deploy.exists('backups/%s' % backup_dir): # deploy.removedir('backups/%s' % backup_dir, force=True) deploy.copydir('register', 'backups/%s' % backup_dir, overwrite=True) try: deploy.movedir('register', 'register-old', overwrite=True) except ResourceNotFoundError: pass deploy.movedir('register-new', 'register', overwrite=True) # create symbolic link to standalone staging directory # fails if production is built first... deploy.makedir('register/staging') call('cd ../register/staging; ln -s ../../staging', shell=True) call('cd ../register; ln -s ../%s/log.txt' % script_dir , shell=True) try: deploy.removedir('register-old', force=True) except ResourceNotFoundError: pass call('chmod -R a+rx ../register', shell=True) print "Done building production..." logging.info("Production built successfully!")
def verify_content_existence(self, modulestore, root_dir, location, dirname, category_name, filename_suffix=''):
    """Assert that the export produced `dirname` and that it contains one
    file (named after the module, plus `filename_suffix`) for every module
    of the given category in the store."""
    export_fs = OSFS(root_dir / 'test_export')
    self.assertTrue(export_fs.exists(dirname))

    category_query = Location('i4x', location.org, location.course, category_name, None)
    for module in modulestore.get_items(category_query):
        category_fs = OSFS(root_dir / ('test_export/' + dirname))
        self.assertTrue(category_fs.exists(module.location.name + filename_suffix))
def deploy_register(): """Put the staging version to production hosted at register.geostandaarden.nl """ ## TODO: feed this function absolute paths print "Deploying production..." logging.info("Deploying production...") production = OSFS(production_path) # NOTE: only build paths within script_dir are currently supported call ('cp -r %s %s' % (ospath.join(build_path, register_path), ospath.join(production_path, register_path + '-new')), shell=True) if production.exists(register_path): backup_dir = time.strftime('%Y-%m-%d-%H-%M-%S') production.copydir(register_path, '%s/%s' % (backups_path, backup_dir), overwrite=True) try: production.movedir(register_path, register_path + '-old', overwrite=True) except ResourceNotFoundError: pass production.movedir(register_path + '-new', register_path, overwrite=True) try: production.removedir(register_path + '-old', force=True) except ResourceNotFoundError: pass call('chmod -R a+rx %s/%s' % (production_path, register_path), shell=True) logging.info("Production built successfully!")
def get_yahoo_icon():
    """Proxy the Yahoo Messenger online-status icon for the user in request.args[0].

    Streams icon-YMonline.png or icon-YMoffline.png depending on the status
    string returned by opi.yahoo.com. Returns None on unexpected failure.
    """
    try:
        # Validate the request *before* any network I/O. The original indexed
        # request.args[0] first, so a missing arg raised IndexError and the
        # intended HTTP(400) was never produced.
        if len(request.args) != 1:
            raise HTTP(400)
        http = urllib3.PoolManager()
        fetch_data = http.request('GET', 'http://opi.yahoo.com/online?u=%s&m=t&t=1' % request.args[0])
        if fetch_data.status == 200:
            from fs.osfs import OSFS
            file_server = OSFS('/home/www-data/web2py/applications/cbw/static/images')
            # "01" is the body Yahoo returns for an online user (t=1 mode).
            if fetch_data.data == "01":
                path = "icon-YMonline.png"
            else:
                path = "icon-YMoffline.png"
            if file_server.exists(path):
                response.headers['Content-Length'] = file_server.getinfo(path)['size']
                response.headers['Content-Type'] = 'image/png'
                response.headers['Content-Disposition'] = "attachment; filename=yahoo.png"
                return response.stream(file_server.open(path=path, mode='rb'))
            else:
                raise HTTP(404)
        else:
            raise Exception(fetch_data.status)
    except HTTP:
        # Let the deliberate HTTP(400)/HTTP(404) responses reach the framework
        # instead of being swallowed by the catch-all below.
        raise
    except Exception:
        return None
def get_yaml(username, project_name):
    """Return the contents of <project>/nap-compose.yml for the given user.

    Falls back to the placeholder string "yaml" when the file does not exist.
    """
    user_path = config.project_path + "/" + username
    file_dir = OSFS(user_path)
    if file_dir.exists(project_name + "/nap-compose.yml"):
        # open() in a with-block replaces the Python-2-only file() call and
        # guarantees the handle is closed (the original leaked it).
        with open(user_path + "/" + project_name + '/nap-compose.yml') as f:
            return f.read()
    return "yaml"
def cleanup(source, destination_temp, standard):
    """Remove the source and temporary destination folders."""
    try:
        src_fs = OSFS('%s/%s' % (source, standard))
    except ResourceNotFoundError:
        return None
    dest_fs = OSFS(destination_temp)

    # Every directory in the source except the git metadata is an artifact.
    entries = [d for d in src_fs.listdir(dirs_only=True) if d != '.git']
    for entry in entries:
        candidate = '%s/%s' % (entry, standard)
        if dest_fs.exists(candidate):
            dest_fs.removedir(candidate, force=True)

    if dest_fs.exists(standard):
        dest_fs.removedir(standard, force=True)
def cleanup(build_path, source, destination_temp, standard):
    """Remove the source and temporary destination folders."""
    try:
        src_fs = OSFS(ospath.join(build_path, source, standard))
    except ResourceNotFoundError:
        return None
    dest_fs = OSFS(ospath.join(build_path, destination_temp))

    # Every directory in the source except the git metadata is an artifact.
    entries = [d for d in src_fs.listdir(dirs_only=True) if d != '.git']
    for entry in entries:
        candidate = ospath.join(entry, standard)
        if dest_fs.exists(candidate):
            dest_fs.removedir(candidate, force=True)

    if dest_fs.exists(standard):
        dest_fs.removedir(standard, force=True)
def test_unzip_local_data(self):
    """Unzipping a local zip DataSet extracts its contents next to the archive."""
    os = OSFS(".")  # NOTE: local name shadows the os module inside this test
    # Stub out remove() so unzip_file() cannot delete anything during the run.
    os_remove = os.remove
    os.remove = mock.Mock(return_value=None)
    # Work on a copy of the fixture so the original zip is left untouched.
    os.copy("./tests/resources/local_data/base_train.zip", "./tests/resources/local_data/train.zip")
    test_local = DataSet(os, "/local/path", "train", "./tests/resources/local_data/train.zip", "test dataset", "zip")
    test_local.unzip_file()
    result = os.exists("./tests/resources/local_data/train/train.csv")
    # Restore the real remove() and clean up the extracted fixture files.
    os.remove = os_remove
    os.remove("./tests/resources/local_data/train/train.csv")
    os.remove("./tests/resources/local_data/train.zip")
    os.removedir("./tests/resources/local_data/train")
    self.assertTrue(result)
def destroy_project(username, project_name):
    """Delete a user's project: its directory, running services and DB records.

    Returns 'success', or 'project not found' when no DB record exists.
    """
    user_path = config.project_path + "/" + username
    home_dir = OSFS(user_path)
    # Remove the project's directory tree first (forced, recursive).
    if home_dir.exists(project_name):
        home_dir.removedir(project_name, force=True)
    project = Project.get_project_by_name(username, project_name)
    if project is None:
        # NOTE(review): the directory has already been removed at this point
        # even though the project record is missing — confirm this is intended.
        return 'project not found'
    project.stop()
    project.remove()
    database_update.delete_project(username, project_name)
    database_update.delete_service_for_scale(username, project_name)
    return 'success'
def demo():
    """Stream a file from /home/demo/ named by request.args[0] as a download.

    Raises HTTP(400) for a bad arg count and HTTP(404) for a missing file,
    but see the note on the bare except below.
    """
    try:
        from fs.osfs import OSFS
        file_server = OSFS('/home/demo/')
        if len(request.args) != 1:
            raise HTTP(400)
        # NOTE(review): the arg is used directly as a path — presumably OSFS
        # confines access to /home/demo/, but verify it rejects '..' traversal.
        path = request.args[0]
        if file_server.exists(path):
            response.headers['Content-Length'] = file_server.getinfo(path)['size']
            response.headers['Content-Type'] = 'application/octet-stream'
            response.headers['Content-Disposition'] = "attachment; filename=%s" % path
            return response.stream(file_server.open(path=path, mode='rb'))
        else:
            raise HTTP(404)
    except:
        # NOTE(review): this bare except also catches the HTTP(400)/HTTP(404)
        # raised above, converting every failure into a 200 "ERROR" response.
        raise HTTP(200, "ERROR")
def create_staging(staging_path, production_path, build_path): """Create a staging version of the register hosted at register.geostandaarden.nl/staging """ logging.info("Building staging...") production = OSFS(production_path) print "Removing current staging..." if production.exists(staging_path): production.removedir(staging_path, force=True) print 'Moving new register to staging...' # OSFS cannot copy to arbitrary locations call('cp -r %s %s' % (ospath.join(build_path, staging_path), production_path), shell=True) call('chmod -R a+rx %s' % (ospath.join(production_path, staging_path)), shell=True) logging.info("Staging built successfully!")
def download_url(config):
    """Fetch a single URL and cache its body as <md5(url)>.html in *folder*.

    config -- a (folder, url) tuple.
    Already-downloaded URLs are skipped; failures are best-effort (no raise).
    """
    folder, url = config
    _hash = hashlib.md5(url.encode("utf-8")).hexdigest()
    _hash = "%s.html" % _hash
    ofs = OSFS(folder)
    if ofs.exists(_hash):
        return
    try:
        resp = requests.get(url, timeout=10)
    except SSLError:
        try:
            # SECURITY: deliberately retries without certificate verification
            # (best-effort scraping). The timeout is kept so the retry cannot
            # hang forever — the original retry had none.
            resp = requests.get(url, verify=False, timeout=10)
        except Exception:
            # narrowed from a bare except so KeyboardInterrupt still works
            return
    except Exception:
        print("Error: %s" % url)
        return
    print("\t\t\tDownloaded... %s" % url)
    with ofs.open(_hash, "wb") as f:
        f.write(resp.content)
# Bootstrap script: prepare the build directory layout and the production
# directory, then do a first fetch + build of every configured register
# repository and deploy the result.
from fs.errors import ResourceNotFoundError

import settings as s
from backend import fetch_repo, deploy_register, build_register
from utils import load_repos

root_fs = OSFS(s.root_path)
root_fs.makedir(s.build_path, recursive=True, allow_recreate=True)
build_fs = OSFS(s.build_path)
build_fs.makedir(s.sources_path, allow_recreate=True)
build_fs.makedir(s.register_path, allow_recreate=True)

# create production directory if needed
try:
    production_fs = OSFS(s.production_path)
except ResourceNotFoundError:
    # grab production dir's parent dir
    path = s.production_path.split('/')[-2]
    print path
    production_fs = OSFS(s.production_path[:len(s.production_path) - (len(path) + 1)]).makeopendir(path)
    print production_fs

if not production_fs.exists(s.backups_path):
    production_fs.makedir(s.backups_path)

# fetch repos from GitHub
for repo in load_repos(s.repos_path)[0].values():
    print 'Fetching %s for the first time' % repo['id']
    fetch_repo(root_fs, repo['id'], repo['url'])
    build_register(repo['id'])

deploy_register()
def create_production(destination, backups, script_entry_path, production_path): """Put the staging version to production hosted at register.geostandaarden.nl """ ## TODO: feed this function absolute paths print "Building production..." logging.info("Building production...") production = OSFS(production_path) # if production.exists(backups) == False: # production.makedir(backups) # copy newly baked register/staging to production directory # NOTE: only build paths within script_dir are currently supported call ('cp -r %s %s' % (ospath.join(build_path, destination), ospath.join(production_path, destination + '-new')), shell=True) # production.copydir('%s/%s/%s' % (script_dir, build_path, destination), destination + '-new', overwrite=True) if production.exists(destination) == True: # server refuses to recursively remove register/staging # hence we excplicitly remove symbolic link to staging try: production.remove('%s/staging/staging' % destination) except ResourceNotFoundError: print "Warning, %s/staging/staging not found..." % destination try: production.removedir('%s/staging' % destination) except ResourceNotFoundError: print "Warning, %s/staging not found..." % destination backup_dir = time.strftime('%Y-%m-%d-%H-%M-%S') # if production.exists('backups/%s' % backup_dir): # production.removedir('backups/%s' % backup_dir, force=True) production.copydir(destination, '%s/%s' % (backups, backup_dir), overwrite=True) try: production.movedir(destination, destination + '-old', overwrite=True) except ResourceNotFoundError: pass production.movedir(destination + '-new', destination, overwrite=True) # create symbolic link to standalone staging directory # fails if production is built first... 
production.makedir('%s/staging' % destination) call('cd %s; ln -s %s' % (ospath.join(production_path, destination, 'staging'), ospath.join(production_path, 'staging')), shell=True) call('cd %s; ln -s %s' % (ospath.join(production_path, destination), ospath.join(script_entry_path, 'log.txt')), shell=True) try: production.removedir(destination + '-old', force=True) except ResourceNotFoundError: pass call('chmod -R a+rx %s/%s' % (production_path, destination), shell=True) print "Done building production..." logging.info("Production built successfully!")
mail = Mail() mail.settings.server = settings.email_server mail.settings.sender = settings.email_sender mail.settings.login = settings.email_login ## configure auth policy auth.settings.controller = 'default' auth.settings.mailer = mail auth.settings.registration_requires_verification = True auth.settings.registration_requires_approval = True auth.settings.reset_password_requires_verification = True auth.settings.create_user_groups = False auth.settings.actions_disabled.append('register') osFileServer = OSFS(settings.home_dir) if not osFileServer.exists(settings.creator_dir): osFileServer.makedir(settings.creator_dir) if not osFileServer.exists(settings.product_image_dir): osFileServer.makedir(settings.product_image_dir) ## Google Api Key GOOGLE_API_KEY = "AIzaSyBFA3zO-fDW6iVg11fMqf6MANE4AwB1xRU" GCM_SEND_HOST = "android.googleapis.com" GCM_SEND_URL = "/gcm/send" db.define_table('clsb_config', Field('config_key', type='string', unique=True, notnull=True), Field('config_value', type='text')) db.define_table( 'clsb20_encrypt_product', Field('product_code', type='string', unique=True, notnull=True),
# Turn on captcha for registration if int(myconf.take('recaptcha.use')): auth.settings.captcha = Recaptcha2(request, myconf.take('recaptcha.site_key'), myconf.take('recaptcha.secret_key')) # ----------------------------------------------------------------------------- # IMPORT the CKEDITOR PLUGIN TO GIVE A WYSIWYG EDITOR FOR BLOGS AND NEWS # -- OK, so this editor is neat but one issue is that it dumps files into the # root of uploads, which is messy # -- Ordinarily, this would be controlled by the upload_folder setting but # this is hardcoded in the module. Could edit it there but you can also use # a fs object to provide a folder # -- You'd think it might be possible to have multiple upload folders but # it turns out to be quite hard to switch the settings # ----------------------------------------------------------------------------- ckeditor = CKEditor(db) app_root = request.folder app_root_fs = OSFS(app_root) if not app_root_fs.exists('uploads/news_and_blogs/'): blog_fs = app_root_fs.makeopendir('uploads/news_and_blogs/') else: blog_fs = app_root_fs.opendir('uploads/news_and_blogs/') ckeditor.settings.uploadfs = blog_fs ckeditor.settings.table_upload_name = 'ckeditor_uploads' ckeditor.define_tables(fake_migrate=True)
class VirtualHost(object):
    """ Represents a single host. This class implements the commands that are host-specific, like pwd, ls, etc. """

    def __init__(self, params, network, fs_dir):
        # Identity and per-host shell environment come from the config mapping.
        self.hostname = params['hostname']
        self.ip_address = params['ip_address']
        self.network = network
        self.env = params['env']
        # Usable addresses exclude the network and broadcast addresses.
        valid_ips = map(str, network[1:-1])
        if self.ip_address is None:
            logger.error(
                'IP address for {} is not specified in the config file (or is "null")'
                .format(self.hostname))
            # Prefer the IP used on a previous run; otherwise pick at random.
            if not self._set_ip_from_previous_run(fs_dir, valid_ips):
                self.ip_address = get_random_item(valid_ips)
                logger.info('Assigned random IP {} to host {}'.format(
                    self.ip_address, self.hostname))
        else:
            if not self.ip_address in valid_ips:
                logger.error(
                    'IP Address {} for {} is not valid for the specified network'
                    .format(params['ip_address'], self.hostname))
                if not self._set_ip_from_previous_run(fs_dir, valid_ips):
                    self.ip_address = get_random_item(valid_ips)
                    logger.info('Assigned random IP {} to host {}'.format(
                        self.ip_address, self.hostname))
        self.valid_logins = params['valid_logins']
        self.logged_in = False
        self.current_user = None
        if params.get('default', False):
            self.default = True
        else:
            self.default = False
        # Each host gets its own sandboxed on-disk filesystem under fs_dir.
        self.filesystem = OSFS(os.path.join(
            fs_dir, '{}_{}'.format(self.hostname, self.ip_address)),
            create=True)
        self.working_path = '/'

    def authenticate(self, username, password):
        """Return True iff the username/password pair matches valid_logins."""
        if self.valid_logins.get(username, None) == password:
            return True
        return False

    def login(self, username):
        """Mark the given user as logged in on this host."""
        logger.debug('User "{}" has logged into "{}" host'.format(
            username, self.hostname))
        self.logged_in = True
        self.current_user = username

    def logout(self):
        """Clear the logged-in state."""
        self.logged_in = False
        self.current_user = None

    @property
    def welcome(self):
        """Contents of /etc/motd on the virtual fs, or a default greeting."""
        if self.filesystem.isfile('/etc/motd'):
            with self.filesystem.open('/etc/motd') as motd_file:
                return motd_file.read()
        else:
            return 'Welcome to {} server.'.format(self.hostname)

    @property
    def prompt(self):
        """Shell prompt string, e.g. 'user@host:/path$ '."""
        prompt = '{}@{}:{}$ '.format(self.current_user, self.hostname,
                                     self.working_path)
        return prompt

    def run_echo(self, params, shell):
        """Emulate echo: expands a lone $VAR from env; '*' becomes a dir listing."""
        if not params:
            shell.writeline('')
        elif params[0].startswith('$') and len(params) == 1:
            var_name = params[0][1:]
            value = self.env.get(var_name, '')
            shell.writeline(value)
        elif '*' in params:
            params.remove('*')
            params.extend(self.filesystem.listdir())
            shell.writeline(' '.join(params))
        else:
            shell.writeline(' '.join(params))

    def run_pwd(self, params, shell):
        """Emulate pwd: print the working path; any argument is an error."""
        if params:
            shell.writeline('pwd: too many arguments')
        else:
            shell.writeline('{}'.format(self.working_path))

    def run_wget(self, params, shell):
        """Emulate wget: -h/-V print canned output, otherwise delegate to WgetCommand."""
        parser = Parser(add_help=False)
        parser.add_argument('-h', '--help', action='store_true', default=False)
        parser.add_argument('-V', '--version', action='store_true',
                            default=False)
        parser.add_argument('-O', '--output-document')
        args, unparsed = parser.parse_known_args(params)
        if unparsed:
            url = unparsed[0]
        elif not args.help and not args.version:
            # No URL and no flag: print wget's "missing URL" output.
            noparam_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                             'data', 'commands', 'wget',
                                             'no_param')
            self.send_data_from_file(noparam_file_path, shell)
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'wget', 'help')
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                             'data', 'commands', 'wget',
                                             'version')
            self.send_data_from_file(version_file_path, shell)
            return
        wget_command = WgetCommand(url, self.working_path, self.filesystem,
                                   args, shell)
        wget_command.process()

    def run_ping(self, params, shell):
        """Emulate ping: -h or no args shows help, else ping the last non-flag param."""
        options = [x for x in params if x.startswith('-')]
        if '-h' in options or len(params) == 0:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'ping', 'help')
            self.send_data_from_file(help_file_path, shell)
            return
        filtered_params = [p for p in params if not p.startswith('-')]
        ping_host = filtered_params[-1]
        logger.debug('Going to ping {}'.format(ping_host))
        ping_command = PingCommand(ping_host, shell)
        ping_command.process()

    def run_ifconfig(self, params, shell):
        """Emulate ifconfig: canned --help/--version, otherwise a templated report."""
        if len(params) >= 2:
            # Any attempt to reconfigure an interface is denied.
            shell.writeline('SIOCSIFFLAGS: Operation not permitted')
            return
        if params:
            parameter = params[0]
            if parameter == '--version':
                version_file_path = os.path.join(
                    os.path.dirname(hornet.__file__), 'data', 'commands',
                    'ifconfig', 'version')
                self.send_data_from_file(version_file_path, shell)
                logger.debug(
                    'Sending version string for ifconfig from {} file'.format(
                        version_file_path))
                return
            elif parameter == '--help' or parameter == '-h':
                help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                              'data', 'commands', 'ifconfig',
                                              'help')
                self.send_data_from_file(help_file_path, shell)
                logger.debug(
                    'Sending version string for ifconfig from {} file'.format(
                        help_file_path))
                return
        output_template_path = os.path.join(os.path.dirname(hornet.__file__),
                                            'data', 'commands', 'ifconfig',
                                            'output_template')
        ifconfig_command = IfconfigCommand(params, output_template_path,
                                           self.ip_address, self.network)
        output = ifconfig_command.process()
        shell.writeline(output)

    def run_ls(self, params, shell):
        """Emulate ls: parse (most of) GNU ls's flags, then delegate to LsCommand."""
        # Split positional paths from dash-prefixed options.
        paths = []
        other_params = []
        for p in params:
            if p.startswith('-'):
                other_params.append(p)
            else:
                paths.append(p)
        if not paths:  # List contents of working dir by default
            paths.append(self.working_path)
        parser = Parser(add_help=False)
        parser.add_argument('-a', '--all', action='store_true', default=False)
        parser.add_argument('-A', '--almost-all', action='store_true',
                            default=False)
        parser.add_argument('-d', '--directory', action='store_true',
                            default=False)
        parser.add_argument('-l', action='store_true', default=False)
        # We ignore these (for now), but still parse them ;-)
        parser.add_argument('-h', '--human-readable', action='store_true',
                            default=False)
        parser.add_argument('-b', '--escape', action='store_true',
                            default=False)
        parser.add_argument('--block-size')
        parser.add_argument('-B', '--ignore-backups', action='store_true',
                            default=False)
        parser.add_argument('-c', action='store_true', default=False)
        parser.add_argument('-C', action='store_true', default=False)
        parser.add_argument('--color')
        parser.add_argument('-D', '--dired', action='store_true',
                            default=False)
        parser.add_argument('-f', action='store_true', default=False)
        parser.add_argument('-F', '--classify', action='store_true',
                            default=False)
        parser.add_argument('--file-type', action='store_true', default=False)
        parser.add_argument('--format')
        parser.add_argument('--full-time', action='store_true', default=False)
        parser.add_argument('-g', action='store_true', default=False)
        parser.add_argument('--group-directories-first', action='store_true',
                            default=False)
        parser.add_argument('-G', '--no-group', action='store_true',
                            default=False)
        parser.add_argument('-H', '--dereference-command-line',
                            action='store_true', default=False)
        parser.add_argument('--dereference-command-line-symlink-to-dir',
                            action='store_true', default=False)
        parser.add_argument('--hide')
        parser.add_argument('--indicator-style')
        parser.add_argument('-i', '--inode', action='store_true',
                            default=False)
        parser.add_argument('-I', '--ignore')
        parser.add_argument('-k', '--kibibytes', action='store_true',
                            default=False)
        parser.add_argument('-L', '--deference', action='store_true',
                            default=False)
        parser.add_argument('-m', action='store_true', default=False)
        parser.add_argument('-n', '--numeric-uid-gid', action='store_true',
                            default=False)
        parser.add_argument('-N', '--literal', action='store_true',
                            default=False)
        parser.add_argument('-o', action='store_true', default=False)
        parser.add_argument('-p', action='store_true', default=False)
        parser.add_argument('-q', '--hide-control-chars', action='store_true',
                            default=False)
        parser.add_argument('--show-control-chars', action='store_true',
                            default=False)
        parser.add_argument('-Q', '--quote-name', action='store_true',
                            default=False)
        parser.add_argument('--quoting-style')
        parser.add_argument('-r', '--reverse', action='store_true',
                            default=False)
        parser.add_argument('-R', '--recursive', action='store_true',
                            default=False)
        parser.add_argument('-s', '--size', action='store_true',
                            default=False)
        parser.add_argument('-S', action='store_true', default=False)
        parser.add_argument('--sort')
        parser.add_argument('--time')
        parser.add_argument('--time-style')
        parser.add_argument('-t', action='store_true', default=False)
        parser.add_argument('-T', '--tabsize', default=False)
        parser.add_argument('-u', action='store_true', default=False)
        parser.add_argument('-U', action='store_true', default=False)
        parser.add_argument('-v', action='store_true', default=False)
        parser.add_argument('-w', '--width')
        parser.add_argument('-x', action='store_true', default=False)
        parser.add_argument('-X', action='store_true', default=False)
        parser.add_argument('-1', dest='one_per_line', action='store_true',
                            default=False)
        parser.add_argument('--help', action='store_true', default=False)
        parser.add_argument('--version', action='store_true', default=False)
        try:
            args = parser.parse_args(other_params)
        except ParseError:
            shell.writeline('ls: invalid options: \"{}\"'.format(
                ' '.join(params)))
            shell.writeline('Try \'ls --help\' for more information.')
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'ls', 'help')
            logger.debug(
                'Sending help string from file {}'.format(help_file_path))
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                             'data', 'commands', 'ls',
                                             'version')
            logger.debug('Sending version string from file {}'.format(
                version_file_path))
            self.send_data_from_file(version_file_path, shell)
            return
        ls_cmd = LsCommand(args, paths, self.filesystem, self.working_path)
        output = ls_cmd.process()
        shell.writeline(output)

    def run_cd(self, params, shell):
        """Emulate cd: update working_path, guarding against escapes from the virtual fs."""
        if len(params) == 0:
            params = ['/']
        cd_path = os.path.join(self.working_path, params[0])
        new_path_exists = False
        try:
            new_path_exists = self.filesystem.exists(cd_path)
        except BackReferenceError as e:
            # A '..'-style escape attempt; reset to the fs root instead.
            logger.warn('Access to the external file system was attempted.')
            cd_path = '/'
            new_path_exists = True
        finally:
            if not new_path_exists:
                shell.writeline('cd: {}: No such file or directory'.format(
                    params[0]))
            else:
                self.working_path = os.path.normpath(cd_path)
                logger.debug(
                    'Working directory for host {} changed to {}'.format(
                        self.hostname, self.working_path))

    def run_uname(self, params, shell):
        """Emulate uname: assemble the output from canned system info fields."""
        if not params:
            shell.writeline('Linux')
            return
        buff = ''
        # Canned identity strings; index order matches GNU uname -a output.
        info = [
            'Linux', self.hostname, '3.13.0-37-generic',
            '#64-Ubuntu SMP Mon Sep 22 21:30:01 UTC 2014', 'i686', 'i686',
            'i686', 'GNU/Linux'
        ]
        parser = Parser(add_help=False)
        parser.add_argument('-a', '--all', default=False, action='store_true')
        parser.add_argument('-s', '--kernel-name', default=False,
                            action='store_true')
        parser.add_argument('-n', '--nodename', default=False,
                            action='store_true')
        parser.add_argument('-r', '--kernel-release', default=False,
                            action='store_true')
        parser.add_argument('-v', '--kernel-version', default=False,
                            action='store_true')
        parser.add_argument('-m', '--kernel-machine', default=False,
                            action='store_true')
        parser.add_argument('-p', '--processor', default=False,
                            action='store_true')
        parser.add_argument('-i', '--hardware-platform', default=False,
                            action='store_true')
        parser.add_argument('-o', '--operating-system', default=False,
                            action='store_true')
        parser.add_argument('--help', default=False, action='store_true')
        parser.add_argument('--version', default=False, action='store_true')
        try:
            args = parser.parse_args(params)
        except ParseError:
            shell.writeline('uname: invalid options -- \'{}\''.format(
                ' '.join(params)))
            shell.writeline('Try \'uname --help\' for more information.')
            return
        if args.all:
            buff = ' '.join(info)
            shell.writeline(buff)
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'uname', 'help')
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                             'data', 'commands', 'uname',
                                             'version')
            self.send_data_from_file(version_file_path, shell)
            return
        # Append each requested field in uname's canonical order.
        if args.kernel_name:
            buff = buff + info[0] + ' '
        if args.nodename:
            buff = buff + self.hostname + ' '
        if args.kernel_release:
            buff = buff + info[2] + ' '
        if args.kernel_version:
            buff = buff + info[3] + ' '
        if args.kernel_machine:
            buff = buff + info[4] + ' '
        if args.processor:
            buff = buff + info[4] + ' '
        if args.hardware_platform:
            buff = buff + info[4] + ' '
        if args.operating_system:
            buff += 'GNU/Linux'
        shell.writeline(buff)

    def _set_ip_from_previous_run(self, fs_dir, valid_ips):  # pragma: no cover
        """Reuse the IP from an earlier run by scanning fs_dir for '<hostname>_<ip>' dirs."""
        for dir_name in os.listdir(fs_dir):
            if dir_name.startswith(self.hostname + '_'):
                possible_ip = dir_name.split('_')[1]
                if possible_ip in valid_ips:
                    self.ip_address = possible_ip
                    logger.info('Assigned IP {} to host {}'.format(
                        self.ip_address, self.hostname))
                    return True
        return False

    @staticmethod
    def send_data_from_file(path, shell):
        """Write a canned data file to the shell, line by line."""
        with open(path, 'r') as infile:
            for line in infile:
                line = line.strip()
                shell.writeline(line)
def test_export_course(self):
    """Round-trip a course through export/import, covering drafts, private
    verticals, policies and textbooks."""
    module_store = modulestore('direct')
    draft_store = modulestore('draft')
    content_store = contentstore()

    import_from_xml(module_store, 'common/test/data/', ['full'])
    location = CourseDescriptor.id_to_location('edX/full/6.002_Spring_2012')

    # get a vertical (and components in it) to put into 'draft'
    vertical = module_store.get_item(Location(['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]), depth=1)
    draft_store.clone_item(vertical.location, vertical.location)

    # We had a bug where orphaned draft nodes caused export to fail. This is here to cover that case.
    draft_store.clone_item(vertical.location, Location(['i4x', 'edX', 'full', 'vertical', 'no_references', 'draft']))

    for child in vertical.get_children():
        draft_store.clone_item(child.location, child.location)

    root_dir = path(mkdtemp_clean())

    # now create a private vertical
    private_vertical = draft_store.clone_item(vertical.location, Location(['i4x', 'edX', 'full', 'vertical', 'a_private_vertical', None]))

    # add private to list of children
    sequential = module_store.get_item(Location(['i4x', 'edX', 'full', 'sequential', 'Administrivia_and_Circuit_Elements', None]))
    private_location_no_draft = private_vertical.location.replace(revision=None)
    module_store.update_children(sequential.location, sequential.children + [private_location_no_draft.url()])

    # read back the sequential, to make sure we have a pointer to
    sequential = module_store.get_item(Location(['i4x', 'edX', 'full', 'sequential', 'Administrivia_and_Circuit_Elements', None]))
    self.assertIn(private_location_no_draft.url(), sequential.children)

    print 'Exporting to tempdir = {0}'.format(root_dir)

    # export out to a tempdir
    export_to_xml(module_store, content_store, location, root_dir, 'test_export', draft_modulestore=draft_store)

    # check for static tabs
    self.verify_content_existence(module_store, root_dir, location, 'tabs', 'static_tab', '.html')

    # check for course info
    self.verify_content_existence(module_store, root_dir, location, 'info', 'course_info', '.html')

    # check for custom_tags
    self.verify_content_existence(module_store, root_dir, location, 'custom_tags', 'custom_tag_template')

    # check for about content
    self.verify_content_existence(module_store, root_dir, location, 'about', 'about', '.html')

    # check for grading_policy.json
    filesystem = OSFS(root_dir / 'test_export/policies/6.002_Spring_2012')
    self.assertTrue(filesystem.exists('grading_policy.json'))

    course = module_store.get_item(location)
    # compare what's on disk compared to what we have in our course
    with filesystem.open('grading_policy.json', 'r') as grading_policy:
        on_disk = loads(grading_policy.read())
        self.assertEqual(on_disk, course.grading_policy)

    # check for policy.json
    self.assertTrue(filesystem.exists('policy.json'))

    # compare what's on disk to what we have in the course module
    with filesystem.open('policy.json', 'r') as course_policy:
        on_disk = loads(course_policy.read())
        self.assertIn('course/6.002_Spring_2012', on_disk)
        self.assertEqual(on_disk['course/6.002_Spring_2012'], own_metadata(course))

    # remove old course
    delete_course(module_store, content_store, location)

    # reimport
    import_from_xml(module_store, root_dir, ['test_export'], draft_store=draft_store)

    items = module_store.get_items(Location(['i4x', 'edX', 'full', 'vertical', None]))
    self.assertGreater(len(items), 0)

    for descriptor in items:
        # don't try to look at private verticals. Right now we're running
        # the service in non-draft aware
        if getattr(descriptor, 'is_draft', False):
            print "Checking {0}....".format(descriptor.location.url())
            resp = self.client.get(reverse('edit_unit', kwargs={'location': descriptor.location.url()}))
            self.assertEqual(resp.status_code, 200)

    # verify that we have the content in the draft store as well
    vertical = draft_store.get_item(Location(['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]), depth=1)
    self.assertTrue(getattr(vertical, 'is_draft', False))
    for child in vertical.get_children():
        self.assertTrue(getattr(child, 'is_draft', False))

    # make sure that we don't have a sequential that is in draft mode
    sequential = draft_store.get_item(Location(['i4x', 'edX', 'full', 'sequential', 'Administrivia_and_Circuit_Elements', None]))
    self.assertFalse(getattr(sequential, 'is_draft', False))

    # verify that we have the private vertical
    test_private_vertical = draft_store.get_item(Location(['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]))
    self.assertTrue(getattr(test_private_vertical, 'is_draft', False))

    # make sure the textbook survived the export/import
    course = module_store.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
    self.assertGreater(len(course.textbooks), 0)

    shutil.rmtree(root_dir)
# Bootstrap script: set up the build directory layout and the production
# directory, then fetch and build every configured register repository and
# push the result to production.
from fs.errors import ResourceNotFoundError

import settings as s
from backend import fetch_repo, create_production, build
from utils import load_repos

root_fs = OSFS(s.root_path)
build_fs = root_fs.makeopendir(s.build_path)
build_fs.makedir(s.sources_path)
build_fs.makedir(s.staging_path)
build_fs.makedir(s.register_path)

# create production directory if needed
try:
    production_fs = OSFS(s.production_path)
except ResourceNotFoundError:
    # grab production dir's parent dir
    path = s.production_path.split('/')[-2]
    print path
    production_fs = OSFS(s.production_path[:len(s.production_path) - (len(path) + 1)]).makeopendir(path)
    print production_fs

if production_fs.exists(s.backups_path) == False:
    production_fs.makedir(s.backups_path)

# fetch repos from GitHub
for repo in load_repos(s.repos_path)[0].values():
    print 'Fetching %s for the first time' % repo['id']
    fetch_repo(root_fs, s.sources_path, repo['id'], repo['url'], s.build_path)
    build(s.sources_path, s.register_path, root_fs, repo['id'])

create_production(s.register_path, s.backups_path, s.script_entry_path, s.production_path)