def main():
    parser = argparse.ArgumentParser(description='Create free editor.slf')
    parser.add_argument('original', help="Original editor.slf")
    parser.add_argument(
        '-o', '--output',
        default='build/editor.slf',
        help="Where to store the created slf file"
    )
    args = parser.parse_args()
    if not os.path.exists(os.path.dirname(args.output)):
        os.makedirs(os.path.dirname(args.output))

    target_fs = BufferedSlfFS()
    replacement_fs = OSFS('editor')
    with open(args.original, 'rb') as source_file:
        source_fs = SlfFS(source_file)

        target_fs.library_name = source_fs.library_name
        target_fs.library_path = source_fs.library_path
        target_fs.version = source_fs.version
        target_fs.sort = source_fs.sort
        for directory in source_fs.walkdirs():
            if directory == '/':
                continue
            target_fs.makedir(directory)
        for file in source_fs.walkfiles():
            base_name, _ = os.path.splitext(file)
            with source_fs.open(file, 'rb') as source, target_fs.open(file, 'wb') as target:
                ja2_images = load_8bit_sti(source)
                replacement_path = base_name + '.gif'
                replacement_file_exists = replacement_fs.isfile(replacement_path)
                replacement_dir = file
                replacement_dir_exists = replacement_fs.isdir(replacement_dir)
                if len(ja2_images) == 1 and replacement_file_exists:
                    print("Replacing {0} with {1}".format(file, replacement_path))
                    replacement_img = Image.open(replacement_fs.open(replacement_path, 'rb'))
                    ja2_images._palette = replacement_img.palette
                    ja2_images.images[0]._image = replacement_img
                elif len(ja2_images) > 1 and replacement_dir_exists:
                    for i in range(len(ja2_images)):
                        replacement_path = replacement_dir + '/{}.gif'.format(i)
                        print("Replacing {0} with {1}".format(file, replacement_path))
                        replacement_img = Image.open(replacement_fs.open(replacement_path, 'rb'))
                        ja2_images._palette = replacement_img.palette
                        ja2_images.images[i]._image = replacement_img
                else:
                    print("Replacing {0} with nothingness".format(file))
                    for sub_image in ja2_images.images:
                        width, height = sub_image.image.size
                        sub_image._image = Image.new('P', (width, height), color=54)
                save_8bit_sti(ja2_images, target)

    with open(args.output, 'wb') as target_file:
        target_fs.save(target_file)
def wrapper(self, *args):
    # Create dir first
    path_to_file = os.path.join(
        settings.PROFILE_IMAGE_BACKEND['options']['location'],
        unicode(self.location.course_key),
        self.location.block_id)
    if not os.path.exists(path_to_file):
        os.makedirs(path_to_file)

    # Uncompress content from assets later??
    if self.scorm_zip_file:
        assets, _ = contentstore().get_all_content_for_course(
            self.location.course_key)
        zipCandidates = filter(
            lambda a: a.get('displayname') == self.scorm_zip_file,
            assets)
        if len(zipCandidates):
            zipScorm = zipCandidates[0]
            zFile = u'{}/{}'.format(path_to_file, self.scorm_zip_file)
            if not os.path.exists(zFile):
                try:
                    with contentstore().fs.get(zipScorm.get('_id')) as fp:
                        disk_fs = OSFS(path_to_file)
                        with disk_fs.open(self.scorm_zip_file, 'wb') as asset_file:
                            asset_file.write(fp.read())
                except Exception as e:
                    raise e
            if os.path.exists(zFile):
                zipfile.ZipFile(zFile, 'r').extractall(path_to_file)
    return fn(self, *args)
def get_yahoo_icon():
    try:
        # Validate the request before making the upstream call; the original
        # checked len(request.args) only after already using request.args[0].
        if len(request.args) != 1:
            raise HTTP(400)
        http = urllib3.PoolManager()
        fetch_data = http.request('GET', 'http://opi.yahoo.com/online?u=%s&m=t&t=1' % request.args[0])
        if fetch_data.status == 200:
            from fs.osfs import OSFS
            file_server = OSFS('/home/www-data/web2py/applications/cbw/static/images')
            if fetch_data.data == "01":
                path = "icon-YMonline.png"
            else:
                path = "icon-YMoffline.png"
            if file_server.exists(path):
                response.headers['Content-Length'] = file_server.getinfo(path)['size']
                response.headers['Content-Type'] = 'image/png'
                response.headers['Content-Disposition'] = "attachment; filename=yahoo.png"
                return response.stream(file_server.open(path=path, mode='rb'))
            else:
                raise HTTP(404)
        else:
            raise Exception(fetch_data.status)
    except:
        return None
def publish(self):
    super(PyFS, self).publish()
    deploy_fs = OSFS(self.site.config.deploy_root_path.path)
    for (dirnm, local_filenms) in deploy_fs.walk():
        logger.info("Making directory: %s", dirnm)
        self.fs.makedir(dirnm, allow_recreate=True)
        remote_fileinfos = self.fs.listdirinfo(dirnm, files_only=True)
        # Process each local file, to see if it needs updating.
        for filenm in local_filenms:
            filepath = pathjoin(dirnm, filenm)
            # Try to find an existing remote file, to compare metadata.
            for (nm, info) in remote_fileinfos:
                if nm == filenm:
                    break
            else:
                info = {}
            # Skip it if the etags match
            if self.check_etag and "etag" in info:
                with deploy_fs.open(filepath, "rb") as f:
                    local_etag = self._calculate_etag(f)
                if info["etag"] == local_etag:
                    logger.info("Skipping file [etag]: %s", filepath)
                    continue
            # Skip it if the mtime is more recent remotely.
            if self.check_mtime and "modified_time" in info:
                local_mtime = deploy_fs.getinfo(filepath)["modified_time"]
                if info["modified_time"] > local_mtime:
                    logger.info("Skipping file [mtime]: %s", filepath)
                    continue
            # Upload it to the remote filesystem.
            logger.info("Uploading file: %s", filepath)
            with deploy_fs.open(filepath, "rb") as f:
                self.fs.setcontents(filepath, f)
        # Process each remote file, to see if it needs deleting.
        for (filenm, info) in remote_fileinfos:
            filepath = pathjoin(dirnm, filenm)
            if filenm not in local_filenms:
                logger.info("Removing file: %s", filepath)
                self.fs.remove(filepath)
def export(self, location, output_directory):
    content = self.find(location)

    if content.import_path is not None:
        output_directory = output_directory + '/' + os.path.dirname(content.import_path)

    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    disk_fs = OSFS(output_directory)

    with disk_fs.open(content.name, 'wb') as asset_file:
        asset_file.write(content.data)
def export(self, location, output_directory):
    content = self.find(location)
    filename = content.name

    if content.import_path is not None:
        output_directory = output_directory + '/' + os.path.dirname(content.import_path)

    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    # Escape invalid char from filename.
    export_name = escape_invalid_characters(name=filename, invalid_char_list=['/', '\\'])

    disk_fs = OSFS(output_directory)

    with disk_fs.open(export_name, 'wb') as asset_file:
        asset_file.write(content.data)
def export(course, export_dir):
    """Export the specified course to course_dir.  Creates dir if it doesn't exist.
    Overwrites files, does not clean out dir beforehand.
    """
    fs = OSFS(export_dir, create=True)
    if not fs.isdirempty("."):
        print ("WARNING: Directory {dir} not-empty."
               " May clobber/confuse things".format(dir=export_dir))

    try:
        xml = course.export_to_xml(fs)
        with fs.open("course.xml", mode="w") as f:
            f.write(xml)

        return True
    except:
        print "Export failed!"
        traceback.print_exc()
        return False
def export(course, export_dir):
    """Export the specified course to course_dir.  Creates dir if it doesn't exist.
    Overwrites files, does not clean out dir beforehand.
    """
    fs = OSFS(export_dir, create=True)
    if not fs.isdirempty('.'):
        print ('WARNING: Directory {dir} not-empty.'
               ' May clobber/confuse things'.format(dir=export_dir))

    try:
        xml = course.export_to_xml(fs)
        with fs.open('course.xml', mode='w') as f:
            f.write(xml)

        return True
    except:
        print 'Export failed!'
        traceback.print_exc()
        return False
def demo():
    try:
        from fs.osfs import OSFS
        file_server = OSFS('/home/demo/')
        if len(request.args) != 1:
            raise HTTP(400)
        path = request.args[0]
        if file_server.exists(path):
            response.headers['Content-Length'] = file_server.getinfo(path)['size']
            response.headers['Content-Type'] = 'application/octet-stream'
            response.headers['Content-Disposition'] = "attachment; filename=%s" % path
            return response.stream(file_server.open(path=path, mode='rb'))
        else:
            raise HTTP(404)
    except:
        raise HTTP(200, "ERROR")
def export(course, export_dir):
    """
    Export the specified course to course_dir. Creates dir if it doesn't exist.
    Overwrites files, does not clean out dir beforehand.
    """
    fs = OSFS(export_dir, create=True)
    if not fs.isdirempty('.'):
        print(u'WARNING: Directory {dir} not-empty. May clobber/confuse things'.format(dir=export_dir))

    try:
        course.runtime.export_fs = fs
        root = lxml.etree.Element('root')
        course.add_xml_to_node(root)
        with fs.open('course.xml', mode='w') as f:
            # An Element has no write(); wrap it in an ElementTree to serialize.
            lxml.etree.ElementTree(root).write(f)

        return True
    except:
        print('Export failed!')
        traceback.print_exc()
        return False
def export(course, export_dir):
    """Export the specified course to course_dir.  Creates dir if it doesn't exist.
    Overwrites files, does not clean out dir beforehand.
    """
    fs = OSFS(export_dir, create=True)
    if not fs.isdirempty('.'):
        print ('WARNING: Directory {dir} not-empty.'
               ' May clobber/confuse things'.format(dir=export_dir))

    try:
        course.runtime.export_fs = fs
        root = lxml.etree.Element('root')
        course.add_xml_to_node(root)
        with fs.open('course.xml', mode='w') as f:
            # An Element has no write(); wrap it in an ElementTree to serialize.
            lxml.etree.ElementTree(root).write(f)

        return True
    except:
        print 'Export failed!'
        traceback.print_exc()
        return False
def download_url(config):
    folder, url = config
    _hash = hashlib.md5(url.encode("utf-8")).hexdigest()
    _hash = "%s.html" % _hash
    ofs = OSFS(folder)
    if ofs.exists(_hash):
        return
    try:
        resp = requests.get(url, timeout=10)
    except SSLError:
        try:
            resp = requests.get(url, verify=False)
        except:
            return
    except Exception:
        print("Error: %s" % url)
        return
    print("\t\t\tDownloaded... %s" % url)
    with ofs.open(_hash, "wb") as f:
        f.write(resp.content)
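# Hypothetical usage sketch (not from the source): download_url takes a
# (folder, url) tuple, so it maps cleanly over a thread pool. PAGES_DIR and
# URLS are illustrative assumptions; PAGES_DIR must already exist, since
# OSFS(folder) is opened without create=True.
from multiprocessing.dummy import Pool  # thread-based pool

PAGES_DIR = "pages"
URLS = ["https://example.com/a", "https://example.com/b"]

pool = Pool(8)
pool.map(download_url, [(PAGES_DIR, url) for url in URLS])
pool.close()
pool.join()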
import sys

# Presumed imports for this snippet (not shown in the original): requests for
# the downloads, and PyFilesystem for the OS/memory/zip filesystems.
import requests
from fs import copy as fscopy
from fs.memoryfs import MemoryFS
from fs.osfs import OSFS
from fs.zipfs import ZipFS

from clint.textui import puts, indent, progress

puts("Ren'Py setup")
with indent(2):
    cwdfs = OSFS(".")
    tempfs = MemoryFS()
    if "renpy.zip" not in cwdfs.listdir("/"):
        puts("Downloading Ren'Py")
        r = requests.get(
            "https://www.renpy.org/dl/6.99.12.4/renpy-6.99.12.4-sdk.zip",
            stream=True)
        r.raise_for_status()
        with cwdfs.open("renpy.zip", 'wb') as fd:
            total_length = int(r.headers.get('content-length'))
            for chunk in progress.bar(r.iter_content(chunk_size=1024),
                                      expected_size=(total_length / 1024) + 1):
                fd.write(chunk)
    puts("Extracting Ren'Py")
    with ZipFS("./renpy.zip") as zipfs:
        fscopy.copy_dir(zipfs, "renpy-6.99.12.4-sdk", tempfs, "renpy")
    cwdfs.remove("renpy.zip")

puts("ModTemplate setup")
with indent(2):
    if "modtemplate.zip" not in cwdfs.listdir("/"):
        puts("Downloading ModTemplate")
class VirtualHost(object):
    """ Represents a single host. This class implements the commands
        that are host-specific, like pwd, ls, etc. """

    def __init__(self, params, network, fs_dir):
        self.hostname = params['hostname']
        self.ip_address = params['ip_address']
        self.network = network
        self.env = params['env']
        valid_ips = map(str, network[1:-1])
        if self.ip_address is None:
            logger.error(
                'IP address for {} is not specified in the config file (or is "null")'
                .format(self.hostname))
            if not self._set_ip_from_previous_run(fs_dir, valid_ips):
                self.ip_address = get_random_item(valid_ips)
                logger.info('Assigned random IP {} to host {}'.format(
                    self.ip_address, self.hostname))
        else:
            if not self.ip_address in valid_ips:
                logger.error(
                    'IP Address {} for {} is not valid for the specified network'
                    .format(params['ip_address'], self.hostname))
                if not self._set_ip_from_previous_run(fs_dir, valid_ips):
                    self.ip_address = get_random_item(valid_ips)
                    logger.info('Assigned random IP {} to host {}'.format(
                        self.ip_address, self.hostname))
        self.valid_logins = params['valid_logins']
        self.logged_in = False
        self.current_user = None
        if params.get('default', False):
            self.default = True
        else:
            self.default = False
        self.filesystem = OSFS(os.path.join(
            fs_dir, '{}_{}'.format(self.hostname, self.ip_address)),
            create=True)
        self.working_path = '/'

    def authenticate(self, username, password):
        if self.valid_logins.get(username, None) == password:
            return True
        return False

    def login(self, username):
        logger.debug('User "{}" has logged into "{}" host'.format(
            username, self.hostname))
        self.logged_in = True
        self.current_user = username

    def logout(self):
        self.logged_in = False
        self.current_user = None

    @property
    def welcome(self):
        if self.filesystem.isfile('/etc/motd'):
            with self.filesystem.open('/etc/motd') as motd_file:
                return motd_file.read()
        else:
            return 'Welcome to {} server.'.format(self.hostname)

    @property
    def prompt(self):
        prompt = '{}@{}:{}$ '.format(self.current_user, self.hostname,
                                     self.working_path)
        return prompt

    def run_echo(self, params, shell):
        if not params:
            shell.writeline('')
        elif params[0].startswith('$') and len(params) == 1:
            var_name = params[0][1:]
            value = self.env.get(var_name, '')
            shell.writeline(value)
        elif '*' in params:
            params.remove('*')
            params.extend(self.filesystem.listdir())
            shell.writeline(' '.join(params))
        else:
            shell.writeline(' '.join(params))

    def run_pwd(self, params, shell):
        if params:
            shell.writeline('pwd: too many arguments')
        else:
            shell.writeline('{}'.format(self.working_path))

    def run_wget(self, params, shell):
        parser = Parser(add_help=False)
        parser.add_argument('-h', '--help', action='store_true', default=False)
        parser.add_argument('-V', '--version', action='store_true', default=False)
        parser.add_argument('-O', '--output-document')
        args, unparsed = parser.parse_known_args(params)
        if unparsed:
            url = unparsed[0]
        elif not args.help and not args.version:
            noparam_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                             'data', 'commands', 'wget', 'no_param')
            self.send_data_from_file(noparam_file_path, shell)
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'wget', 'help')
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                             'data', 'commands', 'wget', 'version')
            self.send_data_from_file(version_file_path, shell)
            return
        wget_command = WgetCommand(url, self.working_path, self.filesystem, args, shell)
        wget_command.process()

    def run_ping(self, params, shell):
        options = [x for x in params if x.startswith('-')]
        if '-h' in options or len(params) == 0:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'ping', 'help')
            self.send_data_from_file(help_file_path, shell)
            return
        filtered_params = [p for p in params if not p.startswith('-')]
        ping_host = filtered_params[-1]
        logger.debug('Going to ping {}'.format(ping_host))
        ping_command = PingCommand(ping_host, shell)
        ping_command.process()

    def run_ifconfig(self, params, shell):
        if len(params) >= 2:
            shell.writeline('SIOCSIFFLAGS: Operation not permitted')
            return
        if params:
            parameter = params[0]
            if parameter == '--version':
                version_file_path = os.path.join(
                    os.path.dirname(hornet.__file__),
                    'data', 'commands', 'ifconfig', 'version')
                self.send_data_from_file(version_file_path, shell)
                logger.debug(
                    'Sending version string for ifconfig from {} file'.format(
                        version_file_path))
                return
            elif parameter == '--help' or parameter == '-h':
                help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                              'data', 'commands', 'ifconfig', 'help')
                self.send_data_from_file(help_file_path, shell)
                logger.debug(
                    'Sending version string for ifconfig from {} file'.format(
                        help_file_path))
                return
        output_template_path = os.path.join(os.path.dirname(hornet.__file__),
                                            'data', 'commands', 'ifconfig',
                                            'output_template')
        ifconfig_command = IfconfigCommand(params, output_template_path,
                                           self.ip_address, self.network)
        output = ifconfig_command.process()
        shell.writeline(output)

    def run_ls(self, params, shell):
        paths = []
        other_params = []
        for p in params:
            if p.startswith('-'):
                other_params.append(p)
            else:
                paths.append(p)
        if not paths:
            # List contents of working dir by default
            paths.append(self.working_path)
        parser = Parser(add_help=False)
        parser.add_argument('-a', '--all', action='store_true', default=False)
        parser.add_argument('-A', '--almost-all', action='store_true', default=False)
        parser.add_argument('-d', '--directory', action='store_true', default=False)
        parser.add_argument('-l', action='store_true', default=False)
        # We ignore these (for now), but still parse them ;-)
        parser.add_argument('-h', '--human-readable', action='store_true', default=False)
        parser.add_argument('-b', '--escape', action='store_true', default=False)
        parser.add_argument('--block-size')
        parser.add_argument('-B', '--ignore-backups', action='store_true', default=False)
        parser.add_argument('-c', action='store_true', default=False)
        parser.add_argument('-C', action='store_true', default=False)
        parser.add_argument('--color')
        parser.add_argument('-D', '--dired', action='store_true', default=False)
        parser.add_argument('-f', action='store_true', default=False)
        parser.add_argument('-F', '--classify', action='store_true', default=False)
        parser.add_argument('--file-type', action='store_true', default=False)
        parser.add_argument('--format')
        parser.add_argument('--full-time', action='store_true', default=False)
        parser.add_argument('-g', action='store_true', default=False)
        parser.add_argument('--group-directories-first', action='store_true', default=False)
        parser.add_argument('-G', '--no-group', action='store_true', default=False)
        parser.add_argument('-H', '--dereference-command-line', action='store_true', default=False)
        parser.add_argument('--dereference-command-line-symlink-to-dir', action='store_true', default=False)
        parser.add_argument('--hide')
        parser.add_argument('--indicator-style')
        parser.add_argument('-i', '--inode', action='store_true', default=False)
        parser.add_argument('-I', '--ignore')
        parser.add_argument('-k', '--kibibytes', action='store_true', default=False)
        parser.add_argument('-L', '--deference', action='store_true', default=False)
        parser.add_argument('-m', action='store_true', default=False)
        parser.add_argument('-n', '--numeric-uid-gid', action='store_true', default=False)
        parser.add_argument('-N', '--literal', action='store_true', default=False)
        parser.add_argument('-o', action='store_true', default=False)
        parser.add_argument('-p', action='store_true', default=False)
        parser.add_argument('-q', '--hide-control-chars', action='store_true', default=False)
        parser.add_argument('--show-control-chars', action='store_true', default=False)
        parser.add_argument('-Q', '--quote-name', action='store_true', default=False)
        parser.add_argument('--quoting-style')
        parser.add_argument('-r', '--reverse', action='store_true', default=False)
        parser.add_argument('-R', '--recursive', action='store_true', default=False)
        parser.add_argument('-s', '--size', action='store_true', default=False)
        parser.add_argument('-S', action='store_true', default=False)
        parser.add_argument('--sort')
        parser.add_argument('--time')
        parser.add_argument('--time-style')
        parser.add_argument('-t', action='store_true', default=False)
        parser.add_argument('-T', '--tabsize', default=False)
        parser.add_argument('-u', action='store_true', default=False)
        parser.add_argument('-U', action='store_true', default=False)
        parser.add_argument('-v', action='store_true', default=False)
        parser.add_argument('-w', '--width')
        parser.add_argument('-x', action='store_true', default=False)
        parser.add_argument('-X', action='store_true', default=False)
        parser.add_argument('-1', dest='one_per_line', action='store_true', default=False)
        parser.add_argument('--help', action='store_true', default=False)
        parser.add_argument('--version', action='store_true', default=False)
        try:
            args = parser.parse_args(other_params)
        except ParseError:
            shell.writeline('ls: invalid options: \"{}\"'.format(' '.join(params)))
            shell.writeline('Try \'ls --help\' for more information.')
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'ls', 'help')
            logger.debug('Sending help string from file {}'.format(help_file_path))
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                             'data', 'commands', 'ls', 'version')
            logger.debug('Sending version string from file {}'.format(version_file_path))
            self.send_data_from_file(version_file_path, shell)
            return
        ls_cmd = LsCommand(args, paths, self.filesystem, self.working_path)
        output = ls_cmd.process()
        shell.writeline(output)

    def run_cd(self, params, shell):
        if len(params) == 0:
            params = ['/']
        cd_path = os.path.join(self.working_path, params[0])
        new_path_exists = False
        try:
            new_path_exists = self.filesystem.exists(cd_path)
        except BackReferenceError as e:
            logger.warn('Access to the external file system was attempted.')
            cd_path = '/'
            new_path_exists = True
        finally:
            if not new_path_exists:
                shell.writeline('cd: {}: No such file or directory'.format(params[0]))
            else:
                self.working_path = os.path.normpath(cd_path)
                logger.debug('Working directory for host {} changed to {}'.format(
                    self.hostname, self.working_path))

    def run_uname(self, params, shell):
        if not params:
            shell.writeline('Linux')
            return
        buff = ''
        info = [
            'Linux', self.hostname, '3.13.0-37-generic',
            '#64-Ubuntu SMP Mon Sep 22 21:30:01 UTC 2014',
            'i686', 'i686', 'i686', 'GNU/Linux'
        ]
        parser = Parser(add_help=False)
        parser.add_argument('-a', '--all', default=False, action='store_true')
        parser.add_argument('-s', '--kernel-name', default=False, action='store_true')
        parser.add_argument('-n', '--nodename', default=False, action='store_true')
        parser.add_argument('-r', '--kernel-release', default=False, action='store_true')
        parser.add_argument('-v', '--kernel-version', default=False, action='store_true')
        parser.add_argument('-m', '--kernel-machine', default=False, action='store_true')
        parser.add_argument('-p', '--processor', default=False, action='store_true')
        parser.add_argument('-i', '--hardware-platform', default=False, action='store_true')
        parser.add_argument('-o', '--operating-system', default=False, action='store_true')
        parser.add_argument('--help', default=False, action='store_true')
        parser.add_argument('--version', default=False, action='store_true')
        try:
            args = parser.parse_args(params)
        except ParseError:
            shell.writeline('uname: invalid options -- \'{}\''.format(' '.join(params)))
            shell.writeline('Try \'uname --help\' for more information.')
            return
        if args.all:
            buff = ' '.join(info)
            shell.writeline(buff)
            return
        if args.help:
            help_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                          'data', 'commands', 'uname', 'help')
            self.send_data_from_file(help_file_path, shell)
            return
        if args.version:
            version_file_path = os.path.join(os.path.dirname(hornet.__file__),
                                             'data', 'commands', 'uname', 'version')
            self.send_data_from_file(version_file_path, shell)
            return
        if args.kernel_name:
            buff = buff + info[0] + ' '
        if args.nodename:
            buff = buff + self.hostname + ' '
        if args.kernel_release:
            buff = buff + info[2] + ' '
        if args.kernel_version:
            buff = buff + info[3] + ' '
        if args.kernel_machine:
            buff = buff + info[4] + ' '
        if args.processor:
            buff = buff + info[4] + ' '
        if args.hardware_platform:
            buff = buff + info[4] + ' '
        if args.operating_system:
            buff += 'GNU/Linux'
        shell.writeline(buff)

    def _set_ip_from_previous_run(self, fs_dir, valid_ips):  # pragma: no cover
        for dir_name in os.listdir(fs_dir):
            if dir_name.startswith(self.hostname + '_'):
                possible_ip = dir_name.split('_')[1]
                if possible_ip in valid_ips:
                    self.ip_address = possible_ip
                    logger.info('Assigned IP {} to host {}'.format(
                        self.ip_address, self.hostname))
                    return True
        return False

    @staticmethod
    def send_data_from_file(path, shell):
        with open(path, 'r') as infile:
            for line in infile:
                line = line.strip()
                shell.writeline(line)
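# Hypothetical construction sketch (not from the source): the config keys are
# inferred from __init__ above, and the network is passed as a list of
# addresses so that the network[1:-1] slice works. ipaddress is the py3
# stdlib module (a pip backport on py2).
import ipaddress
import os

fs_dir = '/tmp/vhosts'
if not os.path.exists(fs_dir):
    os.makedirs(fs_dir)  # _set_ip_from_previous_run lists this directory

params = {
    'hostname': 'web01',
    'ip_address': None,  # None -> reuse an IP from a previous run, or pick a random one
    'env': {'HOME': '/root', 'SHELL': '/bin/bash'},
    'valid_logins': {'root': 'toor'},
    'default': True,
}
network = list(ipaddress.ip_network(u'192.168.0.0/24'))
host = VirtualHost(params, network, fs_dir)
print(host.authenticate('root', 'toor'))  # True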
def handle_rechannel(**kwargs):
    """ Handle rechannel actions.

    Args:
        **kwargs (dict): Arguments
    """
    default_args = {
        'input': None,
        'output': None,
        'prefix': None,
        'map': None,
        'num_threads': None,
        'multithreading': 1
    }
    default_args.update(kwargs)
    args = dict_to_namedtuple(default_args)

    # open output filesystem
    out_fs = assure_fs(args.output)

    # split map path
    dirname, basename = os.path.split(unicode(args.map))
    try:
        map_fs = OSFS(dirname)
        if map_fs.isfile(basename):
            with map_fs.open(basename) as file_handle:
                try:
                    layer_map = json.loads(file_handle.read())
                except Exception as error:
                    console.error(error)
                    return
        else:
            console.error('Map {} does not exist.'.format(args.map))
            return
    except CreateFailed:
        console.error('Map parent directory {} does not exist.'.format(args.map))
        return

    # split input path
    dirname, basename = os.path.split(unicode(args.input))

    # open input filesystem
    try:
        in_fs = OSFS(dirname)
        if in_fs.isfile(basename):
            # prepend prefix to basename
            if args.prefix:
                basename = args.prefix + basename
            rechannel_file(in_fs.getsyspath(basename),
                           out_fs.getsyspath(basename), layer_map)
        elif in_fs.isdir(basename):
            rechannel_dir(in_fs.opendir(basename), out_fs, layer_map,
                          args.num_threads, bool(args.multithreading),
                          prefix=args.prefix)
    except CreateFailed:
        console.error('Input {} does not exist.'.format(args.input))
        return
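# Illustrative call sketch (all paths are assumptions). The map file format
# matches the default layer_map in handle_arguments below: keys are regexes
# matched against layer/channel names, values are the replacement names, e.g.
#
#   example.json:
#   {
#     "^(?P<layer>r)$": "R",
#     "^(?P<layer>g)$": "G",
#     "^(?P<layer>b)$": "B",
#     "^(?P<layer>a)$": "A"
#   }
handle_rechannel(input=u'renders/beauty.exr', output=u'out',
                 map=u'example.json', prefix=None,
                 num_threads=None, multithreading=1)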
class Timeline(object):
    """A timeline is a sequence of timestamped events."""

    def __init__(self, path, name, max_events=None):
        self.path = path
        self.name = name
        self.fs = OSFS(path, create=True)
        self.max_events = max_events

    def __repr__(self):
        return "Timeline({!r}, {!r}, max_events={!r})".format(
            self.path, self.name, self.max_events)

    def new_event(self, event_type, timestamp=None, *args, **kwargs):
        """Create and return an event, to be used as a context manager"""
        if self.max_events is not None:
            size = len(self.fs.listdir(wildcard="*.json"))
            if size >= self.max_events:
                raise TimelineFullError(
                    "The timeline has reached its maximum size")

        if timestamp is None:
            timestamp = int(time() * 1000.0)
        try:
            event_cls = _event_registry[event_type]
        except KeyError:
            raise UnknownEventError("No event type '{}'".format(event_type))

        # Make an event id that we can be confident it's unique
        token = str(randint(0, 2 ** 31))
        event_id = "{}_{}_{}".format(event_type, timestamp, token)
        event = event_cls(self, event_id, timestamp, *args, **kwargs)
        log.debug('new event {!r}'.format(event))
        return event

    def new_photo(self, file, filename=None, ext=None, bytes=None, **kwargs):
        """Create a new photo object"""
        # 'bytes' restored as a keyword argument; without it, the else branch
        # below would raise a NameError instead of the intended ValueError.
        event = self.new_event('IMAGE', **kwargs)

        if hasattr(file, 'getvalue'):
            bytes = file.getvalue()
        elif file is not None:
            if isinstance(file, basestring):
                with open(file, 'rb') as f:
                    bytes = f.read()
            else:
                bytes = file.read()
        else:
            if bytes is None:
                raise ValueError("A value for 'file' or 'bytes' is required")
        event.attach_bytes(bytes, name='photo', filename=filename, ext=ext)
        return event

    def get_events(self, sort=True):
        """Get all accumulated events"""
        events = []
        for event_filename in self.fs.listdir(wildcard="*.json"):
            with self.fs.open(event_filename, 'rb') as f:
                event = loads(f.read())
            events.append(event)
        if sort:
            # sort by timestamp
            events.sort(key=itemgetter('timestamp'))
        return events

    def clear_all(self):
        """Clear all stored events"""
        for filename in self.fs.listdir(wildcard="*.json"):
            try:
                self.fs.remove(filename)
            except FSError:
                pass

    def clear_events(self, event_ids):
        """Clear any events that have been processed"""
        for event_id in event_ids:
            filename = "{}.json".format(event_id)
            try:
                self.fs.remove(filename)
            except FSError:
                pass

    def _write_event(self, event_id, event):
        if hasattr(event, 'to_data'):
            event = event.to_data()
        event['event_id'] = event_id
        event_json = dumps(event, indent=4)
        filename = "{}.json".format(event_id)
        with self.fs.open(filename, 'wb') as f:
            f.write(event_json)
def cmd_render(self, *params, **options):
    icon_sizes = ','.join(str(s) for s in sorted(settings.DESKTOP_FORCE_ICON_SIZES))

    num_rendered = 0

    from linkstop.threadpool import ThreadPool
    thread_pool = ThreadPool(3, 6)

    try:
        max_renders = int(params[0])
    except IndexError:
        max_renders = None

    qs = FavIcon.objects.filter(rendered=False).order_by('pk')

    media_fs = OSFS(settings.MEDIA_ROOT)
    media_fs.makedir('favicons', allow_recreate=True)

    try:
        for favicon in qs:
            original_sizes = favicon.get_original_sizes()
            if not original_sizes:
                continue
            remaining_sizes = sorted(set(settings.DESKTOP_FORCE_ICON_SIZES).difference(favicon.get_sizes()))

            for size in remaining_sizes:
                print "Rendering %ix%i icon" % (size, size)
                image_path = os.path.join(
                    settings.MEDIA_ROOT,
                    url_to_path(favicon.url),
                    'icon%i.png' % original_sizes[-1]
                )
                output_path = get_size_path(favicon.url, size)
                thread_pool.job(
                    render,
                    (size, size),
                    image_path,
                    output_path,
                    settings.FAVICON_POV_SCENE
                )

            favicon.sizes = icon_sizes
            favicon.rendered = True
            favicon.save()

            #favicon_path = url_to_path(favicon.url)
            #favicon_fs = media_fs.makeopendir(favicon_path, recursive=True)
            favicon_fs = OSFS(get_icon_directory(favicon.url), create=True)
            favicon.export(favicon_fs.open('scan.pik', 'w'))
            #pickle_path = favicon_fs.getsyspath('scan.pik')

            num_rendered += 1
            if max_renders is not None and num_rendered >= max_renders:
                break
    finally:
        thread_pool.flush_quit()

    print "%i icon sets rendered" % num_rendered
class QualityStandard:
    """Stores information about a quality standard."""

    def __init__(self, resource_root_dir: types_path_like):
        """Create a ``QualityStandard`` instance.

        Parameters
        ----------
        resource_root_dir :
            The path to the resource root directory of the standard
        """
        from fs.osfs import OSFS

        self._name = None
        self._max_version = None
        self._versions = {}

        if isinstance(resource_root_dir, Path):
            resource_root_dir = resource_root_dir.as_posix()

        if isinstance(resource_root_dir, str):
            self._filesystem = OSFS(resource_root_dir)
        else:
            self._filesystem = resource_root_dir

        manifest_dir = self._filesystem.opendir("manifests")
        manifest_files = [
            file.name
            for file in self._filesystem.filterdir("manifests", ["*.yml", "*.yaml"])
        ]

        for filename in manifest_files:
            # stem of pyfilesystem cuts after first .
            qs_name, version = split_tag_version(filename[:filename.rindex(".")])

            if self._name is None:
                self._name = qs_name
                self._max_version = version
            else:
                if qs_name != self._name:
                    raise ValueError("Inconsistent naming of manifest files")
                if self._max_version < version:
                    self._max_version = version

            with manifest_dir.open(filename, "r") as stream:
                content = yaml.load(stream, Loader=yaml.SafeLoader)

            self._versions[version] = {
                "manifest_file_mapping": {content["id"]: filename},
                "schema_file_mapping": {
                    mapping["uri"]: (f"{mapping['file']}.yaml")
                    for mapping in content["tags"]
                },
            }

    def _map_file_content(
        self, file_mapping: dict, directory: str, version: AsdfVersion
    ) -> ResourceMappingProxy:
        """Get a mapping between an URI and a file content.

        Parameters
        ----------
        file_mapping : Dict
            A dictionary containing the mapping between URI and the file path
        directory:
            Directory that contains the files. This is either 'schemas' or 'mappings'
        version : AsdfVersion
            The version of the standard.

        Returns
        -------
        ResourceMappingProxy :
            Mapping between an URI and a file content
        """
        content_mapping = {
            uri: self._filesystem.open(f"{directory}/{filename}").read()
            for uri, filename in file_mapping.items()
        }
        return ResourceMappingProxy(
            content_mapping, package_name=self._name, package_version=version
        )

    @property
    def name(self) -> str:
        """Get the quality standards name."""
        return self._name

    def get_mappings(self, version: Union[AsdfVersion, str] = None):
        """Get the manifest and schema mapping for the specified version.

        Parameters
        ----------
        version : Union[AsdfVersion, str]
            Requested standard version. If `None` is provided, the latest will be used.

        Returns
        -------
        ResourceMappingProxy :
            Manifest mapping
        ResourceMappingProxy :
            Schema mapping
        """
        if version is None:
            version = self._max_version
        elif not isinstance(version, AsdfVersion):
            version = AsdfVersion(version)

        file_mappings = self._versions[version]
        manifest_mapping = self._map_file_content(
            file_mappings["manifest_file_mapping"], "manifests", version
        )
        schema_mapping = self._map_file_content(
            file_mappings["schema_file_mapping"], "schemas", version
        )
        return manifest_mapping, schema_mapping
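# Usage sketch (the path and version are illustrative assumptions): point the
# class at a resource root containing 'manifests/' and 'schemas/' and ask for
# the mappings of a given standard version.
qs = QualityStandard("resources/my_quality_standard")
manifest_mapping, schema_mapping = qs.get_mappings("1.0.0")
print(qs.name)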
def handle_arguments():
    """ Commandline arguments entrypoint. """
    # default layer map
    layer_map = {
        '^(?P<layer>r)$': 'R',
        '^(?P<layer>g)$': 'G',
        '^(?P<layer>b)$': 'B',
        '^(?P<layer>a)$': 'A',
        '(?P<layer>diffuse)\.(?P<channel>\S+)': 'diffuse'
    }

    # argument parser
    parser = ThrowingArgumentParser(
        description='Commandline tool for processing .exr files. Create previews or rename layers and channels.',
        prog='exrio')

    subparsers = parser.add_subparsers(help='Submodule commands.', dest='module')

    # create rechannel subparser
    rechannel_parser = subparsers.add_parser(
        'rechannel',
        help='Rename layers and channels in EXR files and directories containing EXR files.')

    # example
    rechannel_parser.add_argument('--example', action='store_true',
                                  help='Create example map at current working directory.')

    # extra step to check if an example map should be created
    try:
        args = parser.parse_args()
        if args.module == 'rechannel':
            if args.example:
                cwd_fs = OSFS(u'.')
                with cwd_fs.open(u'example.json', mode='w') as file_handle:
                    file_handle.write(unicode(json.dumps(layer_map, indent=2)))
                return
    except ArgumentParserError as error:
        pass

    apply_input_output_arguments(rechannel_parser)

    # layer map argument
    rechannel_parser.add_argument(
        'map', type=str,
        help='Path to a JSON file containing the layers to rename. Use regular expression to find the name and replace it with a new name. Example: {}'.format(json.dumps(layer_map)))

    apply_multiprocessing_arguments(rechannel_parser)

    # create preview subparser
    preview_parser = subparsers.add_parser(
        'preview',
        help='Create previews for EXR files and directories containing EXR files.')
    apply_input_output_arguments(preview_parser)
    apply_multiprocessing_arguments(preview_parser)

    # layer
    preview_parser.add_argument('--layer', type=str, nargs='+',
                                help='Select layer to preview (default=rgb).')

    # create inspect subparser
    inspect_parser = subparsers.add_parser(
        'inspect',
        help='Inspect EXR files or a directory containing EXR files.')

    # input path argument
    inspect_parser.add_argument('input', type=str,
                                help='Path to an EXR file or a directory containing EXR files.')

    try:
        args = parser.parse_args()
    except ArgumentParserError as error:
        console.error(error.message)
        parser.print_help()
        return
    except Exception as error:
        console.error(error.message)
        return

    if args.module == 'rechannel':
        handle_rechannel(**vars(args))
    elif args.module == 'preview':
        handle_preview(**vars(args))
    elif args.module == 'inspect':
        handle_inspect(**vars(args))
import sys  # needed for sys.argv below

from fs.osfs import OSFS
import Image

img_fs = OSFS(sys.argv[1])

imgs = []
for path in img_fs.listdir(wildcard='*.png'):
    img = Image.open(img_fs.getsyspath(path))
    size = img.size[0]
    if size != 16:
        continue
    imgs.append((path, img))

sprite = Image.new('RGBA', (16, len(imgs) * 16))

imgs.sort(key=lambda i: i[0])

sprite_text_f = img_fs.open('sprites.txt', 'wt')

for i, (path, img) in enumerate(imgs):
    y = i * 16
    sprite.paste(img, (0, y))
    sprite_text_f.write("%i\t%s\n" % (y, path))

sprite.save(img_fs.getsyspath('sprites.png'))
sprite_text_f.close()
def main():
    parser = argparse.ArgumentParser(description='Create free editor.slf')
    parser.add_argument('--original', help="Original editor.slf")
    parser.add_argument('-o', '--output', default='build/editor.slf',
                        help="Where to store the created slf file")
    parser.add_argument('--name', help="Library name")
    args = parser.parse_args()
    if not os.path.exists(os.path.dirname(args.output)):
        os.makedirs(os.path.dirname(args.output))

    if args.original is None:
        target_fs = create_free_editorslf(args.name)
        with open(args.output, 'wb') as target_file:
            target_fs.save(target_file)
        generate_md5_file(args.output)
        return

    # create editor.slf by replacing images in the original editor.slf
    target_fs = BufferedSlfFS()
    replacement_fs = OSFS('editor')
    with open(args.original, 'rb') as source_file:
        source_fs = SlfFS(source_file)

        target_fs.library_name = args.name or source_fs.library_name
        target_fs.library_path = source_fs.library_path
        target_fs.version = source_fs.version
        target_fs.sort = source_fs.sort
        for directory in source_fs.walkdirs():
            if directory == '/':
                continue
            target_fs.makedir(directory)
        for file in source_fs.walkfiles():
            base_name, _ = os.path.splitext(file)
            with source_fs.open(file, 'rb') as source, target_fs.open(file, 'wb') as target:
                ja2_images = load_8bit_sti(source)
                replacement_path = base_name + '.gif'
                replacement_file_exists = replacement_fs.isfile(replacement_path)
                replacement_dir = file
                replacement_dir_exists = replacement_fs.isdir(replacement_dir)
                if len(ja2_images) == 1 and replacement_file_exists:
                    print("Replacing {0} with {1}".format(file, replacement_path))
                    replacement_img = Image.open(replacement_fs.open(replacement_path, 'rb'))
                    ja2_images._palette = replacement_img.palette
                    ja2_images.images[0]._image = replacement_img
                elif len(ja2_images) > 1 and replacement_dir_exists:
                    for i in range(len(ja2_images)):
                        replacement_path = replacement_dir + '/{}.gif'.format(i)
                        print("Replacing {0} with {1}".format(file, replacement_path))
                        replacement_img = Image.open(replacement_fs.open(replacement_path, 'rb'))
                        ja2_images._palette = replacement_img.palette
                        ja2_images.images[i]._image = replacement_img
                else:
                    print("Replacing {0} with nothingness".format(file))
                    for sub_image in ja2_images.images:
                        width, height = sub_image.image.size
                        sub_image._image = Image.new('P', (width, height), color=54)
                save_8bit_sti(ja2_images, target)

    with open(args.output, 'wb') as target_file:
        target_fs.save(target_file)
    generate_md5_file(args.output)
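# Assumed invocation (the script name is illustrative; the flags match the
# argparse setup above). With no --original, a free editor.slf is built from
# scratch via create_free_editorslf():
#
#   python create_editor_slf.py -o build/editor.slf --name "Free Editor"
#   python create_editor_slf.py --original path/to/editor.slf -o build/editor.slf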
def export_document(self, document_id, user_id, template_id=None,
                    format='docx', settings={}, output_filename=None,
                    mode='final'):
    """ Export document

    :param document_id: Smashdocs document id to be exported
    :param user_id: user id of the Smashdocs user performing the export
    :param template_id: template UID of a word template (mandatory if format='docx')
    :param format: docx|html|sdxml|parsx
    :param settings: DOCX-specific export settings
        (https://documentation.smashdocs.net/api_guide.html#exporting-documents-to-word)
    :param mode: final|news|allInOne|redlineAndPending
    """

    check_uuid(document_id)

    if format not in ('docx', 'html', 'sdxml', 'parsx'):
        raise ValueError('"format" must be sdxml|html|docx|parsx')

    if format == 'html':
        if mode not in ('final', 'news', 'allInOne', 'redlineAndPending'):
            raise ValueError('"mode" must be final|news|allInOne|redlineAndPending')

    headers = {
        'x-client-id': self.client_id,
        'content-type': 'application/json',
        'authorization': 'Bearer ' + self.get_token(),
    }

    data = {
        'userId': user_id,
    }

    if format == 'docx':
        url = self.partner_url + '/partner/documents/{0}/export/word'.format(document_id)
        data['templateId'] = template_id
        data['settings'] = settings
    elif format == 'sdxml':
        url = self.partner_url + '/partner/documents/{0}/export/sdxml'.format(document_id)
    elif format == 'html':
        data['mode'] = mode
        url = self.partner_url + '/partner/documents/{0}/export/html'.format(document_id)
    elif format == 'parsx':
        url = self.partner_url + '/partner/documents/{0}/export/parsx'.format(document_id)
    else:
        raise ValueError(u'Unsupported format: {}'.format(format))

    result = requests.post(url, headers=headers, data=json.dumps(data), verify=VERIFY)
    if result.status_code != 200:
        msg = u'Export error (HTTP {0}, {1})'.format(result.status_code, result.content)
        raise ExportError(msg, result)
    self.check_response(result)

    suffix = format
    if format in ('html', 'sdxml', 'parsx'):
        suffix = 'zip'

    if format == 'sdxml':
        tmp_fn = tempfile.mktemp(suffix='.zip')
        with open(tmp_fn, 'wb') as fp:
            fp.write(result.content)
        zf = zipfile.ZipFile(tmp_fn)
        sdxml = zf.read('sd.xml')
        zf.close()
        os.unlink(tmp_fn)
        self.validate_sdxml(sdxml)

    if not output_filename:
        output_filename = tempfile.mktemp(suffix='.' + suffix)

    if isinstance(output_filename, str):
        full_filename = os.path.abspath(output_filename)
        dirname, fn = os.path.split(full_filename)
        dirname = safe_unicode(dirname)
        fn = safe_unicode(fn)
        handle = OSFS(dirname)
    elif isinstance(output_filename, tuple):
        handle, fn = output_filename

    with handle.open(fn, 'wb') as fp:
        fp.write(result.content)

    return output_filename
def test_export_course(self):
    module_store = modulestore('direct')
    draft_store = modulestore('draft')
    content_store = contentstore()

    import_from_xml(module_store, 'common/test/data/', ['full'])
    location = CourseDescriptor.id_to_location('edX/full/6.002_Spring_2012')

    # get a vertical (and components in it) to put into 'draft'
    vertical = module_store.get_item(Location(['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]), depth=1)

    draft_store.clone_item(vertical.location, vertical.location)

    # We had a bug where orphaned draft nodes caused export to fail. This is here to cover that case.
    draft_store.clone_item(vertical.location, Location(['i4x', 'edX', 'full', 'vertical', 'no_references', 'draft']))

    for child in vertical.get_children():
        draft_store.clone_item(child.location, child.location)

    root_dir = path(mkdtemp_clean())

    # now create a private vertical
    private_vertical = draft_store.clone_item(
        vertical.location,
        Location(['i4x', 'edX', 'full', 'vertical', 'a_private_vertical', None]))

    # add private to list of children
    sequential = module_store.get_item(Location(['i4x', 'edX', 'full', 'sequential', 'Administrivia_and_Circuit_Elements', None]))
    private_location_no_draft = private_vertical.location.replace(revision=None)
    module_store.update_children(sequential.location, sequential.children + [private_location_no_draft.url()])

    # read back the sequential, to make sure we have a pointer to
    sequential = module_store.get_item(Location(['i4x', 'edX', 'full', 'sequential', 'Administrivia_and_Circuit_Elements', None]))

    self.assertIn(private_location_no_draft.url(), sequential.children)

    print 'Exporting to tempdir = {0}'.format(root_dir)

    # export out to a tempdir
    export_to_xml(module_store, content_store, location, root_dir, 'test_export', draft_modulestore=draft_store)

    # check for static tabs
    self.verify_content_existence(module_store, root_dir, location, 'tabs', 'static_tab', '.html')

    # check for course info
    self.verify_content_existence(module_store, root_dir, location, 'info', 'course_info', '.html')

    # check for custom_tags
    self.verify_content_existence(module_store, root_dir, location, 'custom_tags', 'custom_tag_template')

    # check for about content
    self.verify_content_existence(module_store, root_dir, location, 'about', 'about', '.html')

    # check for grading_policy.json
    filesystem = OSFS(root_dir / 'test_export/policies/6.002_Spring_2012')
    self.assertTrue(filesystem.exists('grading_policy.json'))

    course = module_store.get_item(location)
    # compare what's on disk compared to what we have in our course
    with filesystem.open('grading_policy.json', 'r') as grading_policy:
        on_disk = loads(grading_policy.read())
        self.assertEqual(on_disk, course.grading_policy)

    # check for policy.json
    self.assertTrue(filesystem.exists('policy.json'))

    # compare what's on disk to what we have in the course module
    with filesystem.open('policy.json', 'r') as course_policy:
        on_disk = loads(course_policy.read())
        self.assertIn('course/6.002_Spring_2012', on_disk)
        self.assertEqual(on_disk['course/6.002_Spring_2012'], own_metadata(course))

    # remove old course
    delete_course(module_store, content_store, location)

    # reimport
    import_from_xml(module_store, root_dir, ['test_export'], draft_store=draft_store)

    items = module_store.get_items(Location(['i4x', 'edX', 'full', 'vertical', None]))
    self.assertGreater(len(items), 0)

    for descriptor in items:
        # don't try to look at private verticals. Right now we're running
        # the service in non-draft aware
        if getattr(descriptor, 'is_draft', False):
            print "Checking {0}....".format(descriptor.location.url())
            resp = self.client.get(reverse('edit_unit', kwargs={'location': descriptor.location.url()}))
            self.assertEqual(resp.status_code, 200)

    # verify that we have the content in the draft store as well
    vertical = draft_store.get_item(Location(['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]), depth=1)

    self.assertTrue(getattr(vertical, 'is_draft', False))
    for child in vertical.get_children():
        self.assertTrue(getattr(child, 'is_draft', False))

    # make sure that we don't have a sequential that is in draft mode
    sequential = draft_store.get_item(Location(['i4x', 'edX', 'full', 'sequential', 'Administrivia_and_Circuit_Elements', None]))
    self.assertFalse(getattr(sequential, 'is_draft', False))

    # verify that we have the private vertical
    test_private_vertical = draft_store.get_item(Location(['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]))

    self.assertTrue(getattr(test_private_vertical, 'is_draft', False))

    # make sure the textbook survived the export/import
    course = module_store.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
    self.assertGreater(len(course.textbooks), 0)

    shutil.rmtree(root_dir)
class Timeline(object):
    """A timeline is a sequence of timestamped events."""

    def __init__(self, path, name, max_events=None):
        self.path = path
        self.name = name
        self.fs = OSFS(path, create=True)
        self.max_events = max_events

    def __repr__(self):
        return "Timeline({!r}, {!r}, max_events={!r})".format(self.path, self.name, self.max_events)

    def new_event(self, event_type, timestamp=None, *args, **kwargs):
        """Create and return an event, to be used as a context manager"""
        if self.max_events is not None:
            size = len(self.fs.listdir(wildcard="*.json"))
            if size >= self.max_events:
                raise TimelineFullError("The timeline has reached its maximum size")

        if timestamp is None:
            timestamp = int(time() * 1000.0)
        try:
            event_cls = _event_registry[event_type]
        except KeyError:
            raise UnknownEventError("No event type '{}'".format(event_type))

        # Make an event id that we can be confident it's unique
        token = str(randint(0, 2 ** 31))
        event_id = kwargs.pop('event_id', None) or "{}_{}_{}".format(event_type, timestamp, token)
        event = event_cls(self, event_id, timestamp, *args, **kwargs)
        log.debug('new event {!r}'.format(event))
        return event

    def new_photo(self, file, filename=None, ext=None, bytes=None, **kwargs):
        """Create a new photo object"""
        # 'bytes' restored as a keyword argument; without it, the else branch
        # below would raise a NameError instead of the intended ValueError.
        event = self.new_event('IMAGE', **kwargs)

        if hasattr(file, 'getvalue'):
            bytes = file.getvalue()
        elif file is not None:
            if isinstance(file, text_type):
                with open(file, 'rb') as f:
                    bytes = f.read()
            else:
                bytes = file.read()
        else:
            if bytes is None:
                raise ValueError("A value for 'file' or 'bytes' is required")
        event.attach_bytes(bytes, name='photo', filename=filename, ext=ext)
        return event

    def get_events(self, sort=True):
        """Get all accumulated events"""
        events = []
        for event_filename in self.fs.listdir(wildcard="*.json"):
            with self.fs.open(event_filename, 'rb') as f:
                event = loads(f.read().decode('utf-8'))
            events.append(event)
        if sort:
            # sort by timestamp
            events.sort(key=itemgetter('timestamp'))
        return events

    def clear_all(self):
        """Clear all stored events"""
        for filename in self.fs.listdir(wildcard="*.json"):
            try:
                self.fs.remove(filename)
            except FSError:
                pass

    def clear_events(self, event_ids):
        """Clear any events that have been processed"""
        for event_id in event_ids:
            filename = "{}.json".format(event_id)
            try:
                self.fs.remove(filename)
            except FSError:
                pass

    def _write_event(self, event_id, event):
        if hasattr(event, 'to_data'):
            event = event.to_data()
        event['event_id'] = event_id
        event_json = dumps(event, indent=4).encode('utf-8')
        filename = "{}.json".format(event_id)
        with self.fs.open(filename, 'wb') as f:
            f.write(event_json)
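# Usage sketch for the Timeline above (paths are illustrative assumptions;
# available event types depend on what is in _event_registry, and 'IMAGE'
# must be registered for new_photo to work):
timeline = Timeline('./timeline-data', 'camera', max_events=100)
with open('snapshot.jpg', 'rb') as f:
    timeline.new_photo(f, filename='snapshot.jpg', ext='jpg')
events = timeline.get_events()  # sorted by timestamp
timeline.clear_events([e['event_id'] for e in events])  # drop processed events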
def upload_document(self, filename, title=None, description=None,
                    role=None, user_data=None, status='draft'):
    """ Upload DOCX document

    :param filename: DOCX filename
    :param title: title of document
    :param description: description of document
    :param role: Smashdocs role: editor|reader|approver|commentator
    :param user_data: dict with user data
    :param status: create new document in draft|review mode
    :rtype: Smashdocs return datastructure (see Partner API docs for details)
    """

    check_title(title)
    check_description(description)
    check_role(role)
    check_status(status)
    check_user_data(user_data)

    if isinstance(filename, str):
        full_filename = os.path.abspath(filename)
        dirname, fn = os.path.split(full_filename)
        dirname = safe_unicode(dirname)
        fn = safe_unicode(fn)
        handle = OSFS(dirname)
    elif isinstance(filename, tuple):
        handle, fn = filename

    headers = {
        'x-client-id': self.client_id,
        'authorization': 'Bearer ' + self.get_token()
    }

    data = {
        'user': user_data,
        'title': safe_unicode(title),
        'description': safe_unicode(description),
        'groupId': self.group_id,
        'userRole': role,
        'status': status,
        'sectionHistory': True
    }

    base, ext = os.path.splitext(fn)
    suffix = 'docx' if ext.lower() == '.docx' else 'zip'
    endpoint = 'word' if ext.lower() == '.docx' else 'sdxml'

    if endpoint == 'sdxml' and filename.endswith('.zip'):
        zf = zipfile.ZipFile(filename)
        sdxml = zf.read('sd.xml')
        zf.close()
        self.validate_sdxml(sdxml)

    with handle.open(fn, 'rb') as fp:
        files = {
            'data': (None, json.dumps(data), 'application/json'),
            'file': ('dummy.{}'.format(suffix), fp, 'application/octet-stream'),
        }
        result = requests.post(
            self.partner_url + '/partner/imports/{0}/upload'.format(endpoint),
            headers=headers, files=files, verify=VERIFY)

    if result.status_code != 200:
        msg = u'Upload error (HTTP {0}, {1})'.format(result.status_code, result.content)
        raise UploadError(msg, result)
    self.check_response(result)
    return result.json()
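# Hypothetical usage sketch (not from the source): 'client' stands for an
# instance of the enclosing partner-API class; the user_data fields and the
# 'documentId' key of the response are assumptions, so consult the Partner
# API docs for the actual required fields.
result = client.upload_document(
    'report.docx',
    title='Quarterly report',
    description='Imported via the partner API',
    role='editor',
    user_data={'email': 'jane@example.com', 'firstname': 'Jane',
               'lastname': 'Doe', 'userId': 'jane'},
    status='draft')
document_id = result['documentId']  # assumed response key
exported = client.export_document(document_id, user_id='jane',
                                  template_id='<word-template-uid>',
                                  format='docx')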