def merge(source_files, output_filename="default.json", output_metrics=True):
    """Utility function to merge JSON assets."""
    LOG.info("%i assets -> %s", len(source_files), output_filename)
    merged = {}
    for i, f in enumerate(source_files):
        LOG.info("Processing:%03i:%s", i + 1, f)
        try:
            with open(f, 'r') as source:
                j = json_load(source)
                if isinstance(j, dict):
                    merged = merge_dictionaries(j, merged)
                else:
                    merged = j
        except IOError as e:
            LOG.error("Failed processing: %s", f)
            LOG.error(' >> %s', e)
    try:
        with open(output_filename, 'w') as target:
            LOG.info("Writing:%s", output_filename)
            json_encoder.FLOAT_REPR = float_to_string
            json_dump(merged, target, sort_keys=True, separators=(',', ':'))
    except IOError as e:
        LOG.error('Failed processing: %s', output_filename)
        LOG.error(' >> %s', e)
    else:
        if output_metrics:
            log_metrics(merged)
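# Illustrative usage of merge() (not from the source). json_load,
# json_dump, json_encoder, merge_dictionaries, float_to_string, LOG
# and log_metrics are assumed to come from the surrounding module,
# with json_load/json_dump aliasing the stdlib json functions:
#
#     merge(['base.json', 'patch.json'], 'build/assets.json')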
def write_to_file(options, data, filename=None, output_path=None, force_overwrite=False):
    if not filename:
        filename = "%s-%s-%s.json" % (options.project, options.type, options.daterange.filename_str())
    try:
        if not output_path:
            output_path = normpath(path_join(options.outputdir, filename))

        if path_exists(output_path):
            if options.overwrite or force_overwrite:
                if not options.silent:
                    warning("Overwriting existing file: %s" % output_path)
            else:
                if not options.silent:
                    warning("Skipping existing file: %s" % output_path)
                return

        indentation = None
        if options.indent:
            indentation = 4
            # Re-parse string payloads so they can be re-serialised indented
            if isinstance(data, str):
                data = json_loads(data)

        with open(output_path, "wb") as fout:
            if isinstance(data, str):
                fout.write(data)
            else:
                json_dump(data, fout, indent=indentation)

        if options.verbose:
            log("Finished writing to: %s" % output_path)
    except (IOError, OSError) as e:
        error(e)
        exit(-1)
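# Illustrative call sites for write_to_file() (not from the source;
# 'options' is the parsed command-line namespace these tools pass
# around, with the attribute names used in the function body above):
#
#     write_to_file(options, results)  # filename derived from options
#     write_to_file(options, results, filename='report.json', force_overwrite=True)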
def json_to_file(self, target, sort=True, indent=0):
    """Convert the asset to JSON and write it to the file stream."""
    json_encoder.FLOAT_REPR = float_to_string
    if indent > 0:
        return json_dump(self.asset, target, sort_keys=sort, indent=indent)
    else:
        return json_dump(self.asset, target, sort_keys=sort, separators=(',', ':'))
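# Illustrative call sites for json_to_file() (hypothetical 'asset'
# object; not from the source). indent=0 takes the compact-separators
# path, anything greater pretty-prints:
#
#     with open('asset.json', 'w') as target:
#         asset.json_to_file(target)            # compact (',', ':')
#     with open('asset-pretty.json', 'w') as target:
#         asset.json_to_file(target, indent=4)  # human-readable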
def write_metadata_cache(self, meta_data, force_mtime):
    try:
        file_path = self.get_meta_data_path()
        gzip_file = GzipFile(filename=file_path, mode='wb', compresslevel=9)
        json_dump(meta_data, gzip_file, separators=(',', ':'), sort_keys=True)
        gzip_file.close()
        if force_mtime > 0:
            _update_file_mtime(file_path, force_mtime)
    except (IOError, OSError):
        # Failing to write the cache is non-fatal
        pass
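# A minimal sketch of the _update_file_mtime helper referenced above
# (the real implementation is not part of this excerpt; os.utime is
# assumed to be all it needs):

def _update_file_mtime(file_path, mtime):
    from os import utime
    # Force both access and modification time to the given POSIX time
    utime(file_path, (mtime, mtime))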
def write_to_file(options, data, filename=None, output_path=None, force_overwrite=False):
    if not filename:
        start_date = options.daterange[0]
        end_date = options.daterange[-1]
        filename = '%s-%s-%s' % (options.project, options.datatype, start_date)
        if start_date != end_date:
            filename += '_-_' + end_date
        filename += '.json'
    try:
        if not output_path:
            output_path = normpath(path_join(options.outputdir, filename))

        if path_exists(output_path):
            if options.overwrite or force_overwrite:
                if not options.silent:
                    warning('Overwriting existing file: %s' % output_path)
            else:
                if not options.silent:
                    warning('Skipping existing file: %s' % output_path)
                return

        indentation = None
        if options.indent:
            indentation = 4
            # Re-parse string payloads so they can be re-serialised indented
            if isinstance(data, str):
                data = json_loads(data)

        with open(output_path, 'wb') as fout:
            if isinstance(data, str):
                fout.write(data)
            else:
                json_dump(data, fout, indent=indentation)

        if options.verbose:
            log('Finished writing to: %s' % output_path)
    except (IOError, OSError) as e:
        error(e)
        exit(-1)
def save_hashes(self, hashes):
    try:
        hashes_folder = join(self.cache_dir, self._cached_hash_folder)
        try:
            makedirs(hashes_folder)
        except OSError as e:
            if e.errno != EEXIST:
                LOG.error(str(e))
                return

        # Load existing cache and only save the delta
        for file_path in iglob(join(hashes_folder, '*.json')):
            try:
                file_obj = open(file_path, 'rb')
                hashes_meta = json_load(file_obj)
                file_obj.close()
                hashes_host = hashes_meta['host']
                if hashes_host == self.hub_pool.host:
                    hashes.difference_update(hashes_meta['hashes'])
            except (IOError, TypeError, ValueError, KeyError, AttributeError):
                pass

        if hashes:
            try:
                file_path = join(hashes_folder, '%d.json' % long(time()))
                file_obj = open(file_path, 'wb')
                hashes_meta = {
                    'version': 2,
                    'host': self.hub_pool.host,
                    'hashes': list(hashes)
                }
                json_dump(hashes_meta, file_obj, separators=(',', ':'))
                file_obj.close()
            except IOError:
                pass
    # pylint: disable=W0703
    except Exception as e:
        LOG.error(str(e))
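# A hedged sketch of a matching read side (hypothetical; no loader
# appears in this excerpt). It unions every hash list cached for the
# current host, the inverse of the delta filtering in save_hashes():

def load_hashes(self):
    hashes = set()
    for file_path in iglob(join(self.cache_dir, self._cached_hash_folder, '*.json')):
        try:
            with open(file_path, 'rb') as file_obj:
                hashes_meta = json_load(file_obj)
            if hashes_meta.get('host') == self.hub_pool.host:
                hashes.update(hashes_meta['hashes'])
        except (IOError, TypeError, ValueError, KeyError, AttributeError):
            pass
    return hashes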
def _write_mapping_table():
    print '%i assets -> %s' % (len(urn_mapping), env['MAPPING_TABLE'])
    with open(env['APP_MAPPING_TABLE'], 'w') as f:
        json_dump(mapping_table_obj, f, separators=(',', ':'))
def main():
    result = 0
    env = {}
    templates = ['app']
    shaders = []  # Ignore temporarily ['draw2D']

    parser = OptionParser()
    parser.add_option('--clean', action='store_true', default=False,
                      help="Cleans the build before building")
    parser.add_option('--clean-only', action='store_true', default=False,
                      help="Only cleans")
    parser.add_option('--code-only', action='store_true', default=False,
                      help="Build only the game code")
    parser.add_option('--find-non-ascii', action='store_true', default=False,
                      help="Searches for non-ASCII characters in the scripts")
    parser.add_option('--development', action='store_true',
                      help="Only builds the development build")
    parser.add_option('--verbose', action='store_true',
                      help="Prints additional information about the build process")

    (options, args) = parser.parse_args()

    if not configure(env, options):
        result = 1
        print 'Failed to configure build'
        return result

    if options.find_non_ascii:
        result = find_non_ascii(env['APP_SCRIPTS'], env, options)
        if result != 0:
            print "Found non-ascii character in script"
        else:
            print "Only ASCII found!"
        return result

    # Clean only
    if options.clean_only:
        result = clean(env, options)
        if result != 0:
            print 'Failed to clean build'
        else:
            print 'Cleaned'
        return result

    # Clean build first
    if options.clean:
        result = clean(env, options)
        if result != 0:
            print 'Failed to clean build'
            return result
        print 'Cleaned'

    # Asset build
    if len(args) > 0:
        files = args
    else:
        if not options.code_only:
            print ""
            print "----------------------------------------------------------"
            print " ASSET BUILD (may be slow - disable with --code-only)"
            print "----------------------------------------------------------"
            print ""

            # Mapping table
            if not os.path.exists('staticmax'):
                os.makedirs('staticmax')
            (mapping_table_obj, build_deps) = gen_mapping('assets', 'staticmax')

            # Write mapping table
            with open('mapping_table.json', 'wb') as f:
                json_dump(mapping_table_obj, f, separators=(',', ':'))

            # Build all asset files
            # print "Deps: %s" % build_deps
            for src in build_deps:
                dest = build_deps[src]
                print "Building %s -> %s" % (src, dest)
                result = do_build(src, dest, env, options)
                if result:
                    print "Build failed"
                    exit(1)

        # Code
        print ""
        print "----------------------------------------------------------"
        print " CODE BUILD"
        print "----------------------------------------------------------"
        print ""
        code_files = glob.glob('templates/*.js')
        # print "CODE FILES: %s" % code_files
        for f in code_files:
            print " APP: %s" % f
            (code_base, code_ext) = os.path.splitext(os.path.split(f)[1])
            code_dests = [code_base + ".canvas.debug.html",
                          code_base + ".canvas.release.html",
                          code_base + ".canvas.js",
                          code_base + ".debug.html",
                          code_base + ".release.html",
                          code_base + ".tzjs"]
            # print " CODE:FILES: %s" % code_dests
            for dest in code_dests:
                do_build_code(dest, env, options)

        print "DONE"
        exit(0)

    # files = []
    # for s in shaders:
    #     files.append('%s.cgfx' % s)
    # result = build(files, env, options)
    # if result == 0:
    #     print 'Built Assets'
    # else:
    #     print 'Failed to build assets'
    #     return result

    # if yaml2json('mapping_table', 'mapping_table', True, env, options) == 0:
    #     print 'Built Mapping Table'
    # else:
    #     print 'Failed Mapping Table'

    if len(args) > 0:
        files = args
    else:
        files = []
        for t in templates:
            if options.development:
                if env['SDK_VERSION'] < StrictVersion('0.19.0'):
                    files.append('%s.jsinc' % t)
                    files.append('%s.development.html' % t)
                else:
                    files.append('%s.debug.html' % t)
                    files.append('%s.canvas.debug.html' % t)
            else:
                if env['SDK_VERSION'] < StrictVersion('0.19.0'):
                    files.append('%s.jsinc' % t)
                    files.append('%s.development.html' % t)
                    files.append('%s.release.html' % t)
                    files.append('%s.tzjs' % t)
                else:
                    # Order is important
                    files.append('%s.debug.html' % t)
                    files.append('%s.default.debug.html' % t)
                    files.append('%s.canvas.debug.html' % t)
                    files.append('%s.canvas.default.debug.html' % t)
                    files.append('%s.tzjs' % t)
                    files.append('%s.release.html' % t)
                    files.append('%s.canvas.js' % t)
                    files.append('%s.canvas.release.html' % t)

    result = build(files, env, options)
    if result == 0:
        print 'Built Templates'
    else:
        print 'Failed Templates'

    return result
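# Typical invocations of this build script (the script name is
# illustrative; the flags are the ones registered on the OptionParser
# in main() above):
#
#     python build.py                  # full asset, code and template build
#     python build.py --clean          # clean first, then rebuild
#     python build.py --code-only      # skip the slow asset build
#     python build.py --find-non-ascii # scan scripts for non-ASCII characters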
def yaml2json(source_filename, dest_filename, is_mapping_table, env, options, indent=0):
    json_filename = '%s.json' % dest_filename
    yaml_filename = '%s.yaml' % source_filename
    result = 0
    json_file = None
    yaml_file = None
    try:
        json_file = open(json_filename, 'w')
        yaml_file = open(yaml_filename, 'r')
    except IOError as e:
        print str(e)
        result = 1
    else:
        yaml_data = yaml.load(yaml_file)
        if yaml_data is None:
            print 'Failed to decode response for: %s' % yaml_filename
            result = 1
        else:
            if is_mapping_table:
                # Support for version 1.0
                yaml_dict = {'version': 1.0}
                staticmax_path = env['APP_STATICMAX']
                if not os.path.isdir(staticmax_path):
                    os.makedirs(staticmax_path)
                # Process assets
                for entry in yaml_data:
                    src = yaml_data[entry]
                    asset_hash = get_staticmax_name(src)
                    if asset_hash is not None:
                        dst = os.path.join('staticmax', asset_hash)
                        try:
                            copyfile(src, dst)
                        except IOError as e:
                            print str(e)
                        else:
                            yaml_data[entry] = asset_hash
                    else:
                        print "No hash available for: %s" % src
                yaml_dict['urnmapping'] = yaml_data
            else:
                yaml_dict = yaml_data
            try:
                if indent > 0:
                    json_dump(yaml_dict, json_file, indent=indent)
                else:
                    json_dump(yaml_dict, json_file)
            except TypeError as e:
                print str(e)
                result = 1
    if json_file is not None:
        json_file.close()
    if yaml_file is not None:
        yaml_file.close()
    return result
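# Example invocation of yaml2json(), mirroring the commented-out call
# in main() above: convert mapping_table.yaml to mapping_table.json,
# copying hashed assets into the staticmax folder along the way.
#
#     if yaml2json('mapping_table', 'mapping_table', True, env, options) == 0:
#         print 'Built Mapping Table'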