def _dump_tars(self, specs, dump_dir=curdir):
    """Write one deduplicated '<hexdigest>.tar' per spec plus a '_dumped_entries.py' index file."""
    target_dir = expandvars(expanduser(dump_dir))
    # Maps tar-content hexdigest -> every index tuple that produced it.
    digest_index_lists = {}
    with open(ospath_join(target_dir, '_dumped_entries.py'), 'wb') as entries_file:
        for image_id, indexes, _, top_most_layer in specs:
            hash_tar = self._get_hash_tar(image_id, indexes, top_most_layer)
            hexdigest = hash_tar.hash_obj.hexdigest()
            tar_path = ospath_join(target_dir, '{}.tar'.format(hexdigest))
            with open(tar_path, 'wb') as tar_file:
                digest_index_lists.setdefault(hexdigest, []).append(indexes)
                tar_file.write(hash_tar.getvalue())
                hex_indexes = ', '.join('0x{:1x}'.format(i) for i in indexes)
                # Position of the smallest index within the tuple.
                smallest_pos = min(enumerate(indexes), key=itemgetter(1))[0]
                print("( '{}', ( {} ), '{}', {} ),".format(image_id, hex_indexes, hexdigest, smallest_pos), file=entries_file)
        # Trailing comment lines summarize the digest -> indexes bookkeeping.
        for digest, index_lists in iteritems(digest_index_lists):
            print('# {} -> {}'.format(digest, index_lists), file=entries_file)
def drawGeoMap(self, aStuData):
    """Render one student's location history as a pyecharts Geo map and save it as HTML.

    aStuData: dict with 'info' ("<sno><SPLIT_CHAR><sname>") and 'data'
    (mapping of date -> location tuple). On any rendering failure the error
    is logged and a user-facing message is emitted via self._signal.
    """
    sno, sname = aStuData['info'].split(SPLIT_CHAR)
    sdata = aStuData['data']
    try:
        geo = Geo()

        def add_data(date, location):
            # Fall back to the default location only when the value cannot
            # be unpacked into (longitude, latitude, address); a bare
            # except here would also swallow KeyboardInterrupt/SystemExit.
            try:
                longitude, latitude, address = location
            except (TypeError, ValueError):
                longitude, latitude, address = DEFAULT_Location
            geo.add_coordinate(date, longitude, latitude)
            geo.add(date, [(date, address)], type_=ChartType.EFFECT_SCATTER)

        geo.add_schema(maptype="china")
        for date, data in sdata.items():
            add_data(date, data)
        title = f'{sno} {sname} 位置动态'
        geo.set_series_opts(label_opts=opts.LabelOpts(is_show=False))
        geo.set_global_opts(legend_opts=opts.LegendOpts(orient='vertical', pos_left='left', pos_top='10%'),
                            title_opts=opts.TitleOpts(title=title))
        html_path = ospath_join(self.mapsDir, f'{sno}_{sname}.html')
        geo.render(html_path)
    except Exception:
        # Most common cause: sno/sname contains characters invalid in filenames.
        log.error(f'地图生成失败: {sno} {sname} - 可能是包含了文件名不可用的特殊字符', exc_info=True)
        self._signal.emit(f'错误:{sno} {sname} 地图生成失败!可能原因:工号或提交人姓名中包含文件名不可用的特殊字符,如:*。请修改后重试!')
def test_initial_value(self):
    """A freshly created LangSettings enables only English, using the default font."""
    with TemporaryDirectory() as tempdir:
        settings_path = ospath_join(tempdir, 'test_langsettings.json')
        langsettings = LangSettings(settings_path)
        expected_english = {'enable': True, 'font_name': DEFAULT_FONT}
        self.assertDictEqual(langsettings.data['english'], expected_english)
        # Every other language starts disabled with an empty font name.
        others = ((lang, value) for lang, value in langsettings.data.items() if lang != 'english')
        for lang, value in others:
            self.assertDictEqual(value, {'enable': False, 'font_name': ''})
def fontnames():
    '''Search the system font directories for font files and return them as an iterable (cached).'''
    global _fontnames
    if _fontnames is None:
        found = set()
        for directory in LabelBase.get_system_fonts_dir():
            for filename in os_listdir(directory):
                is_regular_file = ospath_isfile(ospath_join(directory, filename))
                if is_regular_file and filename.endswith(('.ttf', '.ttc')):
                    found.add(filename)
        # Case-insensitive alphabetical order, cached for subsequent calls.
        _fontnames = sorted(found, key=str.lower)
    return _fontnames
async def main(cls, *, widgets, ctx):
    # NOTE: 'Widget.export_as_image()' doesn't work for some reason, so 'Window.screenshot()' is used instead.
    # NOTE: Depending on the window size, the screenshot sometimes comes out corrupted.
    from os.path import join as ospath_join
    from kivy.core.window import Window
    import asynckivy as ak
    from ._utils import show_yes_no_dialog, open_file_with_default_os_app, temp_dir
    answer = await show_yes_no_dialog(text_main='take a screenshot of the entire app?\n(Sorry, you cannot just save the canvas for now. )')
    if answer == 'yes':
        await ak.sleep(.3)
        screenshot_path = ospath_join(temp_dir(), 'screenshot.png')
        saved_path = Window.screenshot(screenshot_path)
        open_file_with_default_os_app(saved_path)
    # Reset every toolbox toggle button back to its normal state.
    for toolbox_child in widgets['toolbox'].children:
        toolbox_child.state = 'normal'
def _dump_tars(self, specs, dump_dir=curdir):
    """Dump one '<hexdigest>.tar' per spec into *dump_dir* plus a '_dumped_entries.py' index.

    specs: iterable of (image_id, indexes, _, top_most_layer) tuples.
    dump_dir: target directory; '~' and environment variables are expanded.
    """
    expanded_dump_dir = expandvars(expanduser(dump_dir))
    # Maps tar-content hexdigest -> list of index tuples that produced it.
    hashes_to_indexes = {}
    with open(ospath_join(expanded_dump_dir, '_dumped_entries.py'), 'wb') as dump_py_file:
        for image_id, indexes, _, top_most_layer in specs:
            hash_tar = self._get_hash_tar(image_id, indexes, top_most_layer)
            actual_hexdigest = hash_tar.hash_obj.hexdigest()
            with open(ospath_join(expanded_dump_dir, '{}.tar'.format(actual_hexdigest)), 'wb') as dump_tar_file:
                # EAFP: first occurrence of this digest creates its list.
                try:
                    index_list = hashes_to_indexes[actual_hexdigest]
                except KeyError:
                    index_list = hashes_to_indexes[actual_hexdigest] = []
                index_list.append(indexes)
                dump_tar_file.write(hash_tar.getvalue())
                # Emit a Python tuple literal per entry: (image_id, hex indexes,
                # hexdigest, position of the smallest index).
                print("( '{}', ( {} ), '{}', {} ),".format(image_id, ', '.join(( '0x{:1x}'.format(i) for i in indexes )), actual_hexdigest, min(enumerate(indexes), key=itemgetter(1))[0]), file=dump_py_file)
        # Trailing comment lines summarize the digest -> indexes mapping.
        for k, v in iteritems(hashes_to_indexes):
            print('# {} -> {}'.format(k, v), file=dump_py_file)
def get_image(self, image):
    """Build and return an in-memory tar archive containing *image* and all its ancestor layers."""
    if not image:
        raise APIError(HTTPError('500 Server Error'), None, explanation='Usage: image_export IMAGE [IMAGE...]')
    # Walk the parent chain, newest layer first.
    layers = []
    layer_id = image
    while layer_id:
        normalized = normalizeimage(self._findlayer(layer_id), copy=True)
        layers.append(normalized)
        layer_id = normalized[':parent_id']
    archive = BytesIO()
    now = time()
    with tarfile_open(mode='w', fileobj=archive) as tar:
        for layer in layers:
            # One directory entry per layer, named by its full ID.
            dir_info = TarInfo(layer[':id'])
            dir_info.mtime = now
            dir_info.mode = 0o755
            dir_info.type = DIRTYPE
            tar.addfile(dir_info)
            src_path = ospath_join(self._my_dir, 'data', layer[':short_id'], 'layer.tar')
            dst_path = '{}/layer.tar'.format(layer[':id'])
            with open(src_path, 'rb') as src_file:
                member = tar.gettarinfo(src_path, dst_path)
                member.mtime = now
                member.mode = 0o644
                member.uid = member.gid = 0
                member.uname = member.gname = ''
                tar.addfile(member, fileobj=src_file)
    archive.seek(0)
    return archive
def test_available_langs(self):
    """available_langs() yields only languages whose 'enable' flag is set."""
    with TemporaryDirectory() as tempdir:
        langsettings = LangSettings(ospath_join(tempdir, 'test.json'))
        data = langsettings.data
        data['english'].update(enable=False)
        data['japanese'].update(enable=True, font_name='yutapon')
        data['chinese'].update(enable=True, font_name='uming.ttc')
        actual = dict(langsettings.available_langs())
        expected = {
            'japanese': {'enable': True, 'font_name': 'yutapon'},
            'chinese': {'enable': True, 'font_name': 'uming.ttc'},
        }
        self.assertDictEqual(actual, expected)
def get_image(self, image):
    """Return a BytesIO holding a tar archive of *image* plus all its ancestor layers.

    Raises APIError when *image* is empty/falsy.
    """
    if not image:
        raise APIError(HTTPError('500 Server Error'), None, explanation='Usage: image_export IMAGE [IMAGE...]')
    # Collect the layer chain, newest first, by following ':parent_id'.
    layers = []
    next_layer_id = image
    while next_layer_id:
        layer = normalizeimage(self._findlayer(next_layer_id), copy=True)
        layers.append(layer)
        next_layer_id = layers[-1][':parent_id']
    image_file = BytesIO()
    mtime = time()
    with tarfile_open(mode='w', fileobj=image_file) as image_tar_file:
        for layer in layers:
            # Directory entry named by the layer's full ID.
            ti_dir = TarInfo(layer[':id'])
            ti_dir.mtime = mtime
            ti_dir.mode = 0o755
            ti_dir.type = DIRTYPE
            image_tar_file.addfile(ti_dir)
            layer_tar_src_path = ospath_join(self._my_dir, 'data', layer[':short_id'], 'layer.tar')
            with open(layer_tar_src_path, 'rb') as layer_tar_src_file:
                # Copy the on-disk layer tarball in as '<id>/layer.tar',
                # normalizing ownership and permissions.
                layer_tar_dst_path = '{}/layer.tar'.format(layer[':id'])
                ti_layer = image_tar_file.gettarinfo(layer_tar_src_path, layer_tar_dst_path)
                ti_layer.mtime = mtime
                ti_layer.mode = 0o644
                ti_layer.uid = ti_layer.gid = 0
                ti_layer.uname = ti_layer.gname = ''
                image_tar_file.addfile(ti_layer, fileobj=layer_tar_src_file)
    # Rewind so callers can read the archive from the start.
    image_file.seek(0)
    return image_file
# Runtime dependencies declared for setup().
INSTALL_REQUIRES = (
    'docker-py',
    'future',
    'humanize',
    'python-dateutil',
)

TESTS_REQUIRE = [
    'pytest',
]

# WARNING: This imposes limitations on test/requirements.txt such that the
# full Pip syntax is not supported. See also
# <http://stackoverflow.com/questions/14399534/>.
with open(ospath_join(_MY_DIR, 'test', 'requirements.txt')) as f:
    TESTS_REQUIRE.extend(f.read().splitlines())

# ---- Initialization ----------------------------------------------------

# Execute _dimgx/version.py (if present) so its names land in _namespace
# without importing the package.
_namespace = {
    '_version_path': ospath_join(_MY_DIR, '_dimgx', 'version.py'),
}

if isfile(_namespace['_version_path']):
    with open(_namespace['_version_path']) as _version_file:
        exec(compile(_version_file.read(), _namespace['_version_path'], 'exec'), _namespace, _namespace)  # pylint: disable=exec-used

# Read the long description as UTF-8 (statement body continues beyond this excerpt).
with codecs_open(ospath_join(_MY_DIR, 'README.rst'), encoding='utf-8') as _readme_file:
# This module exports nothing importable.
__all__ = ()

# Runtime dependencies declared for setup().
INSTALL_REQUIRES = (
    'docker-py',
    'future',
    'humanize',
    'python-dateutil',
)

# Directory containing this setup script, derived from the current frame's filename.
_MY_DIR = dirname(getframeinfo(currentframe()).filename)

#---- Initialization -----------------------------------------------------

# Execute _dimgx/version.py (if present) so its names land in _namespace
# without importing the package.
_namespace = {
    '_version_path': ospath_join(_MY_DIR, '_dimgx', 'version.py'),
}

if isfile(_namespace['_version_path']):
    with open(_namespace['_version_path']) as _version_file:
        exec(compile(_version_file.read(), _namespace['_version_path'], 'exec'), _namespace, _namespace)  # pylint: disable=exec-used

with open(ospath_join(_MY_DIR, 'README.rst')) as _readme_file:
    README = _readme_file.read()

__version__ = _namespace.get('__version__')
# Join the version tuple into a dotted string, e.g. (1, 2, 3) -> '1.2.3'.
__version__ = u'.'.join(( str(i) for i in __version__ )) if __version__ is not None else None
__release__ = _namespace.get('__release__', __version__)

# Keyword arguments for setup() (dict continues beyond this excerpt).
_SETUP_ARGS = {
    'name' : 'dimgx',
def __init__(self, always_raise=None):
    """Build a fake Docker client over a deterministic in-memory layer graph.

    always_raise: stored on the instance; presumably an exception the fake
        raises from its API methods — TODO confirm against the methods
        that read self._always_raise.
    """
    super().__init__()
    self._always_raise = always_raise
    # Directory containing this source file, used to locate test layer tarballs.
    self._my_dir = dirname(getframeinfo(currentframe()).filename)
    self.layers = []
    self.layers_by_id = {}
    self.layers_by_tag = {}
    num_paths = len(
        FauxDockerClient.SHORT_IDS_BY_PATH)  # should not exceed 0x100
    path_depth = len(
        FauxDockerClient.SHORT_IDS_BY_PATH[0])  # should not exceed 0x100
    max_path_idx = path_depth - 1
    for j in range(num_paths):
        for i in range(path_depth):
            # Deterministic pseudo-ID: double SHA-256 of the repeated
            # path/depth signature, with the signature spliced back in so
            # short IDs stay recognizable.
            layer_sig = '{:02x}{:02x}'.format(j, i)
            layer_id = sha256(sha256(unhexlify(layer_sig * 16)).digest()).hexdigest()
            short_id = layer_id[:8] + layer_sig
            layer_id = short_id + layer_id[12:60] + layer_sig
            last_layer_id = '' if i == 0 else self.layers[-1]['Id']
            layer_tar_src_path = ospath_join(self._my_dir, 'data', layer_id[:12], 'layer.tar')
            try:
                # This isn't quite right, but it doesn't matter for
                # our purposes
                layer_tar_src_stat = stat(layer_tar_src_path)
                layer_size = layer_tar_src_stat.st_size
            except OSError:
                # Missing tarball -> treat as an empty layer.
                layer_size = 0
            self.layers.append({
                'Created': '2015-04-{:02d}T{:02d}:00:{:02d}.000000000Z'.format(
                    i + 10, i, j),
                'Id': layer_id,
                'Parent': last_layer_id,
                'Size': layer_size,
                'RepoTags': ['<none>:<none>'],
                'VirtualSize': layer_size if i == 0 else layer_size + self.layers[-1]['Size'],
            })
            assert short_id == FauxDockerClient.SHORT_IDS_BY_PATH[j][
                max_path_idx - i]
    # Tag two well-known layers so tests can look them up by repo tag.
    self.layers[max_path_idx]['RepoTags'] = ['getto:dachoppa']
    self.layers[-1]['RepoTags'] = ['greatest:hits', 'greatest:latest']
    for layer in self.layers:
        normalizeimage(layer)
        self.layers_by_id[layer[':id']] = layer
        self.layers_by_id[layer[':short_id']] = layer
        for repo_tag in layer[':repo_tags']:
            self.layers_by_tag[repo_tag] = layer
    # Sanity checks over the constructed lookup tables.
    for k, v in iteritems(self.layers_by_id):
        assert k == v['Id'][:len(k)].lower()
    assert self.layers_by_tag['getto:dachoppa'] == self.layers[
        max_path_idx]
    assert self.layers_by_tag['getto:dachoppa'][
        'Id'][:12] == FauxDockerClient.SHORT_IDS_BY_PATH[0][0]
    # NOTE(review): the bare 'greatest' key is presumably added by
    # normalizeimage from the repo tags — confirm.
    assert self.layers_by_tag['greatest'] == self.layers[-1]
    assert self.layers_by_tag['greatest:latest'] == self.layers[-1]
    assert self.layers_by_tag['greatest:hits'] == self.layers[-1]
    assert self.layers_by_tag['greatest:hits'][
        'Id'][:12] == FauxDockerClient.SHORT_IDS_BY_PATH[1][0]
    self.layers.sort(key=imagekey, reverse=True)
def __init__(self, always_raise=None):
    """Construct a fake Docker client backed by a deterministic in-memory layer graph.

    always_raise: stored as self._always_raise; presumably raised by the
        fake's API methods when set — TODO confirm against those methods.
    """
    super().__init__()
    self._always_raise = always_raise
    # Directory of this source file; layer tarballs live under it in 'data/'.
    self._my_dir = dirname(getframeinfo(currentframe()).filename)
    self.layers = []
    self.layers_by_id = {}
    self.layers_by_tag = {}
    num_paths = len(FauxDockerClient.SHORT_IDS_BY_PATH)  # should not exceed 0x100
    path_depth = len(FauxDockerClient.SHORT_IDS_BY_PATH[0])  # should not exceed 0x100
    max_path_idx = path_depth - 1
    for j in range(num_paths):
        for i in range(path_depth):
            # Deterministic pseudo-ID: double SHA-256 of the repeated path/depth
            # signature, with the signature spliced into the short and full IDs.
            layer_sig = '{:02x}{:02x}'.format(j, i)
            layer_id = sha256(sha256(unhexlify(layer_sig * 16)).digest()).hexdigest()
            short_id = layer_id[:8] + layer_sig
            layer_id = short_id + layer_id[12:60] + layer_sig
            last_layer_id = '' if i == 0 else self.layers[-1]['Id']
            layer_tar_src_path = ospath_join(self._my_dir, 'data', layer_id[:12], 'layer.tar')
            try:
                # This isn't quite right, but it doesn't matter for
                # our purposes
                layer_tar_src_stat = stat(layer_tar_src_path)
                layer_size = layer_tar_src_stat.st_size
            except OSError:
                # No tarball on disk -> empty layer.
                layer_size = 0
            self.layers.append({
                'Created': '2015-04-{:02d}T{:02d}:00:{:02d}.000000000Z'.format(i + 10, i, j),
                'Id': layer_id,
                'Parent': last_layer_id,
                'Size': layer_size,
                'RepoTags': [ '<none>:<none>' ],
                'VirtualSize': layer_size if i == 0 else layer_size + self.layers[-1]['Size'],
            })
            assert short_id == FauxDockerClient.SHORT_IDS_BY_PATH[j][max_path_idx - i]
    # Give two well-known layers repo tags so tests can find them by tag.
    self.layers[max_path_idx]['RepoTags'] = [ 'getto:dachoppa' ]
    self.layers[-1]['RepoTags'] = [ 'greatest:hits', 'greatest:latest' ]
    for layer in self.layers:
        normalizeimage(layer)
        self.layers_by_id[layer[':id']] = layer
        self.layers_by_id[layer[':short_id']] = layer
        for repo_tag in layer[':repo_tags']:
            self.layers_by_tag[repo_tag] = layer
    # Sanity-check the constructed lookup tables.
    for k, v in iteritems(self.layers_by_id):
        assert k == v['Id'][:len(k)].lower()
    assert self.layers_by_tag['getto:dachoppa'] == self.layers[max_path_idx]
    assert self.layers_by_tag['getto:dachoppa']['Id'][:12] == FauxDockerClient.SHORT_IDS_BY_PATH[0][0]
    # NOTE(review): the bare 'greatest' key looks like it is added by
    # normalizeimage from the repo tags — confirm.
    assert self.layers_by_tag['greatest'] == self.layers[-1]
    assert self.layers_by_tag['greatest:latest'] == self.layers[-1]
    assert self.layers_by_tag['greatest:hits'] == self.layers[-1]
    assert self.layers_by_tag['greatest:hits']['Id'][:12] == FauxDockerClient.SHORT_IDS_BY_PATH[1][0]
    self.layers.sort(key=imagekey, reverse=True)
# Runtime dependencies declared for setup().
INSTALL_REQUIRES = (
    'docker-py',
    'future',
    'humanize',
    'python-dateutil',
)

TESTS_REQUIRE = [
    'pytest',
]

# WARNING: This imposes limitations on test/requirements.txt such that the
# full Pip syntax is not supported. See also
# <http://stackoverflow.com/questions/14399534/>.
with open(ospath_join(_MY_DIR, 'test', 'requirements.txt')) as f:
    TESTS_REQUIRE.extend(f.read().splitlines())

# ---- Initialization ----------------------------------------------------

# Execute _dimgx/version.py (if present) so its names land in _namespace
# without importing the package.
_namespace = {
    '_version_path': ospath_join(_MY_DIR, '_dimgx', 'version.py'),
}

if isfile(_namespace['_version_path']):
    with open(_namespace['_version_path']) as _version_file:
        exec(compile(_version_file.read(), _namespace['_version_path'], 'exec'), _namespace, _namespace)  # pylint: disable=exec-used

# Long description for packaging, read as UTF-8.
with codecs_open(ospath_join(_MY_DIR, 'README.rst'), encoding='utf-8') as _readme_file:
    README = _readme_file.read()