def update_required(self, max_age=432000):
    """Return True if it's time to update.

    Compares the mtime of the check file against *max_age*; when an
    update is due the timestamp is reset so the next check starts a
    fresh interval.

    :param int max_age: Maximum age of the check file in seconds before
        an update is required (default 432000 = 5 days).
    :return: True when an update is required, else False.
    :rtype: bool
    """
    if os.path.exists(self.update_file):
        update = (time.time() - os.stat(self.update_file).st_mtime) > max_age
        if update:
            # Reset the timestamp of the check file
            os.utime(self.update_file, None)
        return update
    else:
        # Create missing check file and force update.
        # Use a context manager so the handle is closed deterministically
        # instead of the bare _open(...).close() chain.
        with _open(self.update_file, "w"):
            pass
        return True
def generate_document(language):
    """Render the translated manual for *language* as <language>/manual.html."""
    reload(languageHandler)
    languageHandler.setLanguage(language)
    reload(strings)
    # Everything after the first entry (the title) is the markdown body.
    body_html = markdown.markdown(
        "\n".join(strings.documentation[1:]),
        extensions=["markdown.extensions.toc"])
    title = strings.documentation[0]
    head = """<!doctype html>
<html lang="%s">
<head>
<title>%s</title>
<meta charset="utf-8">
</head>
<body>
<header><h1>%s</h1></header>
""" % (language, title, title)
    document = head + body_html + "\n</body>\n</html>"
    if not os.path.exists(language):
        os.mkdir(language)
    outfile = _open("%s/manual.html" % language, "w", encoding="utf-8")
    outfile.write(document)
    outfile.close()
def parse_from_file(file, encoding='utf-8', **kwargs):
    """Read *file* with *encoding* and run its content through convert().

    :raises OSError: When *file* does not exist.
    """
    if not os.path.exists(file):
        raise OSError('No such file exists: {}'.format(file))
    with _open(file, encoding=encoding) as stream:
        source = stream.read()
    return convert(source, **kwargs)
def set_setting(self, key, value):
    """Persist *value* under *key* in the add-on's settings.xml.

    :param str key: Setting identifier.
    :param value: Setting value; must be a string type.
    :raises TypeError: When *value* is not unicode/str or bytes.
    """
    if not isinstance(value, (bytes, utils.unicode_type)):
        raise TypeError("argument 'value' for method 'setSetting' must be unicode or str not '%s'" % type(value))

    path = os.path.join(self.profile, "settings.xml")
    if not os.path.exists(path):
        # First write: make sure the profile directory exists and start
        # from an empty settings tree.
        settings_dir = os.path.dirname(path)
        if not os.path.exists(settings_dir):
            os.makedirs(settings_dir)
        tree = ETree.Element("settings")
    else:
        # Load the existing document and drop any stale entry for this key.
        tree = ETree.parse(path).getroot()
        stale = tree.find("./setting[@id='%s']" % key)
        if stale is not None:
            tree.remove(stale)

    # Append the new/updated setting element.
    ETree.SubElement(tree, "setting", {"id": key, "value": value})

    # Rewrite settings.xml, pretty printed for readability.
    raw_xml = minidom.parseString(ETree.tostring(tree)).toprettyxml(indent=" " * 4)
    with _open(path, "w", encoding="utf8") as stream:
        stream.write(raw_xml)

    # Keep the in-memory store in sync.
    self.settings[key] = value
def save_to_file(file, src, encoding="utf-8", **kwargs):
    """Write *src* to a .rst file derived from *file*, confirming overwrites."""
    target = os.path.splitext(file)[0] + ".rst"
    # Ask before clobbering an existing file unless --overwrite was given.
    if not options.overwrite and os.path.exists(target):
        answer = input("{} already exists. overwrite it? [y/n]: ".format(target))
        if answer.upper() not in ("Y", "YES"):
            print("skip {}".format(file))
            return
    with _open(target, "w", encoding=encoding) as out:
        out.write(src)
def _extractor(self, strings_path): """Extract the strings from the strings.po file""" with _open(strings_path, "r", "utf-8") as stream: file_data = stream.read() # Populate dict of strings search_pattern = 'msgctxt\s+"#(\d+)"\s+msgid\s+"(.+?)"\s+msgstr\s+"(.*?)' for strID, msgID, msStr in re.findall(search_pattern, file_data): self._strings[int(strID)] = msStr if msStr else msgID
def set_basic_args(args):
    """Normalise ``args.input``/``args.output`` into parsed SRT and writable streams.

    Dash-mapped (stdin/stdout) streams are wrapped with codecs
    reader/writer objects for ``args.encoding``; regular paths are
    opened, and inputs are parsed with ``srt.parse``.
    """
    # BUG FIX: MutableSequence lives in collections.abc; the plain
    # collections alias was removed in Python 3.10.
    from collections.abc import MutableSequence

    if not args.encoding:
        args.encoding = sys.getdefaultencoding()

    # TODO: dedupe some of this
    for stream_name in ('input', 'output'):
        log.debug('Processing stream "%s"', stream_name)
        try:
            stream = getattr(args, stream_name)
        except AttributeError:
            # For example, in the case of no_output
            continue

        r_enc = codecs.getreader(args.encoding)
        w_enc = codecs.getwriter(args.encoding)

        log.debug('Got %r as stream', stream)
        if stream in DASH_STREAM_MAP.values():
            log.debug('%s in DASH_STREAM_MAP', stream_name)
            if stream is args.input:
                args.input = srt.parse(r_enc(args.input).read())
            elif stream is args.output:
                args.output = w_enc(args.output)
        else:
            log.debug('%s not in DASH_STREAM_MAP', stream_name)
            if stream is args.input:
                if isinstance(args.input, MutableSequence):
                    # Multiple input files: parse each one.
                    for i, input_fn in enumerate(args.input):
                        if input_fn in DASH_STREAM_MAP.values():
                            if stream is args.input:
                                args.input[i] = srt.parse(
                                    r_enc(input_fn).read()
                                )
                        else:
                            f = _open(input_fn, 'r', encoding=args.encoding)
                            with f:
                                args.input[i] = srt.parse(f.read())
                else:
                    f = _open(stream, 'r', encoding=args.encoding)
                    with f:
                        args.input = srt.parse(f.read())
            else:
                args.output = _open(args.output, 'w', encoding=args.encoding)
def save_to_file(file, src, encoding='utf-8', **kwargs):
    """Save *src* as a .rst sibling of *file*, prompting before overwrite."""
    target = os.path.splitext(file)[0] + '.rst'
    if not options.overwrite and os.path.exists(target):
        # Existing target and no --overwrite: ask the user first.
        reply = input(
            '{} already exists. overwrite it? [y/n]: '.format(target))
        if reply.upper() not in ('Y', 'YES'):
            print('skip {}'.format(file))
            return
    with _open(target, 'w', encoding=encoding) as handle:
        handle.write(src)
def save_to_file(file, src, encoding='utf-8', **kwargs):
    """Persist *src* to <file-stem>.rst, asking for confirmation on overwrite."""
    target = os.path.splitext(file)[0] + '.rst'
    overwrite_ok = options.overwrite or not os.path.exists(target)
    if not overwrite_ok:
        confirm = input('{} already exists. overwrite it? [y/n]: '.format(
            target))
        overwrite_ok = confirm.upper() in ('Y', 'YES')
    if not overwrite_ok:
        print('skip {}'.format(file))
        return
    with _open(target, 'w', encoding=encoding) as f:
        f.write(src)
def set_basic_args(args):
    """Normalise ``args.input``/``args.output`` into parsed SRT and writable streams.

    Dash-mapped (stdin/stdout) streams get codecs reader/writer wrappers
    for ``args.encoding``; file paths are opened, and inputs parsed with
    ``srt.parse``.
    """
    # BUG FIX: MutableSequence lives in collections.abc; the plain
    # collections alias was removed in Python 3.10.
    from collections.abc import MutableSequence

    if not args.encoding:
        args.encoding = sys.getdefaultencoding()

    # TODO: dedupe some of this
    for stream_name in ('input', 'output'):
        log.debug('Processing stream "%s"', stream_name)
        try:
            stream = getattr(args, stream_name)
        except AttributeError:
            # For example, in the case of no_output
            continue

        r_enc = codecs.getreader(args.encoding)
        w_enc = codecs.getwriter(args.encoding)

        log.debug('Got %r as stream', stream)
        if stream in DASH_STREAM_MAP.values():
            log.debug('%s in DASH_STREAM_MAP', stream_name)
            if stream is args.input:
                args.input = srt.parse(r_enc(args.input).read())
            elif stream is args.output:
                args.output = w_enc(args.output)
        else:
            log.debug('%s not in DASH_STREAM_MAP', stream_name)
            if stream is args.input:
                if isinstance(args.input, MutableSequence):
                    # Multiple input files: parse each one in place.
                    for i, input_fn in enumerate(args.input):
                        if input_fn in DASH_STREAM_MAP.values():
                            if stream is args.input:
                                args.input[i] = srt.parse(
                                    r_enc(input_fn).read())
                        else:
                            f = _open(input_fn, 'r', encoding=args.encoding)
                            with f:
                                args.input[i] = srt.parse(f.read())
                else:
                    f = _open(stream, 'r', encoding=args.encoding)
                    with f:
                        args.input = srt.parse(f.read())
            else:
                args.output = _open(args.output, 'w', encoding=args.encoding)
def set_basic_args(args):
    """Normalise ``args.input``/``args.output`` into parsed SRT and writable streams.

    Inputs are parsed with ``srt.parse``; outputs are opened 'w+'.
    A warning is emitted when an explicit --encoding was given for a
    dash-mapped (stdin/stdout) stream, where it cannot be applied.
    """
    # BUG FIX: MutableSequence lives in collections.abc; the plain
    # collections alias was removed in Python 3.10.
    from collections.abc import MutableSequence

    encoding_explicitly_specified = True
    if args.encoding is None:
        args.encoding = DEFAULT_ENCODING
        encoding_explicitly_specified = False

    # TODO: dedupe some of this
    for stream_name in ('input', 'output'):
        log.debug('Processing stream "%s"', stream_name)
        try:
            stream = getattr(args, stream_name)
        except AttributeError:
            # For example, in the case of no_output
            continue

        log.debug('Got %r as stream', stream)
        if stream in DASH_STREAM_MAP.values():
            log.debug('%s in DASH_STREAM_MAP', stream_name)
            if stream is args.input:
                args.input = srt.parse(args.input.read())
                if encoding_explicitly_specified:
                    log.warning(STREAM_ENC_MSG, stream.name)
        else:
            log.debug('%s not in DASH_STREAM_MAP', stream_name)
            if stream is args.input:
                if isinstance(args.input, MutableSequence):
                    # Multiple input files: parse each one in place.
                    for i, input_fn in enumerate(args.input):
                        if input_fn in DASH_STREAM_MAP.values():
                            if encoding_explicitly_specified:
                                log.warning(STREAM_ENC_MSG, input_fn.name)
                            if stream is args.input:
                                args.input[i] = srt.parse(input_fn.read())
                        else:
                            with _open(input_fn, encoding=args.encoding) as f:
                                args.input[i] = srt.parse(f.read())
                else:
                    with _open(stream, encoding=args.encoding) as input_f:
                        args.input = srt.parse(input_f.read())
            else:
                args.output = _open(args.output, 'w+', encoding=args.encoding)
def changelog(self):
    """Return the add-on's changelog text.

    Prefers the <news> element of the add-on metadata; falls back to a
    ``changelog-<version>.txt`` file in the add-on directory, and
    finally to an empty string.

    :return: Changelog text, or "" when none is available.
    :rtype: str
    """
    # BUG FIX: findall() returns a list, so "findall(...) is not None"
    # was always true and "data.text" raised AttributeError. find()
    # returns the matching element or None, which is what was intended.
    data = self._metadata.find("news")
    if data is not None:
        return data.text
    changelog_file = safe_path(
        os.path.join(self.path, u"changelog-{}.txt".format(self.version)))
    if os.path.exists(changelog_file):
        with _open(changelog_file, "r", "utf8") as stream:
            return stream.read()
    return ""
def parse(source, tag="", attrs=None, encoding=None):
    """ Load an external "HTML document" into an element tree.

    :param source: A filename or file like object containing HTML data.
    :type source: str or io.TextIOBase
    :param str tag: (optional) Name of "tag / element" which is used to filter down "the tree" to a required section.
    :type tag: str
    :param attrs: (optional) The attributes of the element, that will be used, when searching for the required section.
    :type attrs: dict(str, str)
    :param encoding: (optional) Encoding used, when decoding the source data before feeding it to the parser.
    :type encoding: str

    :return: The root element of the element tree.
    :rtype: xml.etree.ElementTree.Element

    :raises UnicodeDecodeError: If decoding of *source* fails.
    """
    # Anything without a read() method is treated as a filename that we
    # open ourselves (and are therefore responsible for closing).
    if hasattr(source, "read"):
        fileobj, close_source = source, False
    else:
        fileobj = _open(source, "rb", encoding=encoding)
        close_source = True

    parser = HTMLement(tag, attrs, encoding)
    try:
        # Stream the document through the parser in 64k chunks so large
        # files never need to be held in memory at once.
        while True:
            chunk = fileobj.read(65536)
            if not chunk:
                break
            parser.feed(chunk)
        return parser.close()
    finally:
        if close_source:
            fileobj.close()
def _strings(self): # type: () -> Iterator[Tuple[int, str]] # Possible locations for english strings.po res_path = os.path.join(self.path, "resources") string_loc = [os.path.join(res_path, "language", "resource.language.en_gb", "strings.po"), os.path.join(res_path, "language", "resource.language.en_us", "strings.po"), os.path.join(res_path, "language", "English", "strings.po"), os.path.join(res_path, "strings.po")] # Return the first strings.po file that is found for path in string_loc: if os.path.exists(path): # Extract the strings from the strings.po file with _open(path, "r", encoding="utf-8") as stream: file_data = stream.read() # Populate dict of strings search_pattern = r'msgctxt\s+"#(\d+)"\s+msgid\s+"(.+?)"\s+msgstr\s+"(.*?)' for strID, msgID, msStr in re.findall(search_pattern, file_data): yield int(strID), msStr if msStr else msgID
def generate_document(language):
    """Write the manual for *language* to <language>/manual.html."""
    reload(languageHandler)
    languageHandler.setLanguage(language)
    reload(strings)
    # First documentation entry is the title; the rest is markdown body.
    title = strings.documentation[0]
    rendered = markdown.markdown(
        "\n".join(strings.documentation[1:]),
        extensions=["markdown.extensions.toc"])
    parts = [
        """<!doctype html>
<html lang="%s">
<head>
<title>%s</title>
<meta charset="utf-8">
</head>
<body>
<header><h1>%s</h1></header>
""" % (language, title, title),
        rendered,
        "\n</body>\n</html>",
    ]
    if not os.path.exists(language):
        os.mkdir(language)
    mdfile = _open("%s/manual.html" % language, "w", encoding="utf-8")
    mdfile.write("".join(parts))
    mdfile.close()
def download(self, addon):
    """
    Download any required addon and install it into the addon directory.

    Fetches <addon.id>-<addon.version>.zip from the repository, extracts
    it, preloads the addon and registers it in ``avail_addons``.

    :param Addon addon: The addon to download
    """
    filename = u"{0}-{1}.zip".format(addon.id, addon.version)
    tmp = os.path.join(self._package_dir, filename)
    logger.info("Downloading: '{}'".format(filename.encode("utf8")))

    # Remove old zipfile before download
    # This will prevent an error if the addon was manually removed by user
    if os.path.exists(tmp):
        os.remove(tmp)

    # Request the addon zipfile from server
    url_part = "{0}/{1}".format(addon.id, filename)
    url = self.repo_url.format(url_part)
    resp = self._session.get(url)

    # Read and save contents of zipfile to package directory,
    # streaming chunk by chunk to avoid holding the archive in memory.
    with _open(tmp, "wb") as stream:
        for chunk in resp.iter_content(decode_unicode=False):
            stream.write(chunk)

    # Remove the old plugin directory if exists
    # This is needed when updating addons
    udst = os.path.join(self._addon_dir, addon.id)
    sdst = safe_path(udst)
    if os.path.exists(sdst):
        shutil.rmtree(sdst)

    resp.close()
    # Unpack the freshly downloaded archive and register the addon.
    self.extract_zip(tmp)
    addon.path = udst
    addon.preload()
    avail_addons[addon.id] = addon
def generate_document(language, sourceModule):
    """Render *sourceModule*'s ``documentation`` list as translated HTML.

    The module is imported under *language*'s translation and the result
    is written to ../translated_docs/<language>/<module>.html.
    """
    # Import the source module; we will call it "strings".
    moduleName = os.path.splitext(os.path.basename(sourceModule))[0]
    searchpath = [os.path.dirname(os.path.abspath(sourceModule))]
    translation_file = moduleName
    reload(languageHandler)
    languageHandler.setLanguage(language, translation_file)
    f, p, d = imp.find_module(moduleName, searchpath)
    strings = imp.load_module(moduleName, f, p, d)
    f.close()

    rendered = markdown.markdown(
        "\n".join(strings.documentation),
        extensions=[
            "markdown.extensions.toc",
            "markdown.extensions.wikilinks",
            "markdown.extensions.tables",
            "markdown.extensions.fenced_code",
        ])
    title = strings.documentation[0]
    filename = moduleName + ".html"
    head = """<!doctype html>
<html lang="%s">
<head>
<title>%s</title>
<meta charset="utf-8">
</head>
<body>
<header><h1>%s</h1></header>
""" % (language, title, title)
    document = head + rendered + "\n</body>\n</html>"

    # Make sure the per-language output directory exists before writing.
    if not os.path.exists(os.path.join("..", "translated_docs", language)):
        os.mkdir(os.path.join("..", "translated_docs", language))
    outfile = _open("%s/%s" % ("../translated_docs/" + language, filename), "w", encoding="utf-8")
    outfile.write(document)
    outfile.close()
def __setitem__(self, key, value):
    """Set an add-on setting and persist all settings to disk."""
    if not isinstance(value, (bytes, unicode_type)):
        raise TypeError(
            "argument 'value' for method 'setSetting' must be unicode or str not '%s'" % type(value))

    # Save setting to local dict before saving to disk
    super(Settings, self).__setitem__(key, value)

    # Easiest way to store one setting is to rewrite them all.
    # (Loop names differ from the method parameters to avoid shadowing.)
    tree = ETree.Element("settings")
    for setting_id, setting_value in self.items():
        ETree.SubElement(tree, "setting", {"id": setting_id, "value": setting_value})

    # Create plugin data directory if it doesn't exist yet.
    settings_dir = os.path.dirname(self._settings_path)
    if not os.path.exists(settings_dir):
        os.makedirs(settings_dir)

    raw_xml = minidom.parseString(ETree.tostring(tree)).toprettyxml(
        indent=" " * 4, encoding="utf8")
    with _open(self._settings_path, "wb") as stream:
        stream.write(raw_xml)
def read_meta(self, filename): title = None metadata = {} with _open(filename, encoding='utf-8') as f: for line in f: line = line.rstrip() meta_match = self.meta_re.match(line) author_match = self.author_re.match(line) rev_match = self.rev_re.match(line) if line.strip() != '' and title == None: title = line metadata['title'] = title elif line.strip() == '' and title != None: break elif meta_match: name = meta_match.group(1).lower() value = meta_match.group(2) metadata[name] = self.process_metadata(name, value) if name == 'revdate': metadata['date'] = self.process_metadata(name, value) elif author_match: author = author_match.group(1) email = author_match.group(2) metadata['author'] = self.process_metadata('author', author) metadata['email'] = self.process_metadata('email', email) elif rev_match: rev = rev_match.group(1) date = rev_match.group(2) comment = rev_match.group(3) metadata['revdate'] = date metadata['date'] = self.process_metadata('date', date) metadata['revnumber'] = rev metadata['revremark'] = comment else: continue return metadata
from codecs import open as _open from os import path from setuptools import setup HERE = path.abspath(path.dirname(__file__)) VERSION = '0.1.4' with _open(path.join(HERE, 'README.md'), encoding='utf-8') as f: LONG_DESCRIPTION = f.read() setup(name='jsondispatch', version=VERSION, description='Dispatch commands with JSON HTTP requests', long_description=LONG_DESCRIPTION, author='Pierre Penninckx', author_email='*****@*****.**', license='GPLv3', packages=['jsondispatch', 'jsondispatch/trigger_commands'], url='https://github.com/ibizaman/jsondispatch', download_url= 'https://github.com/ibizaman/jsondispatch/archive/{}.tar.gz'.format( VERSION), keywords=['json', 'aiohttp'], entry_points={ 'console_scripts': ['jsondispatch=jsondispatch.__main__:main'], }, install_requires=[ 'aiohttp == 2.3.7', 'aiohttp_cors == 0.6.0', 'yarl == 0.18.0',
def open(filename): """Open a file and return it's content""" return _open(filename, encoding='utf-8').read()
#!/usr/bin/env python3
"""Scale all timestamps of an SRT subtitle file by a constant factor.

Usage: script.py <filename> <scale> [encoding]
"""
import sys

import srt

from codecs import open as _open

filename = sys.argv[1]
scale = float(sys.argv[2])
# BUG FIX: "sys.argv[3] or 'utf8'" raised IndexError when the optional
# encoding argument was omitted; "or" only guards falsy values, not a
# missing index.
encoding = sys.argv[3] if len(sys.argv) > 3 else "utf8"

with _open(filename, "r", encoding=encoding) as f:
    subs = list(srt.parse(f.read()))

for sub in subs:
    # start/end are timedeltas; multiplying scales them relative to t=0.
    sub.start = scale * sub.start
    sub.end = scale * sub.end

print(srt.compose(subs))
def open(filename): """Open a file and return it's content""" with _open(filename, encoding='utf-8') as f: return f.read()
def __init__(self, file_path):
    """Load the XML document at *file_path*, stripping the default namespace.

    The xmlns declaration is removed from the raw text before parsing so
    element lookups need no namespace prefixes; the namespace is then
    re-recorded as an attribute on the root element.
    """
    # BUG FIX: the original left the file handle open (read without a
    # context manager); close it deterministically.
    with _open(file_path, encoding='utf-8') as f:
        xml = f.read().replace(' xmlns="%s"' % NS, '')
    self.root = fromstring(xml)
    self.root.set('xmlns', NS)
def readme(): # Get the long description from the README file readme_file = path.join(path.abspath(path.dirname(__file__)), "README.rst") with _open(readme_file, "rb", encoding='utf-8') as opened_file: return opened_file.read()
from codecs import open as _open from os import path from setuptools import setup HERE = path.abspath(path.dirname(__file__)) VERSION = "0.1.3" PACKAGE = "godaddyip" with _open(path.join(HERE, "README.md"), encoding="utf-8") as f: LONG_DESCRIPTION = f.read() setup( name=PACKAGE, version=VERSION, description="Maintains A and CNAME records matching current ip in Godaddy.", long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", author="Pierre Penninckx", author_email="*****@*****.**", license="GPLv3", packages=[PACKAGE.lower()], url="https://github.com/ibizaman/" + PACKAGE.lower(), download_url="https://github.com/ibizaman/{}/archive/{}.tar.gz".format( PACKAGE.lower(), VERSION ), keywords=["godaddy", "dyndns"], entry_points={ "console_scripts": ["{0}={0}.__main__:main".format(PACKAGE.lower())], }, install_requires=[