def ls(self, path): print "DEBUG Amazon S3 Ls => ", path fileNameList = [] pathCount = path.count('/') allKeys = self.bucket.get_all_keys() print "LS Debug = ", allKeys if len(path) == 1: for k in allKeys: keyPathCount = k.key.count('/') encoding = locale.getdefaultlocale()[1] name = ('%s' % k.name).encode(encoding) if name[-1:] == '/' and pathCount == keyPathCount: if name.startswith(path[1:]): fileNameList.append(os.path.dirname(name)) if name[-1:] != '/' and pathCount - 1 == keyPathCount: if name.startswith(path[1:]): fileNameList.append(os.path.basename(name)) else: path = path + '/' for k in allKeys: encoding = locale.getdefaultlocale()[1] name = ('%s' % k.name).encode(encoding) name = '/' + name if name.startswith(path) and len(name) > len(path): fileNameList.append(os.path.basename(name)) return fileNameList
def plugin_widget(self):
    """Build the tray popup: a calendar plus a date/time-settings shortcut.

    Returns the aligned container holding the calendar and the button.

    Fix: ``self.width`` was assigned only when ``locale.getdefaultlocale``
    returned a usable value — and both branches assigned the same 230 —
    so a missing locale left ``self.width`` unset and crashed the
    SelectButton sizing below.  The width is now set unconditionally.
    """
    self.tray_icon.set_tooltip_text("%s %s" % (
        time.strftime("%Y-%m-%d"),
        self.WEEK[int(time.strftime("%w")) - 1]))
    align = self.__setup_align()
    box = gtk.VBox(spacing=5)
    calendar_align = self.__setup_align()
    calendar = deepin_lunar.new()
    self.calendar = deepin_lunar.new()
    if MAIN_LANG == "en_US":
        # NOTE(review): only self.calendar is swapped for the non-lunar
        # widget; the local `calendar` actually packed into the popup
        # stays the lunar one — confirm whether that is intended.
        self.calendar = dltk_calendar.new()
    calendar.mark_day(time.localtime().tm_mday)
    calendar.set_day_padding(0)
    calendar.get_handle().set_property("show-details", False)
    # Locale-independent width (the original set 230 in both branches).
    self.width = 230
    calendar.get_handle().set_size_request(self.width, 172)
    calendar_align.add(calendar.get_handle())
    select_align = self.__setup_align()
    select_button = SelectButton(_("Change date and time settings"),
                                 font_size=10, ali_padding=5)
    select_button.set_size_request(self.width, 25)
    select_align.add(select_button)
    select_button.connect("clicked", self.__on_day_selected)
    box.pack_start(calendar_align, False, False)
    box.pack_start(select_align, False, False)
    align.add(box)
    return align
def main(): artist = sys.argv[1].decode(locale.getdefaultlocale()[1]) title = sys.argv[2].decode(locale.getdefaultlocale()[1]) print 'Searching ', artist, title, '...' szList = SearchLyric(artist,title) print szList """ if len(li) > 0: print "count=%d" % len(li) j = 1 for i in li: print '[%d]\tid=%s\tartist=%s\ttitle=%s' % (j, i[0], i[1], i[2]) j += 1 try: command=raw_input('Choise:') except EOFError: command=='0' command = int(command) - 1 if command>=0 and command<len(li): print DownLoadLyric(li[command][0],li[command][1],li[command][2]) else: print 'No lyrics found' """ return 0
def set_context(self, context):
    """
    Setup underlying editor context. You should call this method
    before using any Zen Coding action.
    @param context: context object
    """
    self.context = context
    # window
    self.buffer = self.context.get_active_view().get_buffer()
    self.view = context.get_active_view()
    self.document = context.get_active_document()
    # Derive 'lang' (language only) and 'locale' (language-REGION) for
    # zen_core, defaulting to en_US when no locale is configured.
    loc = locale.getdefaultlocale()[0]
    default_locale = loc if loc else "en_US"
    lang = re.sub(r'_[^_]+$', '', default_locale)
    if lang == default_locale:
        # No region suffix to strip: use the locale verbatim.
        zen_core.set_variable('lang', default_locale)
        zen_core.set_variable('locale', default_locale)
    else:
        zen_core.set_variable('lang', lang)
        zen_core.set_variable('locale', default_locale.replace('_', '-'))
    self.encoding = self.document.get_encoding().get_charset()
    zen_core.set_variable('charset', self.encoding)
    # Mirror the editor's indentation preference.
    if self.view.get_insert_spaces_instead_of_tabs():
        zen_core.set_variable('indentation',
                              " " * context.get_active_view().get_tab_width())
    else:
        zen_core.set_variable('indentation', "\t")
def write_system_report(self): string = '' localtime = time.localtime() timeString = time.strftime("==========================%Y-%m-%d %H:%M:%S ", localtime) timezone = -(time.altzone if time.localtime().tm_isdst else time.timezone) timeString += "Z" if timezone == 0 else "+" if timezone > 0 else "-" timeString += time.strftime("%H:%M==========================", time.gmtime(abs(timezone))) string += timeString string += "\ndefault locale: " + locale.getdefaultlocale()[0] + ' ' \ + locale.getdefaultlocale()[1] string += "\nhome directory: " + path.expanduser('~') string += "\nlog file path:\t" + self.fullPath string += "\nmachine:\t" + platform.machine() string += "\narchitecture:\t" + platform.architecture()[0] + ' ' \ + platform.architecture()[1] string += "\nplatform:\t" + platform.platform() try: self.write_file('\n\n\n' + string + '\n') except: print '+++++ unable to write to file +++++' print string + '\n'
def _get_azure_cli_properties(self):
    """Collect the telemetry custom properties describing this invocation."""
    env_name = self.arg_complete_env_name
    # Invocations coming from shell completion are tagged separately.
    if env_name and env_name in os.environ:
        source = 'completer'
    else:
        source = 'az'
    ext_info = ('{}@{}'.format(self.extension_name, self.extension_version)
                if self.extension_name else None)
    result = {}
    # (name, value-or-callable) pairs, applied in order below.
    properties = [
        ('Source', source),
        ('ClientRequestId',
         lambda: self.application.data['headers']['x-ms-client-request-id']),
        ('CoreVersion', _get_core_version),
        ('InstallationId', _get_installation_id),
        ('ShellType', _get_shell_type),
        ('UserAzureId', _get_user_azure_id),
        ('UserAzureSubscriptionId', _get_azure_subscription_id),
        ('DefaultOutputType',
         lambda: _get_config().get('core', 'output', fallback='unknown')),
        ('EnvironmentVariables', _get_env_string),
        ('Locale',
         lambda: '{},{}'.format(locale.getdefaultlocale()[0],
                                locale.getdefaultlocale()[1])),
        ('StartTime', str(self.start_time)),
        ('EndTime', str(self.end_time)),
        ('OutputType', self.output_type),
        ('RawCommand', self.raw_command),
        ('Params', ','.join(self.parameters or [])),
        ('PythonVersion', platform.python_version()),
        ('ModuleCorrelation', self.module_correlation),
        ('ExtensionName', ext_info),
        ('Feedback', self.feedback),
        ('ExtensionManagementDetail', self.extension_management_detail),
        ('Mode', self.mode),
    ]
    for prop_name, value in properties:
        set_custom_properties(result, prop_name, value)
    return result
def addSfntNames(lst, strid=None):
    """Append parsed SFNT name records to the global ``sfnt`` list.

    Each element of *lst* is "id:strid:text" (or "id:text" when *strid*
    is supplied).  The id may be decimal or 0x-prefixed hexadecimal.
    On Python 2 the text is transcoded from the locale encoding to UTF-8;
    on Python 3 (detected via the missing str.decode) it is kept as-is.
    """
    if lst is None:
        return
    for names in lst:
        try:
            # Python 2 path: byte strings expose .decode.
            if strid is None:
                parts = names.split(':', 2)
                sfnt.append([
                    int(parts[0], 16 if parts[0][0:2] == '0x' else 10),
                    int(parts[1]),
                    parts[2].decode(getdefaultlocale()[1]).encode('UTF-8')])
            else:
                parts = names.split(':', 1)
                sfnt.append([
                    int(parts[0], 16 if parts[0][0:2] == '0x' else 10),
                    strid,
                    parts[1].decode(getdefaultlocale()[1]).encode('UTF-8')])
        except AttributeError:
            # Python 3 path: str has no .decode, store the text unchanged.
            if strid is None:
                parts = names.split(':', 2)
                sfnt.append([
                    int(parts[0], 16 if parts[0][0:2] == '0x' else 10),
                    int(parts[1]), parts[2]])
            else:
                parts = names.split(':', 1)
                sfnt.append([
                    int(parts[0], 16 if parts[0][0:2] == '0x' else 10),
                    strid, parts[1]])
def __init__(self, desktop=None):
    """Parse a freedesktop .desktop file ([Desktop Entry] section only).

    Populates Name/Type/Comment/Exec and a ';'-joined Keywords string
    built from the plain and locale-suffixed Name/Comment/GenericName
    entries plus Categories.

    Fixes: the file handle is now closed even when parsing raises (with
    statement), ``locale.getdefaultlocale`` is queried once instead of
    six times, and a ``None`` default locale no longer crashes the
    Keywords construction.
    """
    self.Name = ""
    self.Type = ""  # we only process those with Type "Application"
    self.Comment = ""  # GenericName or Comment
    self.Exec = ""
    # split by ';', composed by Name(+locale);Comment(+locale);GenericName(+locale);Categories;
    self.Keywords = ""
    if desktop is None:
        return
    # Interpret the file: collect key=value pairs until a section other
    # than [Desktop Entry] begins.
    entries = {}
    with open(desktop, "r") as f:
        for line in f:
            if '=' in line:
                key, value = line.strip().split('=', 1)
                entries[key] = value
            elif line[:8] == '[Desktop' and line[9:15] != 'Entry]':
                break
    self.Name = entries.get('Name', '')
    self.Type = entries.get('Type', '')
    self.Comment = entries.get('Comment', entries.get('GenericName', ''))
    self.Exec = entries.get('Exec', '').replace(" %u", "").replace(" %U", "")
    # Locale-suffixed variants, e.g. Name[zh_CN]; empty locale is harmless.
    loc = locale.getdefaultlocale()[0] or ''
    keywords = []
    for key in ('Name', 'Comment', 'GenericName'):
        for variant in (key, '%s[%s]' % (key, loc)):
            if variant in entries:
                keywords.append(entries[variant])
    if 'Categories' in entries:
        keywords.append(entries['Categories'])
    self.Keywords = ''.join(k + ';' for k in keywords)
def getPlatInfo(**kwargs):
    """kwargs: dict: {"module_name": module,...} used to display version
    information about modules."""
    log.debug("*** Platform information")
    log.debug(" System: %s" % platform.system())
    log.debug(" Release: %s" % platform.release())
    log.debug(" Version: %s" % platform.version())
    log.debug(" Architecture: %s, %s" % platform.architecture())
    log.debug(" Dist: %s, %s, %s" % platform.dist())
    log.debug(" Machine: %s" % platform.machine())
    log.debug(" Processor: %s" % platform.processor())
    log.debug(" Locale: %s" % locale.getdefaultlocale()[0])
    log.debug(" Encoding: %s" % locale.getdefaultlocale()[1])
    # Attribute names that look like a version marker, most specific first.
    reVer = re.compile(r"__version__|_version_|__version|_version|version|"
                       "__ver__|_ver_|__ver|_ver|ver", re.IGNORECASE)
    for name, mod in kwargs.items():
        attr_dump = "%s" % dir(mod)
        candidates = list(re.findall(reVer, attr_dump))
        if not candidates:
            continue
        while candidates:
            attr_name = candidates.pop()
            attr_val = getattr(mod, attr_name, None)
            if attr_val:
                if type(attr_val) in (str, unicode, int, list, tuple):
                    ver = "%s" % attr_val
                    break
                elif "%s" % type(attr_val) == "<type 'module'>":
                    # Submodule: queue its own version-like attributes.
                    candidates += ["%s.%s" % (attr_name, a)
                                   for a in re.findall(reVer, "%s" % dir(attr_val))]
                else:
                    ver = attr_val()
            else:
                ver = "%s" % type(attr_val)
        log.debug(" %s version: %s" % (name, ver))
    log.debug("***")
def run_event(self):
    """Echo stdin through, truncate it to MAX_CHARS, and post a code event."""
    args = self.args
    stream = Stream(id=args.id, password=args.password,
                    generator=args.generator)
    stdin_enc = sys.stdin.encoding or locale.getdefaultlocale()[1]
    collected = []
    total = 0
    for raw in sys.stdin:
        sys.stdout.write(raw)  # pass-through echo
        decoded = raw.decode(stdin_enc, 'replace')
        # Keep accumulating only while under the size cap.
        if total + len(decoded) <= self.MAX_CHARS:
            collected.append(decoded)
            total += len(decoded)
    text = "".join(collected)[:self.MAX_CHARS]
    if isinstance(text, bytes):
        # Defensive: ensure the payload is text before building the event.
        stdin_enc = sys.stdin.encoding or locale.getdefaultlocale()[1]
        text = text.decode(stdin_enc, 'replace')
    event = Event(type="code",
                  title=args.title or 'Capture Event',
                  markup=args.markup,
                  description=args.description,
                  text=text)
    return stream.add_event(event)
def _get_azure_cli_properties(self):
    """Collect the telemetry custom properties for this CLI invocation."""
    if self.arg_complete_env_name in os.environ:
        source = 'completer'
    else:
        source = 'az'
    result = {}
    # (name, value-or-callable) pairs, applied in order below.
    properties = [
        ('Source', source),
        ('ClientRequestId',
         lambda: self.application.session['headers']['x-ms-client-request-id']),
        ('CoreVersion', _get_core_version),
        ('InstallationId', _get_installation_id),
        ('ShellType', _get_shell_type),
        ('UserAzureId', _get_user_azure_id),
        ('UserAzureSubscriptionId', _get_azure_subscription_id),
        ('DefaultOutputType',
         lambda: _get_azure_cli_config().get('core', 'output',
                                             fallback='unknown')),
        ('EnvironmentVariables', _get_env_string),
        ('Locale',
         lambda: '{},{}'.format(locale.getdefaultlocale()[0],
                                locale.getdefaultlocale()[1])),
        ('StartTime', str(self.start_time)),
        ('EndTime', str(self.end_time)),
        ('OutputType', self.output_type),
        ('Parameters', self.parameters),
        ('PythonVersion', platform.python_version()),
    ]
    for prop_name, value in properties:
        self.set_custom_properties(result, prop_name, value)
    return result
def DetectEncodingAndRead(self, fd):
    """Try a list of candidate encodings on *fd* and return its decoded
    content, remembering the winning encoding in ``self._encoding``.

    Returns None when no candidate can decode the file.

    Fixes: environment-derived candidates (e.g. ``getdefaultlocale``)
    can be None and were appended as-is; they are now filtered out.
    The bare ``except:`` (which also swallowed KeyboardInterrupt) is
    narrowed to ``except Exception``.
    """
    encodings = ["utf-8", "utf-16"]
    # Environment-derived candidates, deduplicated and None-filtered.
    for candidate in (locale.getpreferredencoding(),
                      sys.getdefaultencoding(),
                      locale.getdefaultlocale()[1],
                      sys.getfilesystemencoding(),
                      'latin-1'):
        if candidate and candidate not in encodings:
            encodings.append(candidate)
    for enc in encodings:
        fd.seek(0)
        try:
            reader = codecs.getreader(enc)(fd)
            content = reader.read()
        except Exception:
            # Decoding failed: try the next candidate.
            continue
        else:
            self._encoding = enc
            logger.info("Detect file %s 's encoding is %s"
                        % (self.GetFilename(), self._encoding))
            return content
    logger.error("Fail to detect the encoding for file %s"
                 % self.GetFilename())
    return None
def _run(cmd, args=[], source="", cwd=None, env=None):
    """Execute *cmd* and return {'okay': bool, 'out': str, 'err': str}.

    On Windows the command runs through the shell with *source* piped to
    stdin; elsewhere *source* is appended as a final argument and PATH is
    taken from settings (optionally extended by 'envPATH').
    """
    if type(args) is not list:
        args = [args]
    if sys.platform == "win32":
        full_cmd = [cmd] + args
        if sys.version_info[0] == 2:
            # Python 2 needs byte-string arguments in the locale encoding.
            for i in range(len(full_cmd)):
                full_cmd[i] = full_cmd[i].encode(locale.getdefaultlocale()[1])
        proc = Popen(full_cmd, env=env, cwd=cwd,
                     stdout=PIPE, stdin=PIPE, stderr=PIPE, shell=True)
        try:
            stat = proc.communicate(input=source)
        except:
            stat = proc.communicate(input=source.encode("utf8"))
        enc = locale.getdefaultlocale()[1]
        return {"okay": proc.returncode == 0,
                "out": stat[0].decode(enc),
                "err": stat[1].decode(enc)}
    if env is None:
        env = {"PATH": settings_get('binDir', '/usr/local/bin')}
    # adding custom PATHs from settings
    custom_path = settings_get('envPATH', "")
    if custom_path:
        env["PATH"] = env["PATH"] + ":" + custom_path
    command = [cmd] + args if source == "" else [cmd] + args + [source]
    proc = Popen(command, env=env, cwd=cwd, stdout=PIPE, stderr=PIPE)
    stat = proc.communicate()
    return {"okay": proc.returncode == 0,
            "out": stat[0].decode('utf-8'),
            "err": stat[1].decode('utf-8')}
def get_context_data(self, **kwargs):
    """Expose locale/encoding diagnostics to the template context.

    Adds the current and default locale plus the filesystem and default
    string encodings on top of the parent context.

    Fix: 'default_locale' was assigned twice with the identical value;
    the duplicate statement is removed.
    """
    context = super().get_context_data(**kwargs)
    context['locale'] = str(locale.getlocale())
    context['default_locale'] = str(locale.getdefaultlocale())
    context['filesystem_encoding'] = str(sys.getfilesystemencoding())
    context['default_encoding'] = str(sys.getdefaultencoding())
    return context
def convStr(str, errors='ignore'):
    """Sanitise and convert *str*: map Simplified Chinese to the TW/HK
    variant when the default locale asks for it, then re-encode the text
    from UTF-8 into the locale encoding."""
    lang = locale.getdefaultlocale()[0]
    if lang == 'zh_TW':
        conv = chans.toTW
    elif lang == 'zh_HK':
        conv = chans.toHK
    else:
        # Any other locale: leave the text untouched.
        conv = lambda x: x
    return conv(str).decode('utf-8').encode(locale.getdefaultlocale()[1], errors)
def uniCoding(str):
    """Re-encode *str* from the locale encoding into Vim's &encoding.

    Returns the input unchanged when no locale encoding is known or the
    bytes cannot be decoded.
    """
    try:
        enc = locale.getdefaultlocale()[1]
        if enc is None:
            return str
        return str.decode(enc).encode(vim.eval("&encoding"))
    except UnicodeDecodeError:
        return str
def lfDecode(str):
    """Convert *str* from Vim's &encoding into the locale encoding.

    Returns the input unchanged when no locale encoding is available or
    decoding fails.
    """
    try:
        enc = locale.getdefaultlocale()[1]
        if enc is None:
            return str
        return str.decode(lfEval("&encoding")).encode(enc)
    except UnicodeDecodeError:
        return str
def check_functional_environment(config):
    """Abort configuration when the process locale settings are broken.

    ``locale.getdefaultlocale()`` raises KeyError/ValueError when the
    LANG-family variables name an unknown locale; surface that as a
    config error with a remediation hint.
    """
    try:
        locale.getdefaultlocale()
    except (KeyError, ValueError) as exc:
        message = "\n".join([
            "Your environment has incorrect locale settings. This means python cannot handle strings safely.",
            " Please check 'LANG', 'LC_CTYPE', 'LC_ALL' and 'LANGUAGE'"
            " are either unset or set to a valid locale.",
            str(exc)
        ])
        config.error(message)
def add_translation():
    """Locate and install the gettext catalogue for "youtubed-2x".

    Tries, in order: a local ./i18n tree (development checkout), the
    system default locale directory, a Py2exe bundle layout on Windows,
    and finally the XDG data dirs; falls back to untranslated strings
    when no .mo file can be found.
    """
    lc, enc = locale.getdefaultlocale()
    if os.path.exists(os.path.join(os.path.dirname(sys.argv[0]), "i18n")):
        # Running locally
        logging.debug("Running local")
        gettext.bindtextdomain("youtubed-2x", "i18n")
        # NOTE(review): redundant — lc/enc were already fetched above.
        lc, enc = locale.getdefaultlocale()
        lang = gettext.translation("youtubed-2x", "i18n", languages=[lc],
                                   fallback=True)
        logging.debug(lang)
        lang.install()
        # gettext.install ("youtubed-2x", "i18n")
        gtk.glade.bindtextdomain("youtubed-2x", "i18n")
    elif gettext.find("youtubed-2x"):
        # Installed. .mo file is in default locale location
        logging.debug("Found default locale")
        gettext.install("youtubed-2x")
        gtk.glade.bindtextdomain("youtubed-2x")
    elif WINDOWS and os.path.exists(os.path.join(sys.prefix, "share", "locale")):
        # Windows when using build made with Py2exe
        logging.debug("Py2exe build")
        locale_dir = os.path.join(sys.prefix, "share", "locale")
        gettext.bindtextdomain("youtubed-2x", locale_dir)
        lang = gettext.translation("youtubed-2x", locale_dir, languages=[lc],
                                   fallback=True)
        logging.debug(lang)
        lang.install()
        # gettext.install ("youtubed-2x", "i18n")
        gtk.glade.bindtextdomain("youtubed-2x", locale_dir)
    else:
        # Installed. Try to discover locale location
        logging.debug("Installed")
        locale_dir = None
        if "XDG_DATA_DIRS" in os.environ:
            data_dirs = os.environ["XDG_DATA_DIRS"].split(":")
            for data_dir in data_dirs:
                mofile = gettext.find("youtubed-2x",
                                      os.path.join(data_dir, "locale"))
                logging.debug(mofile)
                if mofile:
                    locale_dir = os.path.join(data_dir, "locale")
                    break
        logging.debug(locale_dir)
        if locale_dir:
            gettext.install("youtubed-2x", locale_dir)
            gtk.glade.bindtextdomain("youtubed-2x", locale_dir)
        else:
            # If .mo file could not be found, ignore the issue.
            # Non-translated strings will be used. Install _()
            # to global namespace
            gettext.install("youtubed-2x")
            gtk.glade.bindtextdomain("youtubed-2x")
            # gettext.install ("youtubed-2x")
    gtk.glade.textdomain("youtubed-2x")
    logging.debug(_)
def register():
    """Register the add-on's classes and wire every menu/panel hook.

    Also loads the preview icon, forces international-font and
    translated-UI preferences on, and applies the locale-dependent
    language tweaks the original performed.
    """
    bpy.utils.register_module(__name__)
    # Menu/panel hooks, appended in the original registration order.
    hooks = [
        (bpy.types.INFO_MT_file_import, model_import.menu_func),
        (bpy.types.INFO_MT_file_export, model_export.menu_func),
        (bpy.types.INFO_MT_file_import, anm_import.menu_func),
        (bpy.types.INFO_MT_file_export, anm_export.menu_func),
        (bpy.types.IMAGE_MT_image, tex_import.menu_func),
        (bpy.types.IMAGE_MT_image, tex_export.menu_func),
        (bpy.types.TEXT_MT_text, mate_import.TEXT_MT_text),
        (bpy.types.TEXT_MT_text, mate_export.TEXT_MT_text),
        (bpy.types.DATA_PT_context_arm, misc_DATA_PT_context_arm.menu_func),
        (bpy.types.DATA_PT_modifiers, misc_DATA_PT_modifiers.menu_func),
        (bpy.types.DATA_PT_vertex_groups, misc_DATA_PT_vertex_groups.menu_func),
        (bpy.types.IMAGE_HT_header, misc_IMAGE_HT_header.menu_func),
        (bpy.types.IMAGE_PT_image_properties, misc_IMAGE_PT_image_properties.menu_func),
        (bpy.types.INFO_MT_add, misc_INFO_MT_add.menu_func),
        (bpy.types.INFO_MT_curve_add, misc_INFO_MT_curve_add.menu_func),
        (bpy.types.INFO_MT_help, misc_INFO_MT_help.menu_func),
        (bpy.types.MATERIAL_PT_context_material, misc_MATERIAL_PT_context_material.menu_func),
        (bpy.types.MESH_MT_shape_key_specials, misc_MESH_MT_shape_key_specials.menu_func),
        (bpy.types.MESH_MT_vertex_group_specials, misc_MESH_MT_vertex_group_specials.menu_func),
        (bpy.types.OBJECT_PT_context_object, misc_OBJECT_PT_context_object.menu_func),
        (bpy.types.OBJECT_PT_transform, misc_OBJECT_PT_transform.menu_func),
        (bpy.types.RENDER_PT_bake, misc_RENDER_PT_bake.menu_func),
        (bpy.types.RENDER_PT_render, misc_RENDER_PT_render.menu_func),
        (bpy.types.TEXTURE_PT_context_texture, misc_TEXTURE_PT_context_texture.menu_func),
        (bpy.types.TEXT_HT_header, misc_TEXT_HT_header.menu_func),
        (bpy.types.VIEW3D_MT_pose_apply, misc_VIEW3D_MT_pose_apply.menu_func),
        (bpy.types.INFO_HT_header, misc_INFO_HT_header.menu_func),
    ]
    for menu, func in hooks:
        menu.append(func)
    pcoll = bpy.utils.previews.new()
    icon_dir = os.path.dirname(__file__)
    pcoll.load('KISS', os.path.join(icon_dir, "kiss.png"), 'IMAGE')
    common.preview_collections['main'] = pcoll
    system = bpy.context.user_preferences.system
    if not system.use_international_fonts:
        system.use_international_fonts = True
    if not system.use_translate_interface:
        system.use_translate_interface = True
    try:
        import locale
        if system.language == 'DEFAULT' and locale.getdefaultlocale()[0] != 'ja_JP':
            system.language = 'en_US'
    except:
        pass
    try:
        import locale
        # NOTE(review): this unregisters the add-on for non-Japanese
        # locales — present in the original; confirm it is intentional.
        if locale.getdefaultlocale()[0] != 'ja_JP':
            unregister()
    except:
        pass
def __init__(self, raise_event, on_disconnect, idlequeue, estabilish_tls,
             certs, tls_version, cipher_list, xmpp_server, domain, bosh_dict,
             proxy_creds):
    """BOSH (XMPP-over-HTTP) transport state.

    *bosh_dict* carries the user's BOSH settings (URI, wait/hold counts,
    pipelining, proxy usage); *xmpp_server* is a (host, port) route.
    """
    NonBlockingTransport.__init__(self, raise_event, on_disconnect,
                                  idlequeue, estabilish_tls, certs,
                                  tls_version, cipher_list)
    self.bosh_sid = None
    # xml:lang for BOSH bodies: language part of the locale, or 'en'.
    if locale.getdefaultlocale()[0]:
        self.bosh_xml_lang = locale.getdefaultlocale()[0].split('_')[0]
    else:
        self.bosh_xml_lang = 'en'
    self.http_version = 'HTTP/1.1'
    self.http_persistent = True
    self.http_pipelining = bosh_dict['bosh_http_pipelining']
    self.bosh_to = domain
    self.route_host, self.route_port = xmpp_server
    self.bosh_wait = bosh_dict['bosh_wait']
    # Without HTTP pipelining the server may hold only one request.
    if not self.http_pipelining:
        self.bosh_hold = 1
    else:
        self.bosh_hold = bosh_dict['bosh_hold']
    self.bosh_requests = self.bosh_hold
    self.bosh_uri = bosh_dict['bosh_uri']
    self.bosh_content = bosh_dict['bosh_content']
    self.over_proxy = bosh_dict['bosh_useproxy']
    # String form because the value goes straight into the XML attribute.
    if estabilish_tls:
        self.bosh_secure = 'true'
    else:
        self.bosh_secure = 'false'
    self.tls_version = tls_version
    self.cipher_list = cipher_list
    self.use_proxy_auth = bosh_dict['useauth']
    self.proxy_creds = proxy_creds
    self.wait_cb_time = None
    self.http_socks = []
    self.stanza_buffer = []
    self.prio_bosh_stanzas = []
    self.current_recv_handler = None
    self.current_recv_socket = None
    self.key_stack = None
    self.ack_checker = None
    self.after_init = False
    self.proxy_dict = {}
    if self.over_proxy and self.estabilish_tls:
        self.proxy_dict['type'] = 'http'
        # with SSL over proxy, we do HTTP CONNECT to proxy to open a channel to
        # BOSH Connection Manager
        host, port = urisplit(self.bosh_uri)[1:3]
        self.proxy_dict['xmpp_server'] = (host, port)
        self.proxy_dict['credentials'] = self.proxy_creds
    # ssl variables
    self.ssl_certificate = None
    self.ssl_errnum = 0
def decodeHtmlEntity(s):
    """Sanitise *s* against the locale encoding and normalise UTF-8
    non-breaking spaces to plain spaces; empty/None input yields ''."""
    if not s:
        return ''
    import locale
    enc = locale.getdefaultlocale()[1]
    # Round-trip through the locale codec dropping undecodable bytes,
    # then replace the UTF-8 NBSP sequence with an ordinary space.
    return s.decode(enc, "ignore").encode(enc).replace("\xc2\xa0", " ")
def set_language():
    """Install the gettext translation chosen via the D-Bus locale setting."""
    method = get_dbus_method('getLocale')
    if method != -1:
        loc = method()
        if loc == "system_default":
            loc, enc = locale.getdefaultlocale()
        else:
            # NOTE(review): this branch discards the locale returned by
            # D-Bus and also falls back to the system default — identical
            # to the branch above.  Looks like a bug; confirm whether the
            # configured `loc` should be kept here instead.
            loc, enc = locale.getdefaultlocale()
        lang = gettext.translation(APPNAME, LANGDIR, [loc])
        lang.install()
def lfEncode(str):
    """Convert *str* from the locale encoding into ``lf_encoding``.

    Returns the input unchanged when no locale encoding is known or the
    conversion fails.
    """
    try:
        enc = locale.getdefaultlocale()[1]
        if enc is None:
            return str
        return str.decode(enc).encode(lf_encoding)
    except (ValueError, UnicodeDecodeError):
        return str
def do_sync_folder(self, path):
    # Two-way sync of one Dropbox folder with its local mirror under
    # *droppath*: download entries missing locally, and compare
    # modification timestamps for entries present on both sides.
    # NOTE(review): indentation below is reconstructed from flattened
    # source — nesting of some statements is a best guess; confirm.
    term = DropboxTerm(APP_KEY, APP_SECRET)
    """"""
    resp = self.api_client.metadata(path)
    dirlist = os.listdir(droppath + "/" + path)
    rand1 = random.random()  # NOTE(review): never used afterwards
    if 'contents' in resp:
        for f in resp['contents']:
            name = os.path.basename(f['path'])
            encoding = locale.getdefaultlocale()[1]
            if ('%s' % name).encode(encoding) not in dirlist:
                print ('%s' % name).encode(encoding) + " not found."
                if not os.path.isfile(('%s' % name).encode(encoding)):
                    dir = f['is_dir']
                    if not dir:
                        # Plain file missing locally: fetch it from Dropbox.
                        term.onecmd('get \"' + path + "/" + ('%s' % name).encode(encoding) + '\" \"' + droppath + "/" + path + ('%s' % name).encode(encoding) + '\"')
                    if dir:
                        # Directory missing locally: create it.
                        os.system('mkdir \"' + droppath + "/" + path + ('%s' % name).encode(encoding) + '\"')
            else:
                name = os.path.basename(f['path'])
                encoding = locale.getdefaultlocale()[1]
                print ('%s' % name).encode(encoding) + " found. Checking..."
                modified = f['modified']
                date1 = modified[5:]
                if os.path.isfile(('%s' % name).encode(encoding)):
                    t = time.ctime(os.path.getmtime(('%s' % name).encode(encoding)))
                    date2 = t[4:]
                # NOTE(review): `hour` is never defined before this line —
                # it raises NameError as written; TODO confirm intent.
                hour = str(int(hour) + 1)
                timestamp1_rnd = date_rewrite.generate_timestampd(date1)
                print(date1 + " converted to " + timestamp1_rnd)
                timestamp2_rnd = date_rewrite.generate_timestamp(date2)
                print(date2 + " converted to " + timestamp2_rnd)
                dir = f['is_dir']
                if timestamp1_rnd < timestamp2_rnd:
                    if not dir:
                        print(" - Dropbox version of file \"" + ('%s' % name).encode(encoding) + "\" is older. Updating...")
                        term.onecmd('rm \"' + path + "/" + ('%s' % name).encode(encoding) + '\"')
                        term.onecmd('sync \"' + ('%s' % name).encode(encoding) + '\" \"' + path + "/" + ('%s' % name).encode(encoding) + '\"')
                    else:
                        # NOTE(review): `print + " x " ...` applies unary
                        # plus to a string and fails at runtime; presumably
                        # meant `print " x " + name + ...`.
                        print + " x " + name + " is directory. Skipping."
                elif timestamp1_rnd > timestamp2_rnd:
                    term.onecmd('get \"' + path + "/" + ('%s' % name).encode(encoding) + '\" \"' + ('%s' % name).encode(encoding) + '\"')
                    print(" - Dropbox verion of file \"" + ('%s' % name).encode(encoding) + "\" is newer. Updating.")
                else:
                    print(" x File \"" + ('%s' % name).encode(encoding) + "\" is identical. Skipping.")
    # NOTE(review): `parameters` is not defined in this function —
    # presumably a module-level/global list; confirm against caller.
    print(" > Command '" + parameters[1] + "' executed")
def messageCB(con, msg):
    # Handle an incoming XMPP message: log it (locale encoding with a
    # utf-8 fallback), drop error stanzas, run the self-addressed
    # connection-quality probe, then either flood-guard, dispatch a bot
    # command, or relay the text to the other channel members.
    # NOTE(review): indentation is reconstructed from flattened source;
    # the placement of the probe's flush/return is a best guess.
    global ontesting
    whoid = getjid(msg.getFrom())
    if conf.general.debug > 2:
        try:
            print '>>>', time.strftime('%Y-%m-%d %H:%M:%S'), '[MESSAGE]', unicode(msg).encode(locale.getdefaultlocale()[1])
        except:
            print '>>>', time.strftime('%Y-%m-%d %H:%M:%S'), '[MESSAGE]', unicode(msg).encode('utf-8')
    if msg.getError() != None:
        if conf.general.debug > 2:
            try:
                print '>>> [ERROR]', unicode(msg).encode(locale.getdefaultlocale()[1])
            except:
                print '>>> [ERROR]', unicode(msg).encode('utf-8')
        # if statuses.has_key(getdisplayname(msg.getFrom())):
        #     sendstatus(unicode(msg.getFrom()),_("away"), _("Blocked"))
        #     boot(msg.getFrom().getStripped())
    elif msg.getBody():
        # check quality
        if msg.getFrom().getStripped() == getjid(JID):
            body = msg.getBody()
            if body and body[0] == 'Q':
                # Self-probe reply: payload is "Q<sent-unix-time>:...".
                ontesting = False
                t = int(body[1:].split(':', 1)[0])
                t1 = int(time.time())
                if t1 - t > reconnectime:
                    # Round-trip exceeded the threshold: force reconnect.
                    if conf.general.debug > 1:
                        print '>>>', time.strftime('%Y-%m-%d %H:%M:%S'), 'RECONNECT... network delay it too long: %d\'s' % (t1 - t)
                    raise RECONNECT_COMMAND
            xmllogf.flush()
            return
        userjid[whoid] = unicode(msg.getFrom())
        if len(msg.getBody()) > 1024:
            # Flood guard: oversized messages are not relayed.
            systoall(_("%s is being a moron trying to flood the channel").para(getdisplayname(msg.getFrom())))
        elif msg.getBody()[:1] in commandchrs:
            if conf.general.debug > 1:
                print '......CMD......... %s [%s]' % (msg.getFrom(), msg.getBody())
            cmd(msg.getFrom(), msg.getBody())
        else:
            # check away
            if has_userflag(msg.getFrom().getStripped(), 'away'):
                del_userflag(msg.getFrom().getStripped(), 'away')
                # systoone(msg.getFrom().getStripped(), _('Warning: Because you set "away" flag, so you can not receive and send any message from this bot, until you reset using "/away" command'))
                # xmllogf.flush()
                # return
            global suppressing, last_activity
            suppressing = 0
            last_activity = time.time()
            sendtoall('<%s> %s' % (getdisplayname(msg.getFrom()), msg.getBody()),
                      butnot=[getdisplayname(msg.getFrom())], )
            # if con.getRoster().getShow(msg.getFrom().getStripped()) not in ['available','chat','online',None]:
            #     systoone(msg.getFrom(), _('Warning: You are marked as "busy" in your client,\nyou will not see other people talk,\nset yourself "available" in your client to see their replies.'))
    xmllogf.flush()  # just so flushes happen regularly
def strToDate(arg, fmt):
    """Parse *arg* with strptime format *fmt*, tolerating trailing junk.

    When strptime reports unconverted trailing data, exactly that tail is
    stripped and parsing is retried once.  Any other ValueError is
    re-raised unchanged.

    Fixes: removes a dead ``locale.getdefaultlocale()`` call whose result
    was discarded, and guards against an empty ``e.args``.
    """
    try:
        return datetime.strptime(arg, fmt)
    except ValueError as e:
        msg = e.args[0] if e.args else ''
        if msg.startswith('unconverted data remains:'):
            # len('unconverted data remains: ') == 26: drop exactly the
            # reported unparsed tail and retry once.
            arg = arg[:-(len(msg) - 26)]
            return datetime.strptime(arg, fmt)
        raise
def isOS(self): if locale.getdefaultlocale()[0] == 'zh_CN': print locale.getdefaultlocale()[0] sleep(1) self.filedialog = application.Application().Connect_(title_re=ur'选择安装语言') combox.ComboBoxWrapper(self.filedialog.TSelectLanguageForm.TNewComboBox).Select(self.appLanguage) self.filedialog[u'选择安装语言'][u'确定'].Click() else: self.filedialog = application.Application().Connect_(title_re=ur'Select Setup Language') combox.ComboBoxWrapper(self.filedialog.TSelectLanguageForm.TNewComboBox).Select(self.appLanguage) self.filedialog.TSelectLanguageForm.OK.Click()
def __init__(self, path, section):
    # credits to nrgaway.
    """Remember *path*/*section* and derive the UI language from the
    system default locale, falling back to DEFAULT_LANG.

    Fix: the bare ``except:`` (which also swallowed KeyboardInterrupt/
    SystemExit) is narrowed to ``except Exception``.
    """
    self.path = path
    self.section = section
    self.language = DEFAULT_LANG
    self.result = None
    try:
        # getdefaultlocale() may return (None, None); keep the default then.
        if locale.getdefaultlocale()[0] is not None:
            self.language = locale.getdefaultlocale()[0].split('_')[0]
    except Exception:
        self.language = DEFAULT_LANG
        print('ERROR: locale.getdefaultlocale failed. Using "{}" as default'.format(self.language))
def testGetData():
    # Exercises acmd.getData() against three scenarios:
    #   1. pure-ASCII path and file name
    #   2. CJK path/name, file-name transcoding at its default setting
    #   3. CJK path/name with vimAcmdAutoEncode set to '0'
    # Exits the process on the first failing case.
    print '\n[test] getData'
    pathName1 = './testGetData'
    pathName2 = './测试数据获取'
    fileName1 = 'readme.haml'
    fileName2 = '说明.haml'
    # test case 1
    # Scenario: file name and path are ASCII-only.
    acmd.vim.vimFullFileName = os.path.realpath(pathName1 +'/'+ fileName1)
    getTc1Result = acmd.getData()
    tc1Result = [os.path.realpath(pathName1 +'/'+ fileName1).replace('\\', '/'),
                 os.path.realpath(pathName1).replace('\\', '/')+'/',
                 'readme', 'haml']
    #tc1Result = [os.path.realpath(pathName1 +'/'+ fileName1).replace('\\', '/'), os.path.realpath(pathName1).replace('\\', '/')+'/', 'readme', 'haml.']
    if getTc1Result == tc1Result:
        print ' test case 1:success'
    else:
        print ' test case 1:failure\n [tc1Result]\n %s\n [getTc1Result]\n %s' %(str(tc1Result), str(getTc1Result))
        sys.exit()
    # test case 2
    # Scenario: path/name contain Chinese, file-name transcoding not enabled.
    acmd.vim.vimFullFileName = os.path.realpath(pathName2 +'/'+ fileName2)
    getTc2Result = acmd.getData()
    codepage = locale.getdefaultlocale()[1].lower()
    tmp1 = os.path.realpath(pathName2+'/'+fileName2).replace('\\', '/').decode('utf-8')
    tmp2 = os.path.realpath(pathName2).replace('\\', '/').decode('utf-8')+'/'
    tc2Result = [ tmp1, tmp2, re.sub( r'\.[^.]*$', '', fileName2).decode('utf-8'), 'haml' ]
    #tc2Result = [ tmp1, tmp2, re.sub( r'\.[^.]*$', '', fileName2).decode('utf-8').encode(codepage), 'haml.' ]
    if getTc2Result == tc2Result:
        print ' test case 2:success'
    else:
        print ' test case 2:failure\n [tc2Result]\n %s\n [getTc2Result]\n %s' %(str(tc2Result), str(getTc2Result))
        sys.exit()
    # test case 3
    # Scenario: path/name contain Chinese, file-name transcoding enabled.
    acmd.vim.vimFullFileName = os.path.realpath(pathName2 +'/'+ fileName2)
    acmd.vim.vimAcmdAutoEncode = '0'
    getTc3Result = acmd.getData()
    codepage = locale.getdefaultlocale()[1].lower()
    tmp1 = os.path.realpath(pathName2).replace('\\', '/')+'/'+fileName2
    tmp2 = os.path.realpath(pathName2).replace('\\', '/').decode('utf-8')+'/'
    tc3Result = [ tmp1.decode('utf-8'), tmp2, re.sub( r'\.[^.]*$', '', fileName2).decode('utf-8'), 'haml' ]
    #tc3Result = [ tmp1, tmp2, re.sub( r'\.[^.]*$', '', fileName2), 'haml.' ]
    if getTc3Result == tc3Result:
        print ' test case 3:success'
    else:
        print ' test case 3:failure\n [tc3Result]\n %s\n [getTc3Result]\n %s' %(str(tc3Result), str(getTc3Result))
        sys.exit()
# Platform flag: anything that is not Windows is treated as Unix-like.
if sys.platform.startswith('win'):
    IS_UNIX = False
else:
    IS_UNIX = True
LOCALE_PATH = os.path.abspath(os.path.join(APP_PATH, "data/locale"))
# Optional locale-override file in the CWD: its first token names the locale.
EXT_LOCALE = os.path.abspath(os.path.join(os.getcwd(), "locale"))
if os.path.isfile(EXT_LOCALE):
    with open(EXT_LOCALE, "r") as in_file:
        LINE = in_file.readline()
        LINE_LIST = LINE.split()
        # NOTE(review): an empty override file makes LINE_LIST[0] raise
        # IndexError; confirm the file is guaranteed non-empty.
        if LINE_LIST[0] in SUPPORTED_LOCALE:
            CURRENT_LOCALE = LINE_LIST[0]
        else:
            CURRENT_LOCALE = locale.getdefaultlocale()[0]
else:
    CURRENT_LOCALE = locale.getdefaultlocale()[0]
GOOGLE_SPEECH_V2_API_KEY = "AIzaSyBOti4mM-6x9WDnZIjIeyEU21OpBXqWBgw"
GOOGLE_SPEECH_V2_API_URL = \
    "www.google.com/speech-api/v2/recognize?client=chromium&lang={lang}&key={key}"
# Use half the cores for concurrent recognition, but never fewer than 2.
if multiprocessing.cpu_count() > 3:
    DEFAULT_CONCURRENCY = multiprocessing.cpu_count() >> 1
else:
    DEFAULT_CONCURRENCY = 2
DEFAULT_SRC_LANGUAGE = 'en-US'
DEFAULT_ENERGY_THRESHOLD = 45
MAX_REGION_SIZE = 6.0
# If the content of the environment variable LANG contains a string which # represents a language or encoding not supported by the system, the # following line will raise an exception. LC_ALL = locale.setlocale(locale.LC_ALL, '') except locale.Error, error_msg: # Here we tell user that it's system is set to an unsupported language, # and that Umit will proceed using the system's default. # Latter, we call setlocale again, but now providing None as the second # argument, avoiding the occourrance of the exception. # Gtk will raise a warning in this case, but will work just perfectly. print "Your locale setting is not supported. Umit will continue using \ using your system's preferred language." LC_ALL = locale.setlocale(locale.LC_ALL, None) try: LANG, ENC = locale.getdefaultlocale() ERRORS = "ignore" except ValueError: LANG = None ENC = None # If not correct locale could be retrieved, set en_US.utf8 as default if ENC is None: ENC = "utf8" if LANG is None: LANG = "en_US" try: import gettext
r = requests.get(url) with open(resourcesFolder + "/langs.json", "wb") as f: f.write(r.content) # Load language file with open(f"{resourcesFolder}/langs.json", "r", encoding="utf8") as lang_file: lang_file = json.load(lang_file) # OS X invalid locale hack if platform.system() == 'Darwin': if locale.getlocale()[0] is None: locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') # Check if miner is configured, if it isn't, autodetect language if not Path(resourcesFolder + "/Miner_config.cfg").is_file(): locale = locale.getdefaultlocale()[0] if locale.startswith("es"): lang = "spanish" elif locale.startswith("sk"): lang = "slovak" elif locale.startswith("ru"): lang = "russian" elif locale.startswith("pl"): lang = "polish" elif locale.startswith("fr"): lang = "french" else: lang = "english" # Read language from configfile else: try:
# Minimum supported dependency versions.
MIN_QGIS_VERSION = '2.10.1'
MIN_GDAL_VERSION_INT = 11103
MIN_GDAL_VERSION = '1.11.3'


def winenv():
    # Adjust line endings for Windows and make sure LANG is set there so
    # locale/gettext lookups work.
    # NOTE(review): nesting reconstructed from flattened source — assumes
    # the LANG check sits inside the Windows branch and that `language`
    # is the module-level value computed below before winenv() is called;
    # TODO confirm against the original layout.
    global eol, encoding
    eol = '\n'
    if platform.startswith('win'):
        eol = '\r\n'
        if os.getenv('LANG') is None:
            os.environ['LANG'] = language


#locale.setlocale(locale.LC_ALL, '')
language, encoding = locale.getdefaultlocale()
terminal = compat.Terminal(encoding)
app_path = terminal.decode(os.path.dirname(__file__))
localedir = os.path.join(app_path, 'locale', 'po')
# Note: `platform` here is the sys.platform string, shadowing any
# platform module import.
platform = sys.platform
winenv()
compat.install_gettext(app_name, localedir)

log_level = 'INFO'  # Default log level
log_file = 'catatom2osm.log'
log_format = '%(asctime)s - %(levelname)s - %(message)s'

log = logging.getLogger(app_name)
fh = logging.FileHandler(log_file)
ch = logging.StreamHandler(compat.get_stderr())
fh.setLevel(logging.DEBUG)
def main():
    """Infer driver entry point: capture the build, then analyze and report.

    Flow: parse args (optionally merging a capture module's parser), run the
    module's capture phase, run the analyzer, aggregate stats for the
    top-level instance, and exit non-zero when bugs were found and
    --fail-on-bug was requested.
    """
    # Mark the first infer process in a (possibly recursive) invocation so
    # only the top level aggregates stats at the end.
    toplevel_envvar_value = os.environ.get(TOP_LEVEL_ENVVAR, None)
    is_toplevel_instance = False
    if toplevel_envvar_value is None:
        os.environ[TOP_LEVEL_ENVVAR] = '1'
        is_toplevel_instance = True
    to_parse, cmd = split_args_to_parse()
    # get the module name (if any), then load it
    capture_module_name = os.path.basename(cmd[0]) if len(cmd) > 0 else None
    mod_name = get_module_name(capture_module_name)
    imported_module = None
    if mod_name:
        # There is module that supports the command
        imported_module = load_module(mod_name)

    # get the module's argparser and merge it with the global argparser
    module_argparser = []
    if imported_module:
        module_argparser.append(
            imported_module.create_argparser(capture_module_name))
    global_argparser = create_argparser(module_argparser)

    args = global_argparser.parse_args(to_parse)
    validate_args(imported_module, args)

    # Wipe previous results unless the run is reactive/incremental.
    remove_infer_out = (imported_module is not None
                        and not args.reactive
                        and capture_module_name != 'analyze'
                        and not args.buck)
    if remove_infer_out:
        analyze.remove_infer_out(args.infer_out)

    if imported_module is not None:
        analyze.create_results_dir(args.infer_out)
        analyze.reset_start_file(args.infer_out,
                                 touch_if_present=not args.continue_capture)
        utils.configure_logging(args)
        # Log the environment for debuggability of encoding/path issues.
        logging.info('output of locale.getdefaultlocale(): %s',
                     str(locale.getdefaultlocale()))
        logging.info('encoding we chose in the end: %s', config.CODESET)
        logging.info('Running command %s',
                     ' '.join(map(utils.decode, sys.argv)))
        logging.info('Path to infer script %s (%s)', utils.decode(__file__),
                     os.path.realpath(utils.decode(__file__)))
        logging.info(analyze.get_infer_version())
        logging.info('Platform: %s', utils.decode(platform.platform()))

        def log_getenv(k):
            # Log one environment variable, decoded, or a marker if unset.
            v = os.getenv(k)
            if v is not None:
                v = utils.decode(v)
            else:
                v = '<NOT SET>'
            logging.info('%s=%s', k, v)

        log_getenv('PATH')
        log_getenv('SHELL')
        log_getenv('PWD')

        # Run the module's capture phase; abort with its exit code on failure.
        capture_exitcode = imported_module.gen_instance(args, cmd).capture()
        if capture_exitcode != os.EX_OK:
            logging.error('Error during capture phase, exiting')
            exit(capture_exitcode)
        logging.info('Capture phase was successful')
    elif capture_module_name is not None:
        # There was a command, but it's not supported
        utils.stdout('Command "{cmd}" not recognised'.format(
            cmd='' if capture_module_name is None else capture_module_name))
        global_argparser.print_help()
        sys.exit(1)
    else:
        global_argparser.print_help()
        sys.exit(os.EX_OK)

    # NOTE(review): for buck/javac the module is assumed to drive analysis
    # itself — confirm against the respective capture modules.
    if not (mod_name == 'buck' or mod_name == 'javac'):
        # Something should be already captured, otherwise analysis would fail
        if not os.path.exists(os.path.join(args.infer_out, 'captured')):
            print('There was nothing to analyze, exiting')
            exit(os.EX_USAGE)
        analysis = analyze.AnalyzerWrapper(args)
        analysis.analyze_and_report()
        analysis.save_stats()

    # Only the outermost infer process aggregates stats across children.
    if is_toplevel_instance is True:
        buck_out_for_stats_aggregator = None
        if (mod_name == 'buck'
                and os.path.isfile(
                    os.path.join(args.infer_out,
                                 config.INFER_BUCK_DEPS_FILENAME))):
            buck_out_for_stats_aggregator = 'buck-out'
        logging.info('Aggregating stats')
        output = utils.run_infer_stats_aggregator(
            args.infer_out, buck_out_for_stats_aggregator)
        logging.info(output)

    if args.fail_on_bug:
        bugs_filename = os.path.join(args.infer_out,
                                     config.JSON_REPORT_FILENAME)
        try:
            bugs = utils.load_json_from_path(bugs_filename)
            if len(bugs) > 0:
                sys.exit(config.BUG_FOUND_ERROR_CODE)
        except OSError:
            # Best effort: missing/unreadable report means no bug signal.
            pass
def get_locale(self): default_locale = locale.getdefaultlocale() if len(default_locale) > 1: return default_locale[0] + "." + default_locale[1] else: return default_locale[0]
# pylint: disable=C0111,R0903 """Displays the current date and time. Parameters: * datetime.format: strftime()-compatible formatting string * date.format : alias for datetime.format * time.format : alias for datetime.format """ from __future__ import absolute_import import datetime import locale import bumblebee.engine locale.setlocale(locale.LC_TIME, locale.getdefaultlocale()) ALIASES = [ "date", "time" ] def default_format(module): default = "%x %X" if module == "date": default = "%x" if module == "time": default = "%X" return default class Module(bumblebee.engine.Module): def __init__(self, engine, config): super(Module, self).__init__(engine, config, bumblebee.output.Widget(full_text=self.get_time)
def init():
    """One-time PySol startup: locale, gettext, toolkit and solver detection.

    Mutates pysollib.settings and os.environ; consumes toolkit-selection
    flags from sys.argv.
    """
    # Make sure LANG is set before setlocale so gettext/locale agree.
    if 'LANG' not in os.environ:
        if os.name == 'nt':
            lang, enc = locale.getdefaultlocale()
            # NOTE(review): lang may be None here, which would put None into
            # os.environ and raise — confirm Windows always reports a locale.
            os.environ['LANG'] = lang
        elif jnius:  # android
            Locale = jnius.autoclass('java.util.Locale')
            os.environ['LANG'] = Locale.getDefault().getLanguage()
    locale.setlocale(locale.LC_ALL, '')

    # install gettext
    locale_locations = (
        # locale/ next to the pysol.py script
        sys.path[0],
        # locale/ next to library.zip (py2exe)
        os.path.dirname(sys.path[0]),
        # locale/ in curdir (works for e.g. py2app)
        os.curdir)
    # leaving the domain unbound means sys.prefix+'/share/locale'
    for par in locale_locations:
        locale_dir = os.path.join(par, 'locale')
        if os.path.isdir(locale_dir):
            gettext.bindtextdomain('pysol', locale_dir)
            break
    gettext.textdomain('pysol')

    # debug
    if 'PYSOL_CHECK_GAMES' in os.environ or 'PYSOL_DEBUG' in os.environ:
        pysollib.settings.CHECK_GAMES = True
        print('PySol debugging: set CHECK_GAMES to True')
    if 'PYSOL_DEBUG' in os.environ:
        try:
            pysollib.settings.DEBUG = int(os.environ['PYSOL_DEBUG'])
        except Exception:
            pysollib.settings.DEBUG = 1
        print(('PySol debugging: set DEBUG to', pysollib.settings.DEBUG))

    # init toolkit: the first matching flag wins and is consumed from argv
    if '--gtk' in sys.argv:
        pysollib.settings.TOOLKIT = 'gtk'
        sys.argv.remove('--gtk')
    elif '--tk' in sys.argv:
        pysollib.settings.TOOLKIT = 'tk'
        pysollib.settings.USE_TILE = False
        sys.argv.remove('--tk')
    elif '--tile' in sys.argv:
        pysollib.settings.TOOLKIT = 'tk'
        pysollib.settings.USE_TILE = True
        sys.argv.remove('--tile')
    elif '--kivy' in sys.argv:
        pysollib.settings.TOOLKIT = 'kivy'
        pysollib.settings.USE_TILE = False
        pysollib.settings.SELECT_GAME_MENU = False
        sys.argv.remove('--kivy')
    if pysollib.settings.TOOLKIT == 'tk':
        from six.moves import tkinter
        # Probe Tk with a hidden root window to query version/windowingsystem.
        root = tkinter.Tk(className=pysollib.settings.TITLE)
        root.withdraw()
        if tkinter.TkVersion < 8.4:
            # we need unicode support
            sys.exit("%s needs Tcl/Tk 8.4 or better (you have %s)" %
                     (pysollib.settings.TITLE, str(tkinter.TkVersion)))
        pysollib.settings.WIN_SYSTEM = root.tk.call('tk', 'windowingsystem')
        if pysollib.settings.WIN_SYSTEM == 'aqua':
            # TkAqua displays the console automatically in application
            # bundles, so we hide it here.
            from pysollib.macosx.appSupport import hideTkConsole
            hideTkConsole(root)
        #
        if pysollib.settings.USE_TILE == 'auto':
            # check Tile
            pysollib.settings.USE_TILE = False
            try:
                root.tk.eval('package require tile 0.7.8')
            except tkinter.TclError:
                pass
            else:
                pysollib.settings.USE_TILE = True
        # "can't invoke event <<ThemeChanged>>: application has been destroyed"
        # root.destroy()
        tkinter._default_root = None

    # check FreeCell-Solver
    pysollib.settings.USE_FREECELL_SOLVER = False
    if os.name == 'nt':
        if sys.path[0] and not os.path.isdir(sys.path[0]):
            # i.e. library.zip
            d = os.path.dirname(sys.path[0])
            os.chdir(d)  # for read presets
            fcs_command = os.path.join(
                'freecell-solver', 'bin', 'fc-solve.exe')
            pysollib.settings.FCS_COMMAND = fcs_command
        f = os.path.join('freecell-solver', 'presetrc')
        os.environ['FREECELL_SOLVER_PRESETRC'] = f
    if os.name in ('posix', 'nt'):
        # Best-effort probe: run "fc-solve --help" and look at the banner.
        try:
            kw = {'shell': True,
                  'stdout': subprocess.PIPE,
                  'stderr': subprocess.PIPE,
                  'stdin': subprocess.PIPE,
                  }
            if os.name != 'nt':
                kw['close_fds'] = True
            p = subprocess.Popen(pysollib.settings.FCS_COMMAND+' --help', **kw)
            p.stdin.close()
            line = p.stdout.readline()
            if sys.version_info >= (3,):
                line = line.decode("utf-8")
            if line.startswith('fc-solve'):
                pysollib.settings.USE_FREECELL_SOLVER = True
            if os.name == 'posix':
                os.wait()  # kill zombi
        except Exception:
            # traceback.print_exc()
            pass
    os.environ['FREECELL_SOLVER_QUIET'] = '1'

    # run app without games menus (more fast start)
    if '--no-games-menu' in sys.argv:
        sys.argv.remove('--no-games-menu')
        pysollib.settings.SELECT_GAME_MENU = False
"""Word-count demo: read whole text files from HDFS with a local Spark
context, count token frequencies and print them sorted by count."""
from pyspark import SparkConf, SparkContext
import locale

# Touch the locale machinery once up front (results intentionally unused).
locale.getdefaultlocale()
locale.getpreferredencoding()

spark_conf = SparkConf().set('spark.driver.host', '127.0.0.1')
sc = SparkContext(master='local', appName='myAppName', conf=spark_conf)

input_path = "hdfs://namenode:9000/"
txt_rdd = sc.wholeTextFiles(input_path, 20)

# Per-file token lists (kept for parity with the original script; unused).
words_in_files = txt_rdd.map(lambda rec: rec[1].split())

tokens = txt_rdd.flatMap(lambda rec: rec[1].split())
token_pairs = tokens.map(lambda tok: (tok, 1))
token_counts = token_pairs.reduceByKey(lambda left, right: left + right)

print(token_counts.sortBy(lambda pair: pair[1]).collect())
def get_default_language(): try: language = 'zh_CN' if locale.getdefaultlocale()[0] == 'zh_CN' else 'en' except ValueError: language = 'en' return language
def createArguments(self):
    """Register every ZeroNet CLI sub-command and global config option.

    Builds the argparse sub-parsers (main, site*, peer*, crypt*, db*, test*)
    plus all ``--*`` configuration flags, then returns the parser.
    NOTE(review): ``type='bool'`` string values assume a custom argparse type
    registered elsewhere in this class — confirm.
    """
    # Default bootstrap trackers (region noted per entry).
    trackers = [
        "zero://boot3rdez4rzn36x.onion:15441",
        "zero://zero.booth.moe#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:443",  # US/NY
        "udp://tracker.coppersurfer.tk:6969",  # DE
        "udp://tracker.port443.xyz:6969",  # UK
        "udp://104.238.198.186:8000",  # US/LA
        "http://tracker2.itzmx.com:6961/announce",  # US/LA
        "http://open.acgnxtracker.com:80/announce",  # DE
        "http://open.trackerlist.xyz:80/announce",  # Cloudflare
        "https://1.tracker.eu.org:443/announce",  # Google App Engine
        "zero://2602:ffc5::c5b2:5360:26312"  # US/ATL
    ]
    # Platform specific
    if sys.platform.startswith("win"):
        coffeescript = "type %s | tools\\coffee\\coffee.cmd"
    else:
        coffeescript = None

    # Default UI language from the system locale; keep the full tag only for
    # pt-br / zh-tw, otherwise strip to the primary subtag.
    try:
        language, enc = locale.getdefaultlocale()
        language = language.lower().replace("_", "-")
        if language not in ["pt-br", "zh-tw"]:
            language = language.split("-")[0]
    except Exception:
        language = "en"

    use_openssl = True

    if repr(1483108852.565) != "1483108852.565":  # Fix for weird Android issue
        fix_float_decimals = True
    else:
        fix_float_decimals = False

    # Paths default to the startup directory.
    config_file = self.start_dir + "/zeronet.conf"
    data_dir = self.start_dir + "/data"
    log_dir = self.start_dir + "/log"

    ip_local = ["127.0.0.1", "::1"]

    # Main
    action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)')

    # SiteCreate
    action = self.subparsers.add_parser("siteCreate", help='Create a new site')

    # SiteNeedFile
    action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site')
    action.add_argument('address', help='Site address')
    action.add_argument('inner_path', help='File inner path')

    # SiteDownload
    action = self.subparsers.add_parser("siteDownload", help='Download a new site')
    action.add_argument('address', help='Site address')

    # SiteSign
    action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
    action.add_argument('address', help='Site to sign')
    action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
    action.add_argument('--inner_path', help='File you want to sign (default: content.json)', default="content.json", metavar="inner_path")
    action.add_argument('--remove_missing_optional', help='Remove optional files that is not present in the directory', action='store_true')
    action.add_argument('--publish', help='Publish site after the signing', action='store_true')

    # SitePublish
    action = self.subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
    action.add_argument('address', help='Site to publish')
    action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)', default=None, nargs='?')
    action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)', default=15441, nargs='?')
    action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)', default="content.json", metavar="inner_path")

    # SiteVerify
    action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
    action.add_argument('address', help='Site to verify')

    # SiteCmd
    action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site')
    action.add_argument('address', help='Site address')
    action.add_argument('cmd', help='API command name')
    action.add_argument('parameters', help='Parameters of the command', nargs='?')

    # dbRebuild
    action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
    action.add_argument('address', help='Site to rebuild')

    # dbQuery
    action = self.subparsers.add_parser("dbQuery", help='Query site sql cache')
    action.add_argument('address', help='Site to query')
    action.add_argument('query', help='Sql query')

    # PeerPing
    action = self.subparsers.add_parser("peerPing", help='Send Ping command to peer')
    action.add_argument('peer_ip', help='Peer ip')
    action.add_argument('peer_port', help='Peer port', nargs='?')

    # PeerGetFile
    action = self.subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
    action.add_argument('peer_ip', help='Peer ip')
    action.add_argument('peer_port', help='Peer port')
    action.add_argument('site', help='Site address')
    action.add_argument('filename', help='File name to request')
    action.add_argument('--benchmark', help='Request file 10x then displays the total time', action='store_true')

    # PeerCmd
    action = self.subparsers.add_parser("peerCmd", help='Request and print a file content from peer')
    action.add_argument('peer_ip', help='Peer ip')
    action.add_argument('peer_port', help='Peer port')
    action.add_argument('cmd', help='Command to execute')
    action.add_argument('parameters', help='Parameters to command', nargs='?')

    # CryptSign
    action = self.subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
    action.add_argument('message', help='Message to sign')
    action.add_argument('privatekey', help='Private key')

    # Crypt Verify
    action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address')
    action.add_argument('message', help='Message to verify')
    action.add_argument('sign', help='Signiture for message')
    action.add_argument('address', help='Signer\'s address')

    # Crypt GetPrivatekey
    action = self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed')
    action.add_argument('master_seed', help='Source master seed')
    action.add_argument('site_address_index', help='Site address index', type=int)

    action = self.subparsers.add_parser("getConfig", help='Return json-encoded info')
    action = self.subparsers.add_parser("testConnection", help='Testing')
    action = self.subparsers.add_parser("testAnnounce", help='Testing')

    # Config parameters
    self.parser.add_argument('--verbose', help='More detailed logging', action='store_true')
    self.parser.add_argument('--debug', help='Debug mode', action='store_true')
    self.parser.add_argument('--silent', help='Disable logging to terminal output', action='store_true')
    self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
    self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
    self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path")
    self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path")
    self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path")
    self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR"])
    self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"])
    self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int)
    self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language')
    self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
    self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
    self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
    self.parser.add_argument('--ui_host', help='Allow access using this hosts', metavar='host', nargs='*')
    self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true')
    self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name')
    self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D', metavar='address')
    self.parser.add_argument('--updatesite', help='Source code update site', default='1UPDatEDxnvHDo7TXvq6AEBARfNkyfxsp', metavar='address')
    self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit')
    self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
    self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
    self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit')
    self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers')
    self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
    self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port')
    self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port')
    self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"])
    # NOTE(review): type=int looks wrong for a list of ip strings — confirm.
    self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*')
    self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*')
    self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
    self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
    self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
    self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=trackers, metavar='protocol://address', nargs='*')
    self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path')
    self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable")
    self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=use_openssl)
    self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true')
    self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
    self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true')
    self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory', type='bool', choices=[True, False], default=True)
    self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true')
    self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup', default=2048, type=int, metavar='limit')
    self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size')
    self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)', type='bool', choices=[True, False], default=False)
    self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)', type='bool', choices=[True, False], default=False)
    self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power', type='bool', choices=[True, False], default=False)
    self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification', type='bool', choices=[True, False], default=fix_float_decimals)
    self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed")
    self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual")
    self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, metavar='executable_path')
    self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable')
    self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051')
    self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050')
    self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password')
    self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true')
    self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10)
    self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='limit', type=int, default=15441)
    self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
    self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true')

    return self.parser
#mtp-lastfm is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #GNU General Public License for more details. # #You should have received a copy of the GNU General Public License #along with mtp-lastfm. If not, see http://www.gnu.org/licenses/ import os import locale import gettext import gtk.glade APP_NAME = "mtp-lastfm" local_path = "/usr/share/locale" langs = [] lc, encoding = locale.getdefaultlocale() if (lc): langs = [lc] language = os.environ.get('LANGUAGE', None) if (language): langs += language.split(":") gettext.bindtextdomain(APP_NAME, local_path) gettext.textdomain(APP_NAME) gtk.glade.bindtextdomain(APP_NAME, local_path) gtk.glade.textdomain(APP_NAME) lang = gettext.translation(APP_NAME, local_path, languages=langs, fallback = True) def set_get_text(): return lang.gettext
"""Demo of date arithmetic and locale-aware strftime formatting."""
from datetime import date, datetime, time
import locale

hoy = date.today()
fecha_nacimiento = date(1993, 6, 4)
# timedelta between today and the birth date.
dias_desde = hoy - fecha_nacimiento
print("Hoy es {}, han pasado {} dias desde que nací en {}".format(
    hoy, dias_desde, fecha_nacimiento))
# Hoy es 2018-07-16, han pasado 9173 days, 0:00:00 dias desde que nací en 1993-06-04

print("\n")
print("Trabajando con la localización")
# NOTE(review): passing the (lang, encoding) tuple works via
# locale.normalize, but raises when the default locale is undetectable or
# not installed — confirm the target environment.
locale.setlocale(locale.LC_ALL, locale.getdefaultlocale())
formateado = hoy.strftime("%m-%d-%y. %d %b %Y es %A. hoy es %d de %B.")
print(formateado)
# 07-16-18. 16 jul 2018 es lunes. hoy es 16 de julio.

print("\n")
print("datetime.now()")
print(datetime.now())

print("\n")
print("hh:mm:ss")
# Bug fix: the original printed time.hour/time.minute/time.second — the
# attribute DESCRIPTORS of the datetime.time class, not clock values.
# Take the current wall-clock time and print its components instead.
ahora = datetime.now().time()
print(ahora.hour, ahora.minute, ahora.second)
def getAddonInfo(self, property):
    """getAddonInfo(id) -- Returns the value of an addon property as a string.

    id : string - id of the property that the module needs to access.

    *Note, choices are (author, changelog, description, disclaimer, fanart.
    icon, id, name, path profile, stars, summary, type, version)

    You can use the above as keywords for arguments.

    example:
        - version = self.Addon.getAddonInfo('version')
    """
    # Two-letter UI language used to select localized <summary>/
    # <description>/<disclaimer> tags; fall back to English when the
    # locale cannot be determined.
    try:
        lang = getdefaultlocale()[0][:2].lower()
    except Exception:  # narrowed from a bare except; behavior preserved
        lang = "en"
    if property == "author":
        author = re.search('provider-name="(.*?)"', self.addon_xml)
        if author:
            return author.group(1)
        if hasattr(sys.modules["__main__"], "__author__"):
            return sys.modules["__main__"].__author__
    elif property == "changelog" and os.path.exists(os.path.join(self.cwd, "changelog.txt")):
        return os.path.join(self.cwd, "changelog.txt")
    elif property == "description":
        # Prefer the localized tag, then the plain one.
        desc = re.search('<description lang="%s">(.*?)</description>' % lang, self.addon_xml, re.S)
        if desc:
            return desc.group(1)
        desc = re.search('<description>(.*?)</description>', self.addon_xml, re.S)
        if desc:
            return desc.group(1)
    elif property == "disclaimer":
        disc = re.search('<disclaimer lang="%s">(.*?)</disclaimer>' % lang, self.addon_xml, re.S)
        if disc:
            return disc.group(1)
        disc = re.search('<disclaimer>(.*?)</disclaimer>', self.addon_xml, re.S)
        if disc:
            return disc.group(1)
    elif property == "fanart" and os.path.exists(os.path.join(self.cwd, "fanart.jpg")):
        # Bug fix: the original returned "fanart.ipg" (typo) although the
        # existence check above is for "fanart.jpg".
        return os.path.join(self.cwd, "fanart.jpg")
    elif property == "icon":
        if os.path.exists(os.path.join(self.cwd, "icon.png")):
            return os.path.join(self.cwd, "icon.png")
        if os.path.exists(os.path.join(self.cwd, "default.tbn")):
            return os.path.join(self.cwd, "default.tbn")
    elif property == "id":
        ID = re.search('id="(.*?)"', self.addon_xml)
        if ID:
            return ID.group(1)
        if hasattr(sys.modules["__main__"], "__addonID__"):
            return sys.modules["__main__"].__addonID__
    elif property == "name":
        name = re.search(' name="(.*?)"', self.addon_xml)
        if name:
            return name.group(1)
        if hasattr(sys.modules["__main__"], "__script__"):
            return sys.modules["__main__"].__script__
    elif property == "path":
        return self.cwd
    elif property == "profile":
        # Per-addon user data path; layout depends on the addon kind.
        path = "special://profile/"
        if self.type == "script":
            path += "script_data/"
        elif self.type == "plugin":
            path += "plugin_data/%s/" % self.pluginType
        else:
            path += "addon_data/"
        path += "%s/" % os.path.basename(self.cwd)
        return path
    elif property == "stars":
        # NOTE: returns an int although the docstring promises strings;
        # kept for backward compatibility with existing callers.
        return 0
    elif property == "summary":
        summ = re.search('<summary lang="%s">(.*?)</summary>' % lang, self.addon_xml, re.S)
        if summ:
            return summ.group(1)
        summ = re.search('<summary>(.*?)</summary>', self.addon_xml, re.S)
        if summ:
            return summ.group(1)
    elif property == "type":
        # Renamed local from ``type`` to avoid shadowing the builtin.
        point_match = re.search('point="(.*?)"', self.addon_xml)
        if point_match:
            return point_match.group(1)
        else:
            return self.type.title()
    elif property == "version":
        version = re.search('<addon.*?version="(.*?)"', self.addon_xml, re.S)
        if version:
            return version.group(1)
        if hasattr(sys.modules["__main__"], "__version__"):
            return sys.modules["__main__"].__version__
    # Unknown property or nothing matched: empty string.
    return ""
def datetime(self, frmt: str = '%c'):
    """Tell all players the current date/time.

    frmt: strftime()-compatible format string; default '%c' is the
    locale-appropriate full representation.  Returns whatever
    ``self.say`` returns.
    """
    # Apply the system default locale to LC_TIME so strftime output is
    # localized.  NOTE(review): raises if that locale is not installed on
    # the host — confirm deployment environments.
    setlocale(LC_TIME, getdefaultlocale())  # Fix locale.
    # ``datetime`` here resolves to the module-level datetime class, not
    # this method — method names are class attributes, not globals.
    text = datetime.now().strftime(frmt)
    return self.say(text)
def _fetch_addon_info(self):
    """Pick up to self.LIMIT random script/plugin addons and publish their
    metadata (title, author, version, art, ...) as window properties.

    Best-effort: any failure is logged via print_exc() and swallowed.
    """
    try:
        from glob import glob
        from locale import getdefaultlocale
        # get lang info for getting summary
        # NOTE(review): slicing str(getdefaultlocale()) relies on the tuple
        # repr layout "('en_US', ...)" to grab chars 2..3 — fragile; confirm.
        g_langInfo = str(getdefaultlocale())[2:4] or "en"
        # list the contents of the addons folder and get addons listing
        addons = glob(os.path.join(xbmc.translatePath('special://home/addons'), "*", "addon.xml"))
        addons += glob(os.path.join(xbmc.translatePath('special://xbmc/addons'), "*", "addon.xml"))
        # get total value
        self.WINDOW.setProperty("RandomAddon.Count", str(len(addons)))
        # count thru our addons
        count = 0
        while count < self.LIMIT:
            # check if we don't run out of items before LIMIT is reached
            if len(addons) == 0:
                break
            # Shuffle addons in place.
            random.shuffle(addons)
            # select a random xml
            addon_xml = random.choice(addons)
            # remove the xml from our list (so it cannot be picked twice)
            addons.remove(addon_xml)
            # read xml
            str_xml = open(addon_xml).read()
            # find plugins and scripts only
            if re.search('point="xbmc.python.(script|pluginsource)"', str_xml):
                count += 1
                # set base property
                b_property = "RandomAddon.%d." % (count)
                # get summary: localized tag first, then the plain one
                summary = re.search('<summary.*?lang="[%s|en]">(.*?)</summary>' % g_langInfo, str_xml, re.S)
                summary = summary or re.search('<summary>(.*?)</summary>', str_xml, re.S)
                if summary:
                    summary = summary.group(1)
                else:
                    summary = ""
                # set properties scraped from addon.xml
                self.WINDOW.setProperty(b_property + "Summary", summary)
                self.WINDOW.setProperty(b_property + "Title", re.search('<addon.*?name="(.*?)"', str_xml, re.S).group(1))
                self.WINDOW.setProperty(b_property + "Author", re.search('<addon.*?provider-name="(.*?)"', str_xml, re.S).group(1))
                self.WINDOW.setProperty(b_property + "Version", re.search('<addon.*?version="(.*?)"', str_xml, re.S).group(1))
                self.WINDOW.setProperty(b_property + "Fanart", addon_xml.replace('addon.xml', 'fanart.jpg'))
                self.WINDOW.setProperty(b_property + "Thumb", addon_xml.replace('addon.xml', 'icon.png'))
                self.WINDOW.setProperty(b_property + "Type", "".join(re.findall('<provides>(.*?)</provides>', str_xml)) or "executable")
                self.WINDOW.setProperty(b_property + "Path", re.search('<addon.*?id="(.*?)"', str_xml, re.S).group(1))
                self.WINDOW.setProperty(b_property + "Url", "")
                self.WINDOW.setProperty(b_property + "HasAddon", "1")
                # #print "Addon: %r" % self.WINDOW.getProperty( b_property + "Path" )
    except:
        # Deliberate best-effort swallow; failures only skip the listing.
        print_exc()
    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        """Build and (optionally) execute the CREATE TABLE statement for *table*.

        Walks every field of the table, renders its SQL column definition
        (including single- and multi-column foreign keys and PostGIS geometry
        columns), then either runs the DDL or compares the computed field
        metadata against the pickled ``*.table`` migration file and triggers a
        migration when they differ.

        :param table: the pyDAL Table object to create.
        :param migrate: True to run/record DDL; a string to use a custom
            migration file name; False to only return the SQL.
        :param fake_migrate: log the migration as done without executing DDL.
        :param polymodel: NOTE(review): accepted but never used in this body —
            presumably kept for adapter-interface compatibility; confirm.
        :returns: the CREATE TABLE SQL string (see NOTE near the end about the
            postcreation loop rebinding ``query``).
        """
        db = table._db
        table._migrate = migrate
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        # sql_fields drives migrations; sql_fields_aux drives CREATE TABLE
        sql_fields = {}
        sql_fields_aux = {}
        # table-level (multi-column) foreign keys, keyed by referenced table
        TFK = {}
        tablename = table._tablename
        types = self.adapter.types
        for sortable, field in enumerate(table, start=1):
            if self.db._ignore_field_case:
                field_name = field.name.lower()
                field_rname = field._rname.lower()
            else:
                field_name = field.name
                field_rname = field._rname
            if self.dbengine == "oracle":
                # Oracle needs all field names quoted to ensure consistent case
                field_rname = self.dialect.quote(field_rname)
            field_type = field.type
            if isinstance(field_type, SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith(("reference", "big-reference")):
                # strip the "reference "/"big-reference " prefix to get the target
                if field_type.startswith("reference"):
                    referenced = field_type[10:].strip()
                    type_name = "reference"
                else:
                    referenced = field_type[14:].strip()
                    type_name = "big-reference"
                if referenced == ".":
                    # "." means a self-reference
                    referenced = tablename
                constraint_name = self.dialect.constraint_name(
                    table._raw_rname, field._raw_rname)
                # if not '.' in referenced \
                #         and referenced != tablename \
                #         and hasattr(table,'_primarykey'):
                #     ftype = types['integer']
                # else:
                # Resolve the referenced table/field: plain "tablename" means
                # its _id; "tablename.fieldname" names an explicit column.
                try:
                    rtable = db[referenced]
                    rfield = rtable._id
                    rfieldname = rfield.name
                    rtablename = referenced
                except (KeyError, ValueError, AttributeError) as e:
                    self.db.logger.debug("Error: %s" % e)
                    try:
                        rtablename, rfieldname = referenced.split(".")
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                    except Exception as e:
                        self.db.logger.debug("Error: %s" % e)
                        raise KeyError(
                            "Cannot resolve reference %s in %s definition"
                            % (referenced, table._tablename))
                # must be PK reference or unique
                rfield_rname = rfield._rname
                if self.dbengine == "oracle":
                    rfield_rname = self.dialect.quote(rfield_rname)
                if (not rfield.type.startswith(("reference", "big-reference"))
                        and getattr(rtable, "_primarykey", None)
                        and rfieldname in rtable._primarykey
                        or rfield.unique):
                    # referencing a user-defined primary key or unique column:
                    # column takes the referenced column's own type
                    ftype = types[rfield.type[:9]] % dict(length=rfield.length)
                    # multicolumn primary key reference?
                    if not rfield.unique and len(rtable._primarykey) > 1:
                        # then it has to be a table level FK
                        if rtablename not in TFK:
                            TFK[rtablename] = {}
                        TFK[rtablename][rfieldname] = field_name
                    else:
                        fk = rtable._rname + " (" + rfield._rname + ")"
                        if self.dbengine == "oracle":
                            fk = (self.dialect.quote(rtable._rname) + " (" + rfield_rname + ")")
                        ftype = ftype + types["reference FK"] % dict(
                            # should be quoted
                            constraint_name=constraint_name,
                            foreign_key=fk,
                            table_name=table._rname,
                            field_name=field._rname,
                            on_delete_action=field.ondelete,
                            on_update_action=field.onupdate,
                        )
                else:
                    # make a guess here for circular references
                    # NOTE(review): id_fieldname computed below appears unused
                    # in this branch (ftype_info uses rfield_rname) — confirm.
                    if referenced in db:
                        id_fieldname = db[referenced]._id._rname
                    elif referenced == tablename:
                        id_fieldname = table._id._rname
                    else:
                        # make a guess
                        id_fieldname = self.dialect.quote("id")
                    # gotcha: the referenced table must be defined before
                    # the referencing one to be able to create the table
                    # Also if it's not recommended, we can still support
                    # references to tablenames without rname to make
                    # migrations and model relationship work also if tables
                    # are not defined in order
                    if referenced == tablename:
                        real_referenced = db[referenced]._rname
                    else:
                        real_referenced = (referenced in db
                                           and db[referenced]._rname
                                           or referenced)
                    if self.dbengine == "oracle":
                        real_referenced = self.dialect.quote(real_referenced)
                    rfield = db[referenced]._id
                    ftype_info = dict(
                        index_name=self.dialect.quote(field._raw_rname + "__idx"),
                        field_name=field_rname,
                        constraint_name=self.dialect.quote(constraint_name),
                        foreign_key="%s (%s)" % (real_referenced, rfield_rname),
                        on_delete_action=field.ondelete,
                        on_update_action=field.onupdate,
                    )
                    ftype_info["null"] = (" NOT NULL" if field.notnull
                                          else self.dialect.allow_null)
                    ftype_info["unique"] = " UNIQUE" if field.unique else ""
                    ftype = types[type_name] % ftype_info
            elif field_type.startswith("list:reference"):
                ftype = types[field_type[:14]]
            elif field_type.startswith("decimal"):
                # "decimal(p,s)" -> precision/scale
                precision, scale = map(int, field_type[8:-1].split(","))
                ftype = types[field_type[:7]] % dict(precision=precision, scale=scale)
            elif field_type.startswith("geo"):
                if not hasattr(self.adapter, "srid"):
                    raise RuntimeError("Adapter does not support geometry")
                srid = self.adapter.srid
                geotype, parms = field_type[:-1].split("(")
                if geotype not in types:
                    raise SyntaxError("Field: unknown field type: %s for %s"
                                      % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == "postgres" and geotype == "geometry":
                    if self.db._ignore_field_case is True:
                        field_name = field_name.lower()
                    # parameters: schema, srid, dimension
                    dimension = 2  # GIS.dimension ???
                    parms = parms.split(",")
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    # double %% keeps placeholders for the second % pass below
                    ftype = (
                        "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);"
                        % types[geotype])
                    ftype = ftype % dict(
                        schema=schema,
                        tablename=table._raw_rname,
                        fieldname=field._raw_rname,
                        srid=srid,
                        dimension=dimension,
                    )
                    # executed after CREATE TABLE (PostGIS requirement)
                    postcreation_fields.append(ftype)
            elif field_type not in types:
                raise SyntaxError("Field: unknown field type: %s for %s"
                                  % (field_type, field_name))
            else:
                ftype = types[field_type] % {"length": field.length}
            if not field_type.startswith(("id", "reference", "big-reference")):
                if field.notnull:
                    ftype += " NOT NULL"
                else:
                    ftype += self.dialect.allow_null
                if field.unique:
                    ftype += " UNIQUE"
                if field.custom_qualifier:
                    ftype += " %s" % field.custom_qualifier
            # add to list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype,
                rname=field_rname,
                raw_rname=field._raw_rname,
            )
            if field.notnull and field.default is not None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.dialect.not_null(field.default, field_type)
                ftype = ftype.replace("NOT NULL", not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # Postgres - PostGIS:
            # geometry fields are added after the table has been created, not now
            if not (self.dbengine == "postgres"
                    and field_type.startswith("geom")):
                fields.append("%s %s" % (field_rname, ftype))
        other = ";"
        # backend-specific extensions to fields
        if self.dbengine == "mysql":
            if not hasattr(table, "_primarykey"):
                fields.append("PRIMARY KEY (%s)" % (table._id._rname))
            engine = self.adapter.adapter_args.get("engine", "InnoDB")
            other = " ENGINE=%s CHARACTER SET utf8;" % engine
        # from here on, `fields` is the joined SQL fragment (str), not a list
        fields = ",\n ".join(fields)
        # render table-level (multi-column) foreign keys collected above
        for rtablename in TFK:
            rtable = db[rtablename]
            rfields = TFK[rtablename]
            pkeys = [rtable[pk]._rname for pk in rtable._primarykey]
            fk_fields = [table[rfields[k]] for k in rtable._primarykey]
            fkeys = [f._rname for f in fk_fields]
            constraint_name = self.dialect.constraint_name(
                table._raw_rname, "_".join(f._raw_rname for f in fk_fields))
            on_delete = list(set(f.ondelete for f in fk_fields))
            on_update = list(set(f.onupdate for f in fk_fields))
            # all columns of a composite FK must agree on their actions
            if len(on_delete) > 1:
                raise SyntaxError(
                    "Table %s has incompatible ON DELETE actions in multi-field foreign key."
                    % table._dalname)
            if len(on_update) > 1:
                raise SyntaxError(
                    "Table %s has incompatible ON UPDATE actions in multi-field foreign key."
                    % table._dalname)
            tfk_field_name = ", ".join(fkeys)
            tfk_foreign_key = ", ".join(pkeys)
            tfk_foreign_table = rtable._rname
            if self.dbengine == "oracle":
                tfk_field_name = ", ".join(
                    [self.dialect.quote(fkey) for fkey in fkeys])
                tfk_foreign_key = ", ".join(
                    [self.dialect.quote(pkey) for pkey in pkeys])
                tfk_foreign_table = self.dialect.quote(rtable._rname)
            fields = (fields + ",\n " + types["reference TFK"] % dict(
                constraint_name=constraint_name,
                table_name=table._rname,
                field_name=tfk_field_name,
                foreign_table=tfk_foreign_table,
                foreign_key=tfk_foreign_key,
                on_delete_action=on_delete[0],
                on_update_action=on_update[0],
            ))
        table_rname = table._rname
        if self.dbengine == "oracle":
            # must be explicitly quoted to preserve case
            table_rname = self.dialect.quote(table_rname)
        if getattr(table, "_primarykey", None):
            query = "CREATE TABLE %s(\n %s,\n %s) %s" % (
                table_rname,
                fields,
                self.dialect.primary_key(", ".join(
                    [table[pk]._rname for pk in table._primarykey])),
                other,
            )
        else:
            query = "CREATE TABLE %s(\n %s\n)%s" % (table_rname, fields, other)
        uri = self.adapter.uri
        if uri.startswith("sqlite:///") or uri.startswith("spatialite:///"):
            # NOTE(review): the fixed offset 9 matches len("sqlite://") but not
            # len("spatialite://"); confirm spatialite paths resolve correctly.
            if PY2:
                path_encoding = (sys.getfilesystemencoding()
                                 or locale.getdefaultlocale()[1] or "utf8")
                dbpath = uri[9:uri.rfind("/")].decode("utf8").encode(
                    path_encoding)
            else:
                dbpath = uri[9:uri.rfind("/")]
        else:
            dbpath = self.adapter.folder
        if not migrate:
            return query
        elif uri.startswith("sqlite:memory") or uri.startswith(
                "spatialite:memory"):
            # in-memory databases have no migration file
            table._dbt = None
        elif isinstance(migrate, string_types):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(dbpath, "%s_%s.table" % (db._uri_hash, tablename))
        if not table._dbt or not self.file_exists(table._dbt):
            # first creation: run the DDL and record the field metadata
            if table._dbt:
                self.log(
                    "timestamp: %s\n%s\n"
                    % (datetime.datetime.today().isoformat(), query),
                    table,
                )
            if not fake_migrate:
                self.adapter.create_sequence_and_triggers(query, table)
                db.commit()
                # Postgres geom fields are added now,
                # after the table has been created
                # NOTE(review): this loop rebinds `query`, so when
                # postcreation_fields is non-empty the final `return query`
                # returns the last AddGeometryColumn statement, not the
                # CREATE TABLE — confirm whether callers rely on the value.
                for query in postcreation_fields:
                    self.adapter.execute(query)
                    db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, "wb")
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    self.log("faked!\n", table)
                else:
                    self.log("success!\n", table)
        else:
            # migration file exists: load it and diff against current fields
            tfile = self.file_open(table._dbt, "rb")
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                raise RuntimeError("File %s appears corrupted" % table._dbt)
            self.file_close(tfile)
            # add missing rnames (migration files written by older versions)
            for key, item in sql_fields_old.items():
                tmp = sql_fields.get(key)
                if tmp:
                    item.setdefault("rname", tmp["rname"])
                    item.setdefault("raw_rname", tmp["raw_rname"])
                else:
                    item.setdefault("rname", self.dialect.quote(key))
                    item.setdefault("raw_rname", key)
            if sql_fields != sql_fields_old:
                self.migrate_table(
                    table,
                    sql_fields,
                    sql_fields_old,
                    sql_fields_aux,
                    None,
                    fake_migrate=fake_migrate,
                )
        return query
def setUp(self): self._orig_loc = None CryptoTestCase.setUp(self) self._orig_loc = ".".join(locale.getdefaultlocale())
def sendAnalytics( name, version="1.0.0", an="StudioLibrary", tid=None, ): """ Send an analytic event to google analytics. This is only used once and is not used to send any personal/user data. Example: # logs an event named "mainWindow" sendAnalytics("mainWindow") :type name: str :type version: str :type an: str :type tid: str :rtype: None """ def _send(url): try: url = url.replace(" ", "") f = urllib.request.urlopen(url) except Exception: pass # Ignore analytics when reloading if os.environ.get("STUDIO_LIBRARY_RELOADED") == "1": return if not studiolibrary.config().get('analyticsEnabled'): return tid = tid or studiolibrary.config().get('analyticsId') cid = userUuid() # In python 2.7 the getdefaultlocale function could return a None "ul" ul, _ = locale.getdefaultlocale() ul = ul or "" ul = ul.replace("_", "-").lower() # -- Legacy begin -- # This can be removed after October 2019 url = "http://www.google-analytics.com/collect?" \ "v=1" \ "&ul=en-us" \ "&a=448166238" \ "&_u=.sB" \ "&_v=ma1b3" \ "&qt=2500" \ "&z=185" \ "&tid={tid}" \ "&an={an}" \ "&av={av}" \ "&cid={cid}" \ "&t=appview" \ "&cd={name}" url = url.format( tid=tid, an=an, av=version, cid=cid, name=name, ) t = threading.Thread(target=_send, args=(url,)) t.start() # -- Legacy end -- # Page View tid = "UA-50172384-3" url = "https://www.google-analytics.com/collect?" \ "v=1" \ "&ul={ul}" \ "&tid={tid}" \ "&an={an}" \ "&av={av}" \ "&cid={cid}" \ "&t=pageview" \ "&dp=/{name}" \ "&dt={av}" \ url = url.format( tid=tid, an=an, av=version, cid=cid, name=name, ul=ul, ) t = threading.Thread(target=_send, args=(url,)) t.start()
__date__ = '' import os import sys import glob import random import colorsys import math import pygame from .._internals import android, get_settings_folder, get_version try: from locale import getdefaultlocale LOCALE_ENC = getdefaultlocale()[1] except ImportError: LOCALE_ENC = None # Not available on Android if LOCALE_ENC is None: LOCALE_ENC = 'utf-8' FS_ENC = sys.getfilesystemencoding() if FS_ENC is None: FS_ENC = 'utf-8' def round(number, ndigits=0): # TODO: overrides Python's round, maybe renaming? """Round half away from zero.
captcha_img_local_uri = Path(captcha_img_file.name).as_uri() self._scihub_captcha.showWindowCaptcha.emit(captcha_img_local_uri) def log(self, message, level=None): self.appendLogs.emit(message, level) if __name__ == '__main__': app_path = os.path.abspath(os.path.dirname(sys.argv[0])) os.environ['QT_QUICK_CONTROLS_CONF'] = os.path.join( app_path, 'qtquickcontrols2.conf') app = QGuiApplication(sys.argv) lang = locale.getdefaultlocale()[0] lang_file_path = os.path.join( app_path, 'translations/SciHubEVA_{lang}.qm'.format(lang=lang)) translator = QTranslator() translator.load(lang_file_path) app.installTranslator(translator) icon_file_path = os.path.join(app_path, 'images/SciHubEVA-icon.png') app.setWindowIcon(QIcon(icon_file_path)) if sys.platform == 'win32': app.setFont(QFont('Microsoft YaHei')) eva = SciHubEVA() sys.exit(app.exec_())
__builtin__.license = _Printer( "license", "See http://www.pythonlabs.com/products/python2.0/license.html", ["LICENSE.txt", "LICENSE"], [here, os.path.join(here, os.pardir), os.curdir]) # Set the string encoding used by the Unicode implementation. The # default is 'ascii', but if you're willing to experiment, you can # change this. encoding = "ascii" # Default value set by _PyUnicode_Init() if 0: # Enable to support locale aware default string encodings. import locale loc = locale.getdefaultlocale() if loc[1]: encoding = loc[1] if 0: # Enable to switch off string to Unicode coercion and implicit # Unicode to string conversion. encoding = "undefined" if encoding != "ascii": sys.setdefaultencoding(encoding) # # Run custom site specific code, if available. # try:
def _get_encoding(): encoding = locale.getdefaultlocale()[1] if not encoding: encoding = 'UTF-8' return encoding
def tree_view(root, verbose=None, use_utf8=None):
    """
    Generate tree-view of the given node

    :param root: root node
    :param verbose: verbosity (0, 1, 2, 3); >=2 shows a leaf's environment,
                    1 or 3 shows per-node values and filters
    :param use_utf8: Use utf-8 encoding (None=autodetect from the locale)
    :return: bytes representing this node's tree structure (encoded UTF-8 or
             ASCII with xmlcharrefreplace, for safe non-TTY output)
    """
    def prefixed_write(prefix1, prefix2, value):
        """
        Split value's lines and prepend empty prefix to 2nd+ lines

        :return: list of lines
        """
        value = astring.to_text(value)
        if '\n' not in value:
            return [prefix1 + prefix2 + value]
        value = value.splitlines()
        # continuation lines get whitespace instead of the key prefix
        empty_prefix2 = ' ' * len(prefix2)
        return [prefix1 + prefix2 + value[0]] + [prefix1 + empty_prefix2 + _
                                                 for _ in value[1:]]

    def process_node(node):
        """
        Generate this node's tree-view (recursive)

        :return: list of lines
        """
        # multiplex nodes are drawn with double-line box characters
        if getattr(node, "multiplex", None):
            down = charset['DoubleDown']
            down_right = charset['DoubleDownRight']
            right = charset['DoubleRight']
        else:
            down = charset['Down']
            down_right = charset['DownRight']
            right = charset['Right']
        out = [node.name]
        if verbose is not None and verbose >= 2 and node.is_leaf:
            values = itertools.chain(iter(node.environment.items()),
                                     [("filter-only", _)
                                      for _ in node.environment.filter_only],
                                     [("filter-out", _)
                                      for _ in node.environment.filter_out])
        elif verbose in (1, 3):
            values = itertools.chain(iter(node.value.items()),
                                     [("filter-only", _)
                                      for _ in node.filters[0]],
                                     [("filter-out", _)
                                      for _ in node.filters[1]])
        else:
            values = None
        # NOTE(review): an itertools.chain object is always truthy, so this
        # guard only distinguishes None from a chain — even an empty one.
        if values:
            val = charset['Value']
            if node.children:
                val_prefix = down
            else:
                val_prefix = ' '
            for key, value in values:
                out.extend(prefixed_write(val_prefix, f"{val}{key}: ", value))
        if node.children:
            # all children but the last use the tee connector
            for child in node.children[:-1]:
                lines = process_node(child)
                out.append(down_right + lines[0])
                out.extend(down + line for line in lines[1:])
            # last child uses the corner connector; its continuation lines
            # get plain whitespace since nothing follows below it
            lines = process_node(node.children[-1])
            out.append(right + lines[0])
            empty_down_right = ' ' * len(down_right)
            out.extend(empty_down_right + line for line in lines[1:])
        return out

    if use_utf8 is None:
        use_utf8 = locale.getdefaultlocale()[1] == 'UTF-8'
    if use_utf8:
        charset = {'DoubleDown': ' \u2551 ',
                   'DoubleDownRight': ' \u2560\u2550\u2550 ',
                   'DoubleRight': ' \u255a\u2550\u2550 ',
                   'Down': ' \u2503 ',
                   'DownRight': ' \u2523\u2501\u2501 ',
                   'Right': ' \u2517\u2501\u2501 ',
                   'Value': '\u2192 '}
    else:   # ASCII fallback
        charset = {'Down': ' | ',
                   'DownRight': ' |-- ',
                   'Right': ' \\-- ',
                   'DoubleDown': ' # ',
                   'DoubleDownRight': ' #== ',
                   'DoubleRight': ' #== ',
                   'Value': ' -> '}
    # The root is rendered inline here (not via process_node) because it has
    # no connector of its own and its values use an unindented prefix.
    if getattr(root, "multiplex", None):
        down = charset['DoubleDown']
        down_right = charset['DoubleDownRight']
        right = charset['DoubleRight']
    else:
        down = charset['Down']
        down_right = charset['DownRight']
        right = charset['Right']
    out = []
    if verbose is not None and verbose >= 2 and root.is_leaf:
        values = root.environment.items()
    elif verbose in (1, 3):
        values = root.value.items()
    else:
        values = None
    if values:
        prefix = charset['Value'].lstrip()
        for key, value in values:
            out.extend(prefixed_write(prefix, key + ': ', value))
    if root.children:
        for child in root.children[:-1]:
            lines = process_node(child)
            out.append(down_right + lines[0])
            out.extend(down + line for line in lines[1:])
        lines = process_node(root.children[-1])
        out.append(right + lines[0])
        out.extend(' ' * len(down_right) + line for line in lines[1:])
    # When not on TTY we need to force the encoding
    return '\n'.join(out).encode('utf-8' if use_utf8 else 'ascii',
                                 errors='xmlcharrefreplace')
for qtype in ('QString', 'QVariant'): sip.setapi(qtype, pyqt_api) except AttributeError: # Old version of sip pass except ImportError: pass #============================================================================== # Setting console encoding (otherwise Python does not recognize encoding) # for Windows platforms #============================================================================== if os.name == 'nt': try: import locale, ctypes _t, _cp = locale.getdefaultlocale('LANG') try: _cp = int(_cp[2:]) ctypes.windll.kernel32.SetConsoleCP(_cp) ctypes.windll.kernel32.SetConsoleOutputCP(_cp) except (ValueError, TypeError): # Code page number in locale is not valid pass except ImportError: pass #============================================================================== # Settings for our MacOs X app #============================================================================== if sys.platform == 'darwin': from spyderlib.baseconfig import MAC_APP_NAME
import StringIO import locale import copy import tokenize import token from sorteddict import SortedDict __all__ = ['SortedDict', 'Section', 'Ini'] try: set except: from sets import Set as set try: defaultencoding = locale.getdefaultlocale()[1] except: defaultencoding = None if not defaultencoding: defaultencoding = 'UTF-8' try: codecs.lookup(defaultencoding) except: defaultencoding = 'UTF-8' r_encoding = re.compile(r'\s*coding\s*[=:]\s*([-\w.]+)') __default_env__ = {} def set_env(env=None):
def initialize_calibre():
    """One-time process-level initialization for calibre.

    Sets up temp dirs, file-descriptor limits, multiprocessing spawn fixes,
    resources, translations, the locale, and several legacy builtins
    (lopen, icu_lower/upper/title, connect_lambda). Idempotent: a marker
    attribute on the function itself guards against re-entry.
    """
    if hasattr(initialize_calibre, 'initialized'):
        return
    initialize_calibre.initialized = True

    # Ensure that all temp files/dirs are created under a calibre tmp dir
    from calibre.ptempfile import base_dir
    try:
        base_dir()
    except EnvironmentError:
        pass  # Ignore this error during startup, so we can show a better error message to the user later.

    #
    # Ensure that the max number of open files is at least 1024
    if iswindows:
        # See https://msdn.microsoft.com/en-us/library/6e3b887c.aspx
        from calibre_extensions import winutil
        winutil.setmaxstdio(max(1024, winutil.getmaxstdio()))
    else:
        import resource
        soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
        if soft < 1024:
            try:
                resource.setrlimit(resource.RLIMIT_NOFILE, (min(1024, hard), hard))
            except Exception:
                if DEBUG:
                    import traceback
                    traceback.print_exc()

    #
    # Fix multiprocessing: make spawned children go through the calibre
    # debug executable instead of a bare python interpreter.
    from multiprocessing import spawn, util

    def get_command_line(**kwds):
        # Replacement for multiprocessing.spawn.get_command_line.
        prog = 'from multiprocessing.spawn import spawn_main; spawn_main(%s)'
        prog %= ', '.join('%s=%r' % item for item in kwds.items())
        return get_debug_executable() + ['--fix-multiprocessing', '--', prog]
    spawn.get_command_line = get_command_line
    orig_spawn_passfds = util.spawnv_passfds

    def spawnv_passfds(path, args, passfds):
        # Rewrite "python -c ..." invocations to use the calibre launcher;
        # anything else is passed through unchanged.
        try:
            idx = args.index('-c')
        except ValueError:
            return orig_spawn_passfds(args[0], args, passfds)
        patched_args = get_debug_executable() + ['--fix-multiprocessing', '--'] + args[idx + 1:]
        return orig_spawn_passfds(patched_args[0], patched_args, passfds)
    util.spawnv_passfds = spawnv_passfds

    #
    # Setup resources
    import calibre.utils.resources as resources
    resources

    #
    # Setup translations
    from calibre.utils.localization import set_translators

    set_translators()

    #
    # Initialize locale
    # Import string as we do not want locale specific
    # string.whitespace/printable, on windows especially, this causes problems.
    # Before the delay load optimizations, string was loaded before this point
    # anyway, so we preserve the old behavior explicitly.
    import string
    string
    try:
        locale.setlocale(locale.LC_ALL, '')  # set the locale to the user's default locale
    except:
        # Environment locale is broken; retry with just the language part.
        dl = locale.getdefaultlocale()
        try:
            if dl:
                locale.setlocale(locale.LC_ALL, dl[0])
        except:
            pass

    builtins.__dict__['lopen'] = open  # legacy compatibility
    from calibre.utils.icu import title_case, lower as icu_lower, upper as icu_upper
    builtins.__dict__['icu_lower'] = icu_lower
    builtins.__dict__['icu_upper'] = icu_upper
    builtins.__dict__['icu_title'] = title_case

    def connect_lambda(bound_signal, self, func, **kw):
        # Connect a Qt signal to a lambda without creating a hard reference
        # to `self` (avoids keeping the object alive via the connection).
        import weakref
        r = weakref.ref(self)
        del self
        num_args = func.__code__.co_argcount - 1
        if num_args < 0:
            raise TypeError('lambda must take at least one argument')

        def slot(*args):
            ctx = r()
            if ctx is not None:
                # Trim extra signal arguments the lambda does not accept.
                if len(args) != num_args:
                    args = args[:num_args]
                func(ctx, *args)
        bound_signal.connect(slot, **kw)
    builtins.__dict__['connect_lambda'] = connect_lambda

    if islinux or ismacos or isfreebsd:
        # Name all threads at the OS level created using the threading module, see
        # http://bugs.python.org/issue15500
        import threading
        from calibre_extensions import speedup

        orig_start = threading.Thread.start

        def new_start(self):
            orig_start(self)
            try:
                name = self.name
                if not name or name.startswith('Thread-'):
                    name = self.__class__.__name__
                    if name == 'Thread':
                        name = self.name
                if name:
                    if isinstance(name, unicode_type):
                        name = name.encode('ascii', 'replace').decode('ascii')
                    # OS thread names are limited to 15 characters.
                    speedup.set_thread_name(name[:15])
            except Exception:
                pass  # Don't care about failure to set name
        threading.Thread.start = new_start
    def execute_script(self, script, become_root=False, combine_stderr=False,
                       shell='sh -eux', timeout=None, **params):
        """Connect to remote machine and executes script.

        Implementation note: it passes script lines to shell interpreter
        via STDIN. Therefore script line number could be not available to
        some script interpreters for debugging purposes.

        :param script: script lines to be executed.

        :param become_root: executes interpreter as root with sudo.

        :param combine_stderr (bool): whenever to redirect STDERR to STDOUT
        so that output from both streams are returned together. True by
        default.

        :param shell: command line used to launch script interpreter. By
        default it executes Bash with -eux options enabled. This means that
        any command returning non-zero exist status or any any undefined
        variable would interrupt script execution with an error and every
        command executed by the script is going to be traced to STDERR.

        :param timeout: time in seconds to wait before brutally aborting
        script execution.

        :param **params: script parameter values to be assigned at the
        beginning of the script.

        :returns output written by script to STDOUT.

        :raises tempest.lib.exceptions.SSHTimeout: in case it fails to connect
        to remote server or it fails to open a channel.

        :raises tempest.lib.exceptions.SSHExecCommandFailed: in case command
        script exits with non zero exit status or times out.
        """
        if params:
            # Append script parameters at the beginning of the script
            # (sorted for a deterministic header).
            header = ''.join(sorted(["{!s}={!s}\n".format(k, v)
                                     for k, v in params.items()]))
            script = header + '\n' + script

        timeout = timeout or self.timeout
        end_of_time = time.time() + timeout
        output_data = b''
        error_data = b''
        exit_status = None

        channel = self.open_session()
        with channel:
            # Combine STOUT and STDERR to have to handle with only one stream
            channel.set_combine_stderr(combine_stderr)

            # Update local environment so the remote shell uses the same
            # locale as this process.
            # NOTE(review): both getlocale() and getdefaultlocale() can leave
            # `encoding` as None (C/POSIX locale), which would break the join
            # below and the .encode(encoding) calls later — confirm.
            lang, encoding = locale.getlocale()
            if not lang:
                lang, encoding = locale.getdefaultlocale()
            _locale = '.'.join([lang, encoding])
            channel.update_environment({'LC_ALL': _locale,
                                        'LANG': _locale})

            if become_root:
                shell = 'sudo ' + shell
            # Spawn a Bash
            channel.exec_command(shell)

            end_of_script = False
            lines_iterator = iter(script.splitlines())
            # Feed the script line by line while polling for output, until
            # the remote interpreter exits or the deadline passes.
            while (not channel.exit_status_ready() and
                   time.time() < end_of_time):
                # Drain incoming data buffers
                while channel.recv_ready():
                    output_data += channel.recv(self.buf_size)
                while channel.recv_stderr_ready():
                    error_data += channel.recv_stderr(self.buf_size)

                if not end_of_script and channel.send_ready():
                    try:
                        line = next(lines_iterator)
                    except StopIteration:
                        # Finalize Bash script execution
                        channel.shutdown_write()
                        end_of_script = True
                    else:
                        # Send script to Bash STDIN line by line
                        channel.send((line + '\n').encode(encoding))
                        continue

                time.sleep(.1)

            # Get exit status and drain incoming data buffers
            if channel.exit_status_ready():
                exit_status = channel.recv_exit_status()
                while channel.recv_ready():
                    output_data += channel.recv(self.buf_size)
                while channel.recv_stderr_ready():
                    error_data += channel.recv_stderr(self.buf_size)

        stdout = _buffer_to_string(output_data, encoding)
        if exit_status == 0:
            return stdout

        stderr = _buffer_to_string(error_data, encoding)
        # exit_status is still None when the loop above hit the deadline.
        if exit_status is None:
            raise exc.SSHScriptTimeoutExpired(
                command=shell, host=self.host, script=script, stderr=stderr,
                stdout=stdout, timeout=timeout)
        else:
            raise exc.SSHScriptFailed(
                command=shell, host=self.host, script=script, stderr=stderr,
                stdout=stdout, exit_status=exit_status)
def get_locale(): """Returns the OS's UI active locale.""" locales = locale.getdefaultlocale() if locales[0]: return u'.'.join(locales)
# Copyright (C) 2016 Osmo Salomaa # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Internationalization functions.""" import gettext import locale import pan _translation = gettext.translation( "pan-bikes", localedir=pan.LOCALE_DIR, languages=[locale.getdefaultlocale()[0] or ""], fallback=True) def _(message): """Return the localized translation of `message`.""" return _translation.gettext(message)