def set_channel_setting(name, value, channel):
    """Set the value of the given parameter in a channel's own settings.

    Stores 'value' under the key 'name' in the per-channel settings file
    ``settings_channels/<channel>_data.json`` inside the addon data path.
    If the parameter 'name' does not exist yet it is added to the file.

    @param name: parameter name
    @type name: str
    @param value: parameter value
    @type value: str
    @param channel: channel name
    @type channel: str
    @return: 'value' if it could be stored, None otherwise
    @rtype: str, None
    """
    # Create the settings folder if it does not exist yet
    settings_dir = os.path.join(config.get_data_path(), "settings_channels")
    if not os.path.exists(settings_dir):
        os.mkdir(settings_dir)

    file_settings = os.path.join(settings_dir, channel + "_data.json")
    dict_settings = {}
    dict_file = None

    if os.path.exists(file_settings):
        # Read the stored configuration from settings_channels/<channel>_data.json
        try:
            with open(file_settings, "r") as f:
                dict_file = jsontools.load_json(f.read())
            # Guard against the JSON decoding to something other than a dict
            if isinstance(dict_file, dict):
                dict_settings = dict_file.get('settings', {})
        except EnvironmentError:
            logger.info("ERROR al leer el archivo: {0}".format(file_settings))

    dict_settings[name] = value

    # Make sure dict_file is a dictionary before storing the settings back
    if not isinstance(dict_file, dict):
        dict_file = {}
    dict_file['settings'] = dict_settings

    # Write settings_channels/<channel>_data.json back to disk
    try:
        json_data = jsontools.dump_json(dict_file).encode("utf-8")
        with open(file_settings, "w") as f:
            f.write(json_data)
    except EnvironmentError:
        logger.info("[config.py] ERROR al salvar el archivo: {0}".format(
            file_settings))
        return None

    return value
def get_channel_setting(name, channel):
    """Return the value of the requested parameter from a channel's settings.

    Looks up 'name' in ``settings_channels/<channel>_data.json`` under the
    addon data path.  If the file does not exist (or lacks the parameter),
    the defaults declared in the channel's channel.xml controls are used to
    create the JSON file before returning the requested value.

    @param name: parameter name
    @type name: str
    @param channel: channel name
    @type channel: str
    @return: the value of parameter 'name', or None if it is unknown
    @rtype: str
    """
    # Create the settings folder if it does not exist yet
    settings_dir = os.path.join(config.get_data_path(), "settings_channels")
    if not os.path.exists(settings_dir):
        os.mkdir(settings_dir)

    file_settings = os.path.join(settings_dir, channel + "_data.json")
    dict_settings = {}
    dict_file = {}

    if os.path.exists(file_settings):
        # Read the stored configuration
        try:
            with open(file_settings, "rb") as f:
                dict_file = jsontools.load_json(f.read())
            if isinstance(dict_file, dict) and 'settings' in dict_file:
                dict_settings = dict_file['settings']
        except EnvironmentError:
            logger.error("ERROR al leer el archivo: %s" % file_settings)

    if not dict_settings or name not in dict_settings:
        # Fall back to the defaults declared in ../channels/channel.xml
        try:
            list_controls, default_settings = get_channel_controls_settings(channel)
        except Exception:
            default_settings = {}

        if name in default_settings:
            # The parameter exists in channel.xml: (re)create channel_data.json
            default_settings.update(dict_settings)
            dict_settings = default_settings
            # If the loaded JSON was not a dict, start from a fresh one
            if not isinstance(dict_file, dict):
                dict_file = {}
            dict_file['settings'] = dict_settings
            json_data = jsontools.dump_json(dict_file)
            try:
                with open(file_settings, "wb") as f:
                    f.write(json_data)
            except EnvironmentError:
                logger.error("ERROR al salvar el archivo: %s" % file_settings)

    # Return the local value of 'name' if present, otherwise None
    return dict_settings.get(name, None)
def get_channel_setting(name, channel):
    """Return the value of the requested parameter from a channel's settings.

    Looks up 'name' in ``settings_channels/<channel>_data.json`` under the
    addon data path.  If the file does not exist (or lacks the parameter),
    the defaults from the channel's channel.xml controls are used to create
    the JSON file before returning the requested value.

    @param name: parameter name
    @type name: str
    @param channel: channel name
    @type channel: str
    @return: the value of parameter 'name', or None if it is unknown
    @rtype: str
    """
    # Create the settings folder if it does not exist yet
    settings_dir = os.path.join(config.get_data_path(), "settings_channels")
    if not os.path.exists(settings_dir):
        os.mkdir(settings_dir)

    file_settings = os.path.join(settings_dir, channel + "_data.json")
    dict_settings = {}
    dict_file = {}

    if os.path.exists(file_settings):
        # Read the stored configuration
        try:
            with open(file_settings, "rb") as f:
                dict_file = jsontools.load_json(f.read())
            if isinstance(dict_file, dict) and 'settings' in dict_file:
                dict_settings = dict_file['settings']
        except EnvironmentError:
            logger.info("ERROR al leer el archivo: %s" % file_settings)

    if len(dict_settings) == 0 or name not in dict_settings:
        # Fall back to the defaults declared in ../channels/channel.xml
        try:
            list_controls, default_settings = get_channel_controls_settings(channel)
        except Exception:
            default_settings = {}

        if name in default_settings:
            # The parameter exists in channel.xml: (re)create channel_data.json
            default_settings.update(dict_settings)
            dict_settings = default_settings
            # If the loaded JSON was not a dict, start from a fresh one
            if not isinstance(dict_file, dict):
                dict_file = {}
            dict_file['settings'] = dict_settings
            json_data = jsontools.dump_json(dict_file)
            try:
                with open(file_settings, "wb") as f:
                    f.write(json_data)
            except EnvironmentError:
                logger.info("ERROR al salvar el archivo: %s" % file_settings)

    # Return the local value of 'name' if present
    if name in dict_settings:
        return dict_settings[name]
    else:
        return None
def get_channel_setting(name, channel):
    """Return the value of the requested parameter from a channel's settings.

    Looks up 'name' in ``settings_channels/<channel>_data.json`` under the
    addon data path.  If the file does not exist, the defaults declared in
    the channel's channel.xml controls are used to create the JSON file
    before returning the requested value.

    Parametros:
      name -- parameter name
      channel -- channel name
    Retorna:
      value -- the value of parameter 'name', or None if it is unknown
    """
    # Create the settings folder if it does not exist yet
    settings_dir = os.path.join(config.get_data_path(), "settings_channels")
    if not os.path.exists(settings_dir):
        os.mkdir(settings_dir)

    file_settings = os.path.join(settings_dir, channel + "_data.json")
    dict_settings = {}

    if os.path.exists(file_settings):
        # Read the stored configuration
        try:
            with open(file_settings, "r") as f:
                dict_file = jsontools.load_json(f.read())
            # 'in' replaces the Python-2-only dict.has_key()
            if isinstance(dict_file, dict) and 'settings' in dict_file:
                dict_settings = dict_file['settings']
        except EnvironmentError:
            logger.info("ERROR al leer el archivo: {0}".format(file_settings))

    if len(dict_settings) == 0 or name not in dict_settings:
        # Fall back to the defaults declared in ../channels/channel.xml
        from core import channeltools
        try:
            list_controls, default_settings = channeltools.get_channel_controls_settings(channel)
        except Exception:
            default_settings = {}

        if name in default_settings:
            # The parameter exists in channel.xml: create channel_data.json
            default_settings.update(dict_settings)
            dict_settings = default_settings
            dict_file = {}
            dict_file['settings'] = dict_settings
            json_data = jsontools.dump_json(dict_file).encode("utf-8")
            try:
                with open(file_settings, "w") as f:
                    f.write(json_data)
            except EnvironmentError:
                logger.info("[config.py] ERROR al salvar el archivo: {0}".format(file_settings))

    # Return the local value of 'name' if present
    return dict_settings.get(name, None)
def set_channel_setting(name, value, channel):
    """Set the value of the given parameter in a channel's own settings.

    Stores 'value' under the key 'name' in the per-channel settings file
    ``settings_channels/<channel>_data.json`` inside the addon data path.
    If the parameter 'name' does not exist yet it is added to the file.

    @param name: parameter name
    @type name: str
    @param value: parameter value
    @type value: str
    @param channel: channel name
    @type channel: str
    @return: 'value' if it could be stored, None otherwise
    @rtype: str, None
    """
    # Create the settings folder if it does not exist yet
    settings_dir = os.path.join(config.get_data_path(), "settings_channels")
    if not os.path.exists(settings_dir):
        os.mkdir(settings_dir)

    file_settings = os.path.join(settings_dir, channel + "_data.json")
    dict_settings = {}
    dict_file = None

    if os.path.exists(file_settings):
        # Read the stored configuration from settings_channels/<channel>_data.json
        try:
            with open(file_settings, "r") as f:
                dict_file = jsontools.load_json(f.read())
            # Guard against the JSON decoding to something other than a dict
            if isinstance(dict_file, dict):
                dict_settings = dict_file.get('settings', {})
        except EnvironmentError:
            logger.info("ERROR al leer el archivo: %s" % file_settings)

    dict_settings[name] = value

    # Make sure dict_file is a dictionary before storing the settings back
    if not isinstance(dict_file, dict):
        dict_file = {}
    dict_file['settings'] = dict_settings

    # Write settings_channels/<channel>_data.json back to disk
    try:
        json_data = jsontools.dump_json(dict_file)
        with open(file_settings, "w") as f:
            f.write(json_data)
    except EnvironmentError:
        logger.info("ERROR al salvar el archivo: %s" % file_settings)
        return None

    return value
def create(report_path, placeholders2data, pack=True, templatename="report"):
    """Create a report with all placeholders substituted by data.

    @param report_path: destination path of the generated HTML report
    @param placeholders2data: mapping of placeholder string -> replacement text
    @param pack: if True, pack the inlined JavaScript with jspacker
    @param templatename: base name (without extension) of the HTML template
    @raise IOError: if the template or an included resource is missing
    """
    # Read the report template (with-blocks ensure handles are closed even
    # when an exception is re-raised with a translated message)
    templatefilename = "%s.html" % templatename
    report_html_template_path = get_data_path(
        os.path.join("report", templatefilename))
    if not report_html_template_path:
        raise IOError(lang.getstr("file.missing", templatefilename))
    try:
        with codecs.open(report_html_template_path, "r",
                         "UTF-8") as report_html_template:
            report_html = report_html_template.read()
    except (IOError, OSError) as exception:
        raise exception.__class__(
            lang.getstr("error.file.open", report_html_template_path))

    # Substitute the placeholders
    for placeholder, data in placeholders2data.items():
        report_html = report_html.replace(placeholder, data)

    # Inline the CSS and (optionally packed) JavaScript resources
    for include in ("base.css", "compare.css", "print.css",
                    "jsapi-packages.js", "jsapi-patches.js",
                    "compare.constants.js", "compare.variables.js",
                    "compare.functions.js", "compare.init.js",
                    "uniformity.functions.js"):
        path = get_data_path(os.path.join("report", include))
        if not path:
            raise IOError(lang.getstr("file.missing", include))
        try:
            f = codecs.open(path, "r", "UTF-8")
        except (IOError, OSError) as exception:
            raise exception.__class__(lang.getstr("error.file.open", path))
        with f:
            if include.endswith(".js"):
                js = f.read()
                if pack:
                    packer = jspacker.JavaScriptPacker()
                    js = packer.pack(js, 62, True).strip()
                report_html = report_html.replace(
                    'src="%s">' % include,
                    ">/*<![CDATA[*/\n" + js + "\n/*]]>*/")
            else:
                report_html = report_html.replace('@import "%s";' % include,
                                                  f.read().strip())

    # Write the report
    try:
        report_html_file = codecs.open(report_path, "w", "UTF-8")
    except (IOError, OSError) as exception:
        raise exception.__class__(
            lang.getstr("error.file.create", report_path) + "\n\n" +
            safe_unicode(exception))
    with report_html_file:
        report_html_file.write(report_html)
def set_channel_setting(name, value, channel):
    """Set the value of the given parameter in a channel's own settings.

    Stores 'value' under the key 'name' in the per-channel settings file
    ``settings_channels/<channel>_data.json`` inside the addon data path.
    If the parameter 'name' does not exist yet it is added to the file.

    Parametros:
      name -- parameter name
      value -- parameter value
      channel -- channel name
    Retorna:
      'value' if it could be stored, None otherwise
    """
    # Create the settings folder if it does not exist yet
    settings_dir = os.path.join(config.get_data_path(), "settings_channels")
    if not os.path.exists(settings_dir):
        os.mkdir(settings_dir)

    file_settings = os.path.join(settings_dir, channel + "_data.json")
    dict_settings = {}

    if os.path.exists(file_settings):
        # Read the stored configuration
        try:
            with open(file_settings, "r") as f:
                dict_file = jsontools.load_json(f.read())
            # 'in' + isinstance replace the Python-2-only dict.has_key()
            if isinstance(dict_file, dict) and 'settings' in dict_file:
                dict_settings = dict_file['settings']
        except EnvironmentError:
            logger.info("ERROR al leer el archivo: {0}".format(file_settings))

    dict_settings[name] = value
    dict_file = {}
    dict_file['settings'] = dict_settings

    # Write settings_channels/<channel>_data.json back to disk
    try:
        with open(file_settings, "w") as f:
            f.write(jsontools.dump_json(dict_file))
    except EnvironmentError:
        logger.info("[config.py] ERROR al salvar el archivo: {0}".format(file_settings))
        return None

    return value
def load_data(map_size, use_econ, t):
    """Load and return a previously pickled hash map for the given
    configuration.

    `t` is presumably the number of samples dropped from the beginning of
    each game (see prepare_data, which writes these files) -- TODO confirm.
    """
    # Resolve the data directory for this (use_econ, t) combination
    path = c.get_data_path(use_econ, t)
    # NOTE(review): Python 2 code (print statement, cPickle)
    print 'load data for', path, map_size
    # Open via the project helper and unpickle the stored map
    f = c.get_data_file(path, map_size, 'r')
    hash_map = cPickle.load(f)
    f.close()
    return hash_map
def __init__(self, parent=None):
    """Build the measurement report frame.

    Loads the panel layout from xrc/report.xrc (registering the custom XML
    resource handlers first), adds the "measure" button, creates the Argyll
    worker and runs the measurement-report setup steps.
    """
    BaseFrame.__init__(self, parent, -1, lang.getstr("measurement_report"))
    self.Bind(wx.EVT_CLOSE, self.OnClose)
    self.SetIcons(config.get_icon_bundle([256, 48, 32, 16], appname))
    # Load the panel layout, registering the custom control handlers used
    # by the report controls before LoadPanel
    res = TempXmlResource(get_data_path(os.path.join("xrc", "report.xrc")))
    res.InsertHandler(xh_fancytext.StaticFancyTextCtrlXmlHandler())
    res.InsertHandler(
        xh_filebrowsebutton.FileBrowseButtonWithHistoryXmlHandler())
    res.InsertHandler(xh_hstretchstatbmp.HStretchStaticBitmapXmlHandler())
    res.InsertHandler(xh_bitmapctrls.BitmapButton())
    res.InsertHandler(xh_bitmapctrls.StaticBitmap())
    self.panel = res.LoadPanel(self, "panel")
    self.Sizer = wx.BoxSizer(wx.VERTICAL)
    self.Sizer.Add(self.panel, 1, flag=wx.EXPAND)
    # Expose child controls as attributes (by their XRC names)
    self.set_child_ctrls_as_attrs(self)
    self.measurement_report_btn = wx.Button(self.panel, -1,
                                            lang.getstr("measure"))
    self.panel.Sizer.Insert(2, self.measurement_report_btn,
                            flag=wx.RIGHT | wx.BOTTOM | wx.ALIGN_RIGHT,
                            border=16)
    # Worker used to run Argyll CMS tools; "xicclu" is used to probe the
    # installed Argyll version
    self.worker = worker.Worker(self)
    self.worker.set_argyll_version("xicclu")
    # Language setup, then the measurement-report specific init/update steps
    BaseFrame.setup_language(self)
    self.mr_setup_language()
    self.mr_init_controls()
    self.mr_update_controls()
    self.mr_init_frame()
def get_file_handle_for_reading(filename,url): logger.info("[samba.py] get_file_handle_for_reading") # Separa la URL en los elementos server_name,share_name,path,user,password = parse_url(url) # Conecta con el servidor remoto remote = connect(server_name,user,password) # Crea un fichero temporal con el bookmark logger.info("[samba.py] Crea fichero temporal") try: import xbmc localfilename = xbmc.translatePath( "special://temp" ) except: localfilename = config.get_data_path() logger.info("[samba.py] localfilename="+localfilename) localfilename = os.path.join(localfilename,"bookmark.tmp") # Lo abre bookmarkfile = open(localfilename,"wb") # Lo copia de la URL try: remote.retr_file(share_name, path + filename, bookmarkfile.write, password = password) finally: bookmarkfile.close() return open(localfilename)
def __init__(self):
    """Load the HMM probability tables and the standard address library.

    Reads pickled start/transition/emission probability tables plus an
    Excel file with standard address areas, all from the data path.
    """
    try:
        # Pickled HMM parameters (start, transition, emission probabilities)
        self.start_p = load_cache(
            os.path.join(config.get_data_path(), 'start_p.p'))
        self.trans_p = load_cache(
            os.path.join(config.get_data_path(), 'trans_p.p'))
        self.emit_p = load_cache(
            os.path.join(config.get_data_path(), 'emit_p.p'))
        # Reduced emission table -- presumably for the 'd' (district?)
        # level; TODO confirm what 'd' selects in get_mini_emit_p
        self.mini_d_emit_p = self.get_mini_emit_p('d')
        # Reference table of standard address areas; NaNs become ''
        standard_address_library = pd.read_excel(
            os.path.join(config.get_data_path(), 'adress_area.xlsx'))
        self.standard_address_library = standard_address_library.fillna('')
        # Timestamp of construction plus a per-stage timing accumulator
        self.time = datetime.datetime.now()
        self.time_takes = {}
    except Exception:
        raise
def update(item):
    """Download and install the latest published plugin package.

    Reads the latest package list from the remote API, downloads the
    "plugin" package into the user data folder, installs it and records
    the new version number.

    @param item: menu item that triggered the update (unused here)
    """
    logger.info("pelisalacarta.core.updater update")

    # Defaults in case the API returns no "plugin" package
    published_version_url = ""
    published_version_filename = ""
    published_version_number = ""

    # Read the remote version
    from core import api
    latest_packages = api.plugins_get_latest_packages()

    for latest_package in latest_packages["body"]:
        if latest_package["package"] == "plugin":
            published_version_url = latest_package["url"]
            published_version_filename = latest_package["filename"]
            published_version_number = latest_package["version"]
            break

    # Fix: the original continued with unbound published_version_number
    # (NameError) and an empty download URL when no plugin was published
    if not published_version_url:
        logger.info("pelisalacarta.core.updater update: no plugin package found")
        return

    # The URL comes from the API; download into "userdata"
    remotefilename = published_version_url
    localfilename = os.path.join(config.get_data_path(),
                                 published_version_filename)

    download_and_install(remotefilename, localfilename)
    set_current_plugin_version(published_version_number)
def write_file(filename,filecontent,url):
    """Write `filecontent` to `filename` on the Samba share given by `url`,
    staging the content through a local temporary file."""
    # Break the URL down into its components and connect to the host
    server_name, share_name, path, user, password = parse_url(url)
    remote = connect(server_name, user, password)

    # Stage the content in a local temporary file
    logger.info("Crea fichero temporal")
    try:
        import xbmc
        tmp_dir = xbmc.translatePath("special://temp")
    except:
        tmp_dir = config.get_data_path()
    logger.info("localfilename=" + tmp_dir)
    localfilename = os.path.join(tmp_dir, "bookmark.tmp")

    with open(localfilename, "w") as staging:
        staging.write(filecontent)
        staging.flush()

    # Push the staged file to the Samba directory
    logger.info("Crea el fichero remoto")
    with open(localfilename, "rb") as staging:
        remote.stor_file(share_name, path + "/" + filename, staging.read)

    # Clean up the temporary file
    logger.info("Borra el fichero local")
    os.remove(localfilename)
def write_file(filename, filecontent, url):
    """Write `filecontent` to `filename` on the Samba share given by `url`,
    staging the content through a local temporary file."""
    # Split the URL into its elements
    server_name, share_name, path, user, password = parse_url(url)

    # Connect to the remote server
    remote = connect(server_name, user, password)

    # Create a temporary local file with the content
    logger.info("Crea fichero temporal")
    try:
        import xbmc
        localfilename = xbmc.translatePath("special://temp")
    except:
        # Outside Kodi/XBMC fall back to the addon data path
        localfilename = config.get_data_path()
    logger.info("localfilename=" + localfilename)
    localfilename = os.path.join(localfilename, "bookmark.tmp")
    bookmarkfile = open(localfilename, "w")
    bookmarkfile.write(filecontent)
    bookmarkfile.flush()
    bookmarkfile.close()

    # Copy the temporary file to the Samba directory
    logger.info("Crea el fichero remoto")
    bookmarkfile = open(localfilename, "rb")
    remote.stor_file(share_name, path + "/" + filename, bookmarkfile.read)
    bookmarkfile.close()

    # Remove the temporary file
    logger.info("Borra el fichero local")
    os.remove(localfilename)
def get_file_handle_for_reading(filename, url):
    """Download a file from a Samba share into a local temporary file and
    return an open handle to the local copy."""
    logger.info("[samba.py] get_file_handle_for_reading")

    # Break the URL down into its components and connect to the host
    server_name, share_name, path, user, password = parse_url(url)
    remote = connect(server_name, user, password)

    # Pick a directory for the temporary copy
    logger.info("[samba.py] Crea fichero temporal")
    try:
        import xbmc
        temp_dir = xbmc.translatePath("special://temp")
    except:
        temp_dir = config.get_data_path()
    logger.info("[samba.py] localfilename=" + temp_dir)
    localfilename = os.path.join(temp_dir, "bookmark.tmp")

    # Download the remote file into the temporary file, making sure the
    # write handle is closed even if the transfer fails
    destination = open(localfilename, "wb")
    try:
        remote.retr_file(share_name, path + filename, destination.write,
                         password=password)
    finally:
        destination.close()

    return open(localfilename)
def load(self, path=None, encoding=None, errors=None, raise_exceptions=False):
    """Load and parse the file at `path` (or `self.path`) exactly once.

    Relative paths are resolved against the data path.  A missing file is
    reported through `handle_error` as a warning; parse errors are either
    re-raised (when `raise_exceptions` is true) or reported the same way.
    """
    # Only load once, and only if there is actually a path to load from
    if not self._isloaded and (path or self.path):
        self._isloaded = True
        if not path:
            path = self.path
        # Resolve relative paths against the application data path
        if path and not os.path.isabs(path):
            path = get_data_path(path)
        if path and os.path.isfile(path):
            self.path = path
            # Remember explicit encoding/errors overrides on the instance
            if encoding:
                self.encoding = encoding
            if errors:
                self.errors = errors
        else:
            handle_error(UserWarning("Warning - file not found:\n\n%s" %
                                     safe_unicode(path)), tb=False)
            return
        try:
            # "rU" = universal newlines (legacy text mode)
            with codecs.open(path, "rU", self.encoding, self.errors) as f:
                self.parse(f)
        except EnvironmentError as exception:
            if raise_exceptions:
                raise
            handle_error(exception)
        except Exception as exception:
            # Any other failure is treated as a parse error
            if raise_exceptions:
                raise
            handle_error(UserWarning("Error parsing file:\n\n%s\n\n%s" %
                                     tuple(safe_unicode(s) for s in
                                           (path, exception))), tb=False)
def check_argyll_bin(paths=None):
    """ Check if the Argyll binaries can be found.

    All required executables must live in the same directory; optional
    ones only produce a warning when elsewhere.  On success some
    data-file-dependent config defaults are (re)set.  Returns True when
    the binaries check out, False otherwise.
    """
    prev_dir = None
    for name in argyll_names:
        exe = get_argyll_util(name, paths)
        if not exe:
            # Missing optional tools are tolerated; required ones are not
            if name in argyll_optional:
                continue
            return False
        cur_dir = os.path.dirname(exe)
        if prev_dir:
            if cur_dir != prev_dir:
                if name in argyll_optional:
                    if verbose:
                        safe_print("Warning: Optional Argyll "
                                   "executable %s is not in the same "
                                   "directory as the main executables "
                                   "(%s)." % (exe, prev_dir))
                else:
                    if verbose:
                        safe_print("Error: Main Argyll "
                                   "executable %s is not in the same "
                                   "directory as the other executables "
                                   "(%s)." % (exe, prev_dir))
                    return False
        else:
            # Remember the directory of the first executable found
            prev_dir = cur_dir
    # NOTE(review): cur_dir is unbound if argyll_names is empty -- presumably
    # it never is; confirm
    if verbose >= 3:
        safe_print("Argyll binary directory:", cur_dir)
    if debug:
        safe_print("[D] check_argyll_bin OK")
    if debug >= 2:
        # Show the effective search path, with the configured Argyll dir first
        if not paths:
            paths = getenvu("PATH", os.defpath).split(os.pathsep)
        argyll_dir = (getcfg("argyll.dir") or "").rstrip(os.path.sep)
        if argyll_dir:
            if argyll_dir in paths:
                paths.remove(argyll_dir)
            paths = [argyll_dir] + paths
        safe_print("[D] Searchpath:\n ", "\n ".join(paths))
    # Fedora doesn't ship Rec709.icm
    config.defaults["3dlut.input.profile"] = get_data_path(os.path.join("ref", "Rec709.icm")) or \
        get_data_path(os.path.join("ref", "sRGB.icm")) or ""
    config.defaults["testchart.reference"] = get_data_path(
        os.path.join("ref", "ColorChecker.cie")) or ""
    config.defaults["gamap_profile"] = get_data_path(
        os.path.join("ref", "sRGB.icm")) or ""
    return True
def play(item, ItemVideo):
    """Play the given video item in XBMC according to the configured
    player mode (direct, download-and-play, setResolvedUrl or built-in).

    Also downloads the item's subtitle (if any) to subtitulo.srt in the
    addon data path before playback.
    """
    import xbmc
    import xbmcgui
    import xbmcplugin

    if not ItemVideo == None:
        # NOTE(review): url is indexed positionally -- url[1] is the media
        # URL and url[2] (optional) a wait time; confirm against callers
        mediaurl = ItemVideo.url[1]
        if len(ItemVideo.url) > 2:
            wait_time = ItemVideo.url[2]
        else:
            wait_time = 0

        if wait_time > 0:
            # NOTE(review): `server` is not defined in this function --
            # presumably a module-level global; confirm
            handle_wait(wait_time, server, "Cargando vídeo...")

        # Build the list item shown by the player
        xlistitem = xbmcgui.ListItem(item.title, iconImage="DefaultVideo.png",
                                     thumbnailImage=item.thumbnail,
                                     path=mediaurl)
        xlistitem.setInfo("video", {"Title": item.title, "Plot": item.plot,
                                    "Studio": item.channel,
                                    "Genre": item.category})

        if item.subtitle != "":
            # Download the subtitle to subtitulo.srt, replacing any old one
            import os
            ficherosubtitulo = os.path.join(config.get_data_path(),
                                            'subtitulo.srt')
            if os.path.exists(ficherosubtitulo):
                os.remove(ficherosubtitulo)
            from core import scrapertools
            data = scrapertools.cache_page(item.subtitle)
            fichero = open(ficherosubtitulo, "w")
            fichero.write(data)
            fichero.close()

        if config.get_setting("player_mode") == "3":
            # download_and_play
            import download_and_play
            download_and_play.download_and_play(
                mediaurl, "download_and_play.tmp",
                config.get_setting("downloadpath"))
        elif config.get_setting("player_mode") == "0" or \
                (config.get_setting("player_mode") == "3" and
                 mediaurl.startswith("rtmp")):
            # Direct
            # NOTE(review): the mode "3" sub-condition is unreachable here
            # because mode "3" is handled by the branch above -- confirm
            playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
            playlist.clear()
            playlist.add(mediaurl, xlistitem)
            # Map the player_type setting to an XBMC player core
            playersettings = config.get_setting('player_type')
            player_type = xbmc.PLAYER_CORE_AUTO
            if playersettings == "0":
                player_type = xbmc.PLAYER_CORE_AUTO
                logger.info("[xbmctools.py] PLAYER_CORE_AUTO")
            elif playersettings == "1":
                player_type = xbmc.PLAYER_CORE_MPLAYER
                logger.info("[xbmctools.py] PLAYER_CORE_MPLAYER")
            elif playersettings == "2":
                player_type = xbmc.PLAYER_CORE_DVDPLAYER
                logger.info("[xbmctools.py] PLAYER_CORE_DVDPLAYER")
            xbmcPlayer = xbmc.Player(player_type)
            xbmcPlayer.play(playlist)
        elif config.get_setting("player_mode") == "1":
            # setResolvedUrl
            pass
        elif config.get_setting("player_mode") == "2":
            # Built-in
            xbmc.executebuiltin("PlayMedia(" + mediaurl + ")")
def extract(self, file, dir, folder_to_extract="", overwrite_question=False, backup=False):
    """Extract a zip archive into `dir`.

    If `folder_to_extract` is given, only files whose parent directory is
    exactly that folder are extracted, flattened directly into `dir`.
    Optionally asks before overwriting existing files and keeps a
    timestamped backup copy of files that would be overwritten.
    """
    logger.info("file=%s" % file)
    logger.info("dir=%s" % dir)

    # Create the destination directory unless it is a drive root
    if not dir.endswith(':') and not os.path.exists(dir):
        os.mkdir(dir)

    zf = zipfile.ZipFile(file)
    if not folder_to_extract:
        self._createstructure(file, dir)
    num_files = len(zf.namelist())  # NOTE(review): computed but never used

    for name in zf.namelist():
        logger.info("name=%s" % name)
        if not name.endswith('/'):
            content = zf.read(name)
            # GitHub zipballs prefix a '-master' folder; strip it
            name = name.replace('-master', '')
            logger.info("no es un directorio")
            try:
                (path, filename) = os.path.split(os.path.join(dir, name))
                logger.info("path=%s" % path)
                logger.info("name=%s" % name)
                if folder_to_extract:
                    # NOTE(review): `break` aborts the whole extraction at
                    # the first entry outside the target folder -- confirm
                    # this is intended rather than `continue`
                    if path != os.path.join(dir, folder_to_extract):
                        break
                else:
                    os.makedirs(path)
            except:
                pass
            if folder_to_extract:
                outfilename = os.path.join(dir, filename)
            else:
                outfilename = os.path.join(dir, name)
            logger.info("outfilename=%s" % outfilename)
            try:
                if os.path.exists(outfilename) and overwrite_question:
                    # Ask the user whether to overwrite; a "no" stops the
                    # whole extraction (break, not continue)
                    from platformcode import platformtools
                    dyesno = platformtools.dialog_yesno("Il file esiste già", "Il file %s esiste già" \
                                                        ", vuoi sovrascrivere?" \
                                                        % os.path.basename(outfilename))
                    if not dyesno:
                        break
                if backup:
                    # Keep a timestamped backup copy of the existing file
                    import time
                    import shutil
                    hora_folder = "Copia seguridad [%s]" % time.strftime("%d-%m_%H-%M", time.localtime())
                    # NOTE(review): `backup` (the parameter) is rebound to a
                    # path string here; still truthy, so behavior holds
                    backup = os.path.join(config.get_data_path(), 'backups', hora_folder, folder_to_extract)
                    if not os.path.exists(backup):
                        os.makedirs(backup)
                    shutil.copy2(outfilename, os.path.join(backup, os.path.basename(outfilename)))
                outfile = open(outfilename, 'wb')
                outfile.write(content)
            except:
                logger.info("Error en fichero " + name)
def get_channel_setting(name, channel, default=None):
    """Return the value of the requested parameter from a channel's settings.

    Looks up 'name' in ``settings_channels/<channel>_data.json`` under the
    addon data path.  If the file does not exist, the defaults from the
    channel's channel.json controls are used to create it first.  If 'name'
    is not found there either, `default` is returned.

    @param name: parameter name
    @type name: str
    @param channel: channel name
    @type channel: str
    @param default: value returned when the parameter does not exist
    @type default: any
    @return: the value of parameter 'name'
    @rtype: type of the stored value
    """
    settings_dir = os.path.join(config.get_data_path(), "settings_channels")
    file_settings = os.path.join(settings_dir, channel + "_data.json")
    dict_settings = {}
    dict_file = {}

    if os.path.exists(file_settings):
        # Read the stored configuration
        try:
            with open(file_settings, "rb") as f:
                dict_file = jsontools.load(f.read())
            if isinstance(dict_file, dict) and 'settings' in dict_file:
                dict_settings = dict_file['settings']
        except EnvironmentError:
            logger.error("ERROR al leer el archivo: %s" % file_settings)

    if not dict_settings or name not in dict_settings:
        # Fall back to the defaults declared in ../channels/channel.json
        try:
            list_controls, default_settings = get_channel_controls_settings(channel)
        except Exception:
            default_settings = {}

        if name in default_settings:
            # The parameter exists in channel.json: (re)create channel_data.json
            default_settings.update(dict_settings)
            dict_settings = default_settings
            if not isinstance(dict_file, dict):
                dict_file = {}
            dict_file['settings'] = dict_settings
            json_data = jsontools.dump(dict_file)
            try:
                # Fix: make sure the settings folder exists before writing
                # (the original assumed it was already created elsewhere)
                if not os.path.exists(settings_dir):
                    os.mkdir(settings_dir)
                with open(file_settings, "wb") as f:
                    f.write(json_data)
            except EnvironmentError:
                logger.error("ERROR al salvar el archivo: %s" % file_settings)

    # Return the local value of 'name' if present, otherwise `default`
    return dict_settings.get(name, default)
def getDownloadListPath():
    """Return the path of the download-list folder.

    Reads the 'downloadlistpath' plugin setting; when unset, derives it
    from the Telebision skin setting or falls back to
    <data_path>/downloads/list, creating the folder and persisting the
    setting.  Errors are swallowed deliberately (best effort).
    """
    # The download-list path is a plugin setting
    downloadpath = config.get_setting("downloadlistpath")

    # Not set yet: try to force a value
    try:
        if downloadpath == "":
            logger.info("[downloadtools.py] downloadpath está vacio")

            # Look for a skin setting (Telebision)
            try:
                import xbmc
                downloadpath = xbmc.getInfoLabel('Skin.String(downloadpath)')
                logger.info("[downloadtools.py] downloadpath en el skin es " +
                            downloadpath)
            except:
                pass

            # Not Telebision: force the XBMC home directory
            if downloadpath == "":
                downloadpath = os.path.join(config.get_data_path(),
                                            "downloads", "list")
                logger.info(
                    "[downloadtools.py] getDownloadPath: downloadpath=%s"
                    % downloadpath)
                if not os.path.exists(downloadpath):
                    logger.info(
                        "[downliadtools.py] download path doesn't exist:"
                        + downloadpath)
                    os.mkdir(downloadpath)
                config.set_setting("downloadlistpath", downloadpath)

            # Telebision: persist the skin value as a plugin setting
            else:
                downloadpath = os.path.join(downloadpath, "list")
                downloadpath = xbmc.translatePath(downloadpath)
                logger.info("[downloadtools.py] downloadpath nativo es " +
                            downloadpath)
                config.set_setting("downloadlistpath", downloadpath)
    except:
        pass

    logger.info("[downloadtools.py] downloadlistpath=" + downloadpath)

    # Make sure the folder exists (ignore errors, e.g. already exists)
    try:
        os.mkdir(downloadpath)
    except:
        pass

    return downloadpath
def prepare_data(t):
    """Pickle the prepared data set for every (use_econ, map_size) pair.

    For each combination the data directory is created if needed and the
    result of `get_data` is dumped via the `c` helper module.
    """
    # t is the number of samples to drop from beginning of each game
    # NOTE(review): Python 2 code (print statement, cPickle)
    for use_econ in (True, False):
        print '\n\nUse econ', use_econ
        path = c.get_data_path(use_econ, t)
        if not os.path.isdir(path):
            os.makedirs(path)
        for map_size in c.map_sizes:
            print '\nMap size', map_size
            data = get_data(use_econ, t, map_size)
            f = c.get_data_file(path, map_size, 'w')
            cPickle.dump(data, f)
            f.close()
def getDownloadListPath():
    """Return the folder that holds the download list, creating it and
    persisting it in the plugin settings when necessary."""
    # The download-list location is stored as a plugin setting
    download_path = config.get_setting("downloadlistpath")

    try:
        if download_path == "":
            # Setting is empty: try to derive a value
            logger.info("[downloadtools.py] downloadpath está vacio")

            # A Telebision skin may provide the location
            try:
                import xbmc
                download_path = xbmc.getInfoLabel('Skin.String(downloadpath)')
                logger.info("[downloadtools.py] downloadpath en el skin es " + download_path)
            except:
                pass

            if download_path != "":
                # Telebision: translate and persist the skin value
                download_path = xbmc.translatePath(os.path.join(download_path, "list"))
                logger.info("[downloadtools.py] downloadpath nativo es " + download_path)
                config.set_setting("downloadlistpath", download_path)
            else:
                # No skin value: default below the XBMC user data folder
                download_path = os.path.join(config.get_data_path(), "downloads", "list")
                logger.info("[downloadtools.py] getDownloadPath: downloadpath=%s" % download_path)
                if not os.path.exists(download_path):
                    logger.info("[downliadtools.py] download path doesn't exist:" + download_path)
                    os.mkdir(download_path)
                config.set_setting("downloadlistpath", download_path)
    except:
        pass

    logger.info("[downloadtools.py] downloadlistpath=" + download_path)

    # Best-effort creation of the folder itself
    try:
        os.mkdir(download_path)
    except:
        pass

    return download_path
def actualiza(item):
    """Download the deportesalacarta update zip for `item.version` and
    extract it over the installed addons folder, reporting progress and
    errors through dialogs."""
    logger.info("deportesalacarta.channels.update_sports actualiza")
    # Destination: Kodi's addons folder
    local_folder = os.path.join(xbmc.translatePath("special://home"), "addons")
    error = False  # NOTE(review): set but never read in this function
    url = "https://github.com/CmosGit/Mod_pelisalacarta_deportes/raw/addon/plugin.video.deportesalacarta-%s.zip" % item.version
    progreso = platformtools.dialog_progress("Progreso de la actualización", "Descargando...")
    filename = 'deportesalacarta-%s.zip' % item.version
    localfilename = filetools.join(config.get_data_path(), filename)
    try:
        # Download (resuming if partially present) and unpack the zip
        result = downloadtools.downloadfile(url, localfilename, continuar=True)
        progreso.update(50, "Descargando archivo", "Descargando...")
        # Unzip over the addons folder
        logger.info("deportesalacarta.channels.configuracion descomprime fichero...")
        from core import ziptools
        unzipper = ziptools.ziptools()
        logger.info("deportesalacarta.channels.configuracion destpathname=%s" % local_folder)
        unzipper.extract(localfilename, local_folder, update=True)
        progreso.close()
    except:
        # On any failure: log, clean up the zip and inform the user
        import traceback
        logger.info("Detalle del error: %s" % traceback.format_exc())
        filetools.remove(localfilename)
        progreso.close()
        platformtools.dialog_ok("Error", "Se ha producido un error extrayendo el archivo")
        return
    # Remove the downloaded zip
    logger.info("deportesalacarta.channels.configuracion borra fichero...")
    filetools.remove(localfilename)
    logger.info("deportesalacarta.channels.configuracion ...fichero borrado")
    platformtools.dialog_ok("Actualizado correctamente", "Versión %s instalada con éxito" % item.version)
    xbmc.executebuiltin("Container.Refresh")
def update(item):
    """Check the remote API for the latest plugin package and install it.

    Queries api.plugins_get_latest_packages(), picks the first entry whose
    "package" field is "plugin" and downloads it into the user data folder
    before installing.

    @param item: unused here; kept for the updater entry-point signature
    """
    logger.info("pelisalacarta.core.updater update")

    # Defaults in case the API returns nothing usable
    published_version_url = ""
    published_version_filename = ""

    # Read the remote version (FIX: dropped the unused version-number binding)
    from core import api
    latest_packages = api.plugins_get_latest_packages()
    for latest_package in latest_packages["body"]:
        if latest_package["package"] == "plugin":
            published_version_url = latest_package["url"]
            published_version_filename = latest_package["filename"]
            break

    # FIX: the original fell through with an empty URL when no "plugin"
    # package was listed and then tried to download ""; bail out instead.
    if not published_version_url:
        logger.info("pelisalacarta.core.updater update: no plugin package found")
        return

    # The URL comes from the API; download into "userdata"
    remotefilename = published_version_url
    localfilename = os.path.join(config.get_data_path(), published_version_filename)
    download_and_install(remotefilename, localfilename)
def extract(self, file, dir, folder_to_extract="", overwrite_question=False, backup=False, update=False):
    """Extract a zip archive into *dir*.

    @param file: path of the zip archive
    @param dir: destination directory (created when missing)
    @param folder_to_extract: when set, only files of that folder are
        extracted, flattened directly into *dir*
    @param overwrite_question: ask before overwriting existing files
    @param backup: save a timestamped copy of overwritten files
    @param update: show a progress dialog while extracting
    """
    logger.info("file=%s" % file)
    logger.info("dir=%s" % dir)

    if update:
        progreso = platformtools.dialog_progress("Descomprimiendo", "Extrayendo archivos de la nueva versión")

    if not dir.endswith(':') and not os.path.exists(dir):
        os.mkdir(dir)

    zf = zipfile.ZipFile(file)
    if not folder_to_extract:
        self._createstructure(file, dir)
    lenght = len(zf.namelist())

    for i, name in enumerate(zf.namelist()):
        if update:
            # FIX: the original computed "lenght / 100 + i", which is not a
            # percentage at all; report real progress instead.
            percent = i * 100 / lenght
            progreso.update(percent, "Descomprimiendo", "Extrayendo archivos de la nueva versión")
        logger.info("name=%s" % name)

        # Directory entries end with '/' and carry no data
        if name.endswith('/'):
            continue
        logger.info("no es un directorio")

        try:
            path, filename = os.path.split(os.path.join(dir, name))
            logger.info("path=%s" % path)
            logger.info("name=%s" % name)
            if folder_to_extract:
                # FIX: the original compared against an undefined name
                # "folder" (a NameError silently eaten by the bare except,
                # so the filter never worked); use the requested folder.
                if path != os.path.join(dir, folder_to_extract):
                    break
            else:
                os.makedirs(path)
        except:
            pass

        if folder_to_extract:
            outfilename = os.path.join(dir, filename)
        else:
            outfilename = os.path.join(dir, name)
        logger.info("outfilename=%s" % outfilename)

        try:
            if os.path.exists(outfilename) and overwrite_question:
                dyesno = platformtools.dialog_yesno("El archivo ya existe",
                                                    "El archivo %s a descomprimir ya existe, ¿desea sobrescribirlo?" % os.path.basename(outfilename))
                if not dyesno:
                    break

            if backup:
                import time
                import shutil
                hora_folder = "Copia seguridad [%s]" % time.strftime("%d-%m_%H-%M", time.localtime())
                # FIX: use a separate name instead of clobbering the
                # "backup" parameter with the backup folder path
                backup_folder = os.path.join(config.get_data_path(), 'backups', hora_folder, folder_to_extract)
                if not os.path.exists(backup_folder):
                    os.makedirs(backup_folder)
                shutil.copy2(outfilename, os.path.join(backup_folder, os.path.basename(outfilename)))

            # FIX: close the output handle (the original leaked it)
            outfile = open(outfilename, 'wb')
            try:
                outfile.write(zf.read(name))
            finally:
                outfile.close()
        except:
            logger.info("Error en fichero " + name)

    if update:
        progreso.close()
def downloadpageGzip(url):
    """Fetch *url* with cookie support and transparently gunzip the reply.

    Loads/saves the Mozilla-format cookie jar from the plugin data folder,
    requests the page advertising gzip support and returns the decompressed
    body — or the raw bytes when decompression fails.

    @param url: address to download
    @type url: str
    @return: page contents
    @rtype: str
    """
    # Initialise the cookie machinery
    ficherocookies = os.path.join(config.get_data_path(), 'cookies.dat')
    logger.info("Cookiefile=" + ficherocookies)

    inicio = time.clock()

    cj = None
    ClientCookie = None
    cookielib = None

    # Let's see if cookielib is available
    try:
        import cookielib
    except ImportError:
        # If importing cookielib fails
        # let's try ClientCookie
        try:
            import ClientCookie
        except ImportError:
            # ClientCookie isn't available either
            urlopen = urllib2.urlopen
            Request = urllib2.Request
        else:
            # imported ClientCookie
            urlopen = ClientCookie.urlopen
            Request = ClientCookie.Request
            cj = ClientCookie.MozillaCookieJar()
    else:
        # importing cookielib worked
        urlopen = urllib2.urlopen
        Request = urllib2.Request
        cj = cookielib.MozillaCookieJar()
        # This is a subclass of FileCookieJar
        # that has useful load and save methods

    # ---------------------------------
    # Install the cookies
    # ---------------------------------
    if cj is not None:
        # we successfully imported
        # one of the two cookie handling modules
        if os.path.isfile(ficherocookies):
            # if we have a cookie file already saved
            # then load the cookies into the Cookie Jar
            try:
                cj.load(ficherocookies)
            except:
                # Corrupt cookie file: delete it and start fresh
                logger.info("[scrapertools.py] El fichero de cookies existe pero es ilegible, se borra")
                os.remove(ficherocookies)

        # Now we need to get our Cookie Jar
        # installed in the opener;
        # for fetching URLs
        if cookielib is not None:
            # if we use cookielib
            # then we get the HTTPCookieProcessor
            # and install the opener in urllib2
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            urllib2.install_opener(opener)
        else:
            # if we use ClientCookie
            # then we get the HTTPCookieProcessor
            # and install the opener in ClientCookie
            opener = ClientCookie.build_opener(ClientCookie.HTTPCookieProcessor(cj))
            ClientCookie.install_opener(opener)

    #print "-------------------------------------------------------"
    theurl = url
    # an example url that sets a cookie,
    # try different urls here and see the cookie collection you can make !

    #txheaders = {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
    #             'Referer':'http://www.megavideo.com/?s=signup'}

    parsedurl = urlparse.urlparse(url)
    logger.info("parsedurl=" + str(parsedurl))

    # Browser-like headers; Referer is rebuilt as scheme://netloc of the url
    txheaders = {
        'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'es-es,es;q=0.8,en-us;q=0.5,en;q=0.3',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
        'Accept-Encoding': 'gzip,deflate',
        'Keep-Alive': '300',
        'Connection': 'keep-alive',
        'Referer': parsedurl[0] + "://" + parsedurl[1]
    }
    logger.info(str(txheaders))

    # fake a user agent, some websites (like google) don't like automated exploration
    req = Request(theurl, None, txheaders)
    handle = urlopen(req)
    cj.save(ficherocookies)  # save the cookies again

    data = handle.read()
    handle.close()
    fin = time.clock()
    logger.info("[scrapertools.py] Descargado 'Gzipped data' en %d segundos " % (fin - inicio + 1))

    # Decompress the gzipped payload; on any failure fall back to raw bytes
    try:
        fin = inicio
        import StringIO
        compressedstream = StringIO.StringIO(data)
        import gzip
        gzipper = gzip.GzipFile(fileobj=compressedstream)
        data1 = gzipper.read()
        gzipper.close()
        fin = time.clock()
        logger.info("[scrapertools.py] 'Gzipped data' descomprimido en %d segundos " % (fin - inicio + 1))
        return data1
    except:
        return data
def actualiza(item):
    """Download and install the update package for the given version.

    When item.url is empty the zip comes straight from the project GitHub
    repository; otherwise the download link is resolved through servertools
    from the hosting page referenced by item.url/item.server. The zip is
    extracted over the local addons folder and the Kodi container refreshed.

    @param item: item carrying version and, optionally, url/server of the
        hosted package
    """
    logger.info("deportesalacarta.channels.update_sports actualiza")

    local_folder = os.path.join(xbmc.translatePath("special://home"), "addons")
    error = False  # NOTE(review): never read afterwards — candidate for removal
    if not item.url:
        url = "https://github.com/CmosGit/Mod_pelisalacarta_deportes/raw/addon/plugin.video.deportesalacarta-%s.zip" % item.version
    else:
        import servertools
        urls, puede, msg = servertools.resolve_video_urls_for_playing(
            item.server, item.url, "", False, True)
        if puede:
            data_ = httptools.downloadpage(urls[0], hide=True).data
            # Try the JSON "downloadUrl" field first, then the download button
            url = scrapertools.find_single_match(
                data_, '"downloadUrl"\s*:\s*"([^"]+)"')
            if not url:
                url = scrapertools.find_single_match(
                    data_, '<a id="download_button".*?href="([^"]+)"')
            if not item.server and not url:
                # Some hosts require POSTing a hidden form field before
                # exposing the download link; best effort, failures ignored
                try:
                    name, value = scrapertools.find_single_match(
                        data_, 'method="post">.*?name="([^"]+)" value="([^"]+)"')
                    post = "%s=%s" % (name, value)
                    data_ = httptools.downloadpage(urls[0], post, hide=True).data
                    url = scrapertools.find_single_match(
                        data_, '"downloadUrl"\s*:\s*"([^"]+)"')
                except:
                    pass
        # NOTE(review): if puede is False, "url" is unbound here and this
        # check raises NameError — confirm upstream whether that can happen
        if not url:
            # Last resort: treat item.url as base64-encoded
            urls, puede, msg = servertools.resolve_video_urls_for_playing(
                item.server, base64.b64decode(item.url))
            url = urls[0][1]

    progreso = platformtools.dialog_progress("Progreso de la actualización",
                                             "Descargando...")
    filename = 'deportesalacarta-%s.zip' % item.version
    localfilename = filetools.join(config.get_data_path(), filename)

    try:
        result = downloadtools.downloadfile(url, localfilename, [], False, True, False)
        progreso.update(50, "Descargando archivo", "Descargando...")

        # Extract it in place
        logger.info(
            "deportesalacarta.channels.configuracion descomprime fichero...")
        from core import ziptools
        unzipper = ziptools.ziptools()
        logger.info("deportesalacarta.channels.configuracion destpathname=%s"
                    % local_folder)
        unzipper.extract(localfilename, local_folder, update=True)
        progreso.close()
    except:
        import traceback
        logger.info("Detalle del error: %s" % traceback.format_exc())
        # Remove the downloaded zip (it may not exist if the download failed)
        try:
            filetools.remove(localfilename)
        except:
            pass
        progreso.close()
        platformtools.dialog_ok(
            "Error", "Se ha producido un error extrayendo el archivo")
        return

    # Remove the downloaded zip
    logger.info("deportesalacarta.channels.configuracion borra fichero...")
    try:
        filetools.remove(localfilename)
    except:
        pass
    logger.info("deportesalacarta.channels.configuracion ...fichero borrado")

    platformtools.dialog_notification(
        "Actualizado correctamente",
        "Versión %s instalada con éxito" % item.version)
    xbmc.executebuiltin("Container.Refresh")
# jurrabi ([email protected]) # bandavi ([email protected]) # Licencia: GPL (http://www.gnu.org/licenses/gpl-3.0.html) #------------------------------------------------------------ # Historial de cambios: #------------------------------------------------------------ import sys, os.path import re import urllib,urllib2 import time import socket import config import logger DOWNLOAD_HISTORY_FILE = os.path.join( config.get_data_path() , "download_history.xml" ) entitydefs = { 'AElig': u'\u00C6', # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1' 'Aacute': u'\u00C1', # latin capital letter A with acute, U+00C1 ISOlat1' 'Acirc': u'\u00C2', # latin capital letter A with circumflex, U+00C2 ISOlat1' 'Agrave': u'\u00C0', # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1' 'Alpha': u'\u0391', # greek capital letter alpha, U+0391' 'Aring': u'\u00C5', # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1' 'Atilde': u'\u00C3', # latin capital letter A with tilde, U+00C3 ISOlat1' 'Auml': u'\u00C4', # latin capital letter A with diaeresis, U+00C4 ISOlat1' 'Beta': u'\u0392', # greek capital letter beta, U+0392' 'Ccedil': u'\u00C7', # latin capital letter C with cedilla, U+00C7 ISOlat1' 'Chi': u'\u03A7', # greek capital letter chi, U+03A7' 'Dagger': u'\u2021', # double dagger, U+2021 ISOpub' 'Delta': u'\u0394', # greek capital letter delta, U+0394 ISOgrk3'
# -*- coding: utf-8 -*- #------------------------------------------------------------ # tvalacarta - XBMC Plugin # Suscription management #------------------------------------------------------------ import urllib import os import sys import config import logger import scrapertools from item import Item SUSCRIPTIONS_FILE = os.path.join( config.get_data_path() , "suscriptions.xml" ) # ------------------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------------------ # Read all the suscriptions def get_current_suscriptions(): logger.info("suscription.get_current_suscriptions") return _read_suscription_file() # Append a new suscription to file def append_suscription(item): logger.info("suscription.append_suscription item="+item.tostring()) # Read suscriptions from file current_suscriptions = _read_suscription_file()
def downloadpage(url,post=None,headers=None,follow_redirects=True, timeout=None, getheaders=False, header_to_get=None, cookies=True,getdata=True,addheaders=None): if header_to_get is not None and getheaders == False: getheaders = True getdata = False url = urllib.quote(url, safe="%/:=&?~#+!$,;'@()*[]") dominio = urlparse.urlparse(url)[1] cookiesdir= config.get_setting("cookies.dir") if cookiesdir == config.get_data_path(): cookiesdir = os.path.join(cookiesdir,"Cookies") ficherocookies = os.path.join(cookiesdir, dominio + ".dat" ) #headers por defecto si no se especifica otra cosa: if headers ==None: headers =[] headers.append(['User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']) headers.append(["Accept","text/html;charset=UTF-8"]) headers.append(["Accept-Charset","UTF-8"]) if addheaders != None: keys=[] for header in addheaders: keys.append(header[0]) for header in headers: if header[0] in keys: headers.remove(header) headers.extend(addheaders) if cookies==True: try: import cookielib except: cookies = False if timeout ==None: timeout = 25 logger.info("----------------------------------------------") logger.info("[scrapertools.py] - downloadpage") logger.info("----------------------------------------------") if timeout is None: logger.info("Timeout: Sin timeout") else: logger.info("Timeout: " + str(timeout)) logger.info("URL: " + url) logger.info("Dominio: " + dominio) logger.info("Headers:" ) for header in headers: logger.info("--------->"+header[0] + ":" + header[1]) if post is not None: logger.info("Peticion: POST" ) logger.info("Post: " + post) else: logger.info("Peticion: GET") if cookies==True: logger.info("Usar Cookies: Si") logger.info("Fichero de Cookies: " + ficherocookies) else: logger.info("Usar Cookies: No") # --------------------------------- # Instala las cookies # --------------------------------- if cookies==True: cj = cookielib.MozillaCookieJar() cj.set_policy(MyCookiePolicy()) if 
os.path.isfile(ficherocookies): logger.debug("Leyendo fichero cookies") try: cj.load(ficherocookies,ignore_discard=True) except: logger.debug("El fichero de cookies existe pero es ilegible, se borra") os.remove(ficherocookies) if not follow_redirects: opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),urllib2.HTTPCookieProcessor(cj),NoRedirectHandler()) else: opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),urllib2.HTTPCookieProcessor(cj)) else: if not follow_redirects: opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),NoRedirectHandler()) else: opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL)) urllib2.install_opener(opener) # ------------------------------------------------- # Lanza la petición # ------------------------------------------------- logger.debug("Realizando Peticion") # Contador inicio = time.clock() # Diccionario para las cabeceras txheaders = {} # Añade las cabeceras for header in headers: txheaders[header[0]]=header[1] req = urllib2.Request(url, post, txheaders) try: handle=urllib2.urlopen(req, timeout=timeout) logger.debug("Peticion Realizada") if cookies==True: logger.debug("Guardando cookies...") cj.save(ficherocookies,ignore_discard=True) # ,ignore_expires=True # Lee los datos y cierra if getdata==True: if handle.info().get('Content-Encoding') == 'gzip': logger.debug("Encoding: gzip") logger.debug("Descomprimiendo...") fin = inicio import StringIO data=handle.read() compressedstream = StringIO.StringIO(data) import gzip gzipper = gzip.GzipFile(fileobj=compressedstream) data = gzipper.read() gzipper.close() logger.debug("Descomprimido") fin = time.clock() else: logger.debug("Encoding: Normal") data = handle.read() except urllib2.HTTPError,e: if e.code == 304: logger.info("Pagina no modificada, cargará del cache") else: logger.error("No se ha podido realizar la petición (Codigo: "+str(e.code) + ")") return None
import urllib import os import sys import downloadtools import config import logger import samba from item import Item CHANNELNAME = "favoritos" DEBUG = True BOOKMARK_PATH = config.get_setting( "bookmarkpath" ) if not BOOKMARK_PATH.upper().startswith("SMB://"): if BOOKMARK_PATH=="": BOOKMARK_PATH = os.path.join( config.get_data_path() , "bookmarks" ) if not os.path.exists(BOOKMARK_PATH): logger.debug("[favoritos.py] Path de bookmarks no existe, se crea: "+BOOKMARK_PATH) os.mkdir(BOOKMARK_PATH) logger.info("[favoritos.py] path="+BOOKMARK_PATH) def isGeneric(): return True def mainlist(item): logger.info("[favoritos.py] mainlist") itemlist=[] # Crea un listado con las entradas de favoritos if usingsamba(BOOKMARK_PATH):
#------------------------------------------------------------ import urlparse, urllib2, urllib, re import os, sys, time, datetime, io import codecs, json import scrapertools, config, logger from core.item import Item from distutils.version import StrictVersion PLUGIN_NAME = "pelisalacarta" ROOT_DIR = config.get_runtime_path() REMOTE_VERSION_FILE = "http://blog.tvalacarta.info/descargas/" + PLUGIN_NAME + "-version.xml" LOCAL_VERSION_FILE = os.path.join(ROOT_DIR, "version.xml") URL_BASE_REPOSITORIO = "http://xbmc-tvalacarta.googlecode.com/svn/trunk/" + PLUGIN_NAME LOCAL_FILE = os.path.join(ROOT_DIR, PLUGIN_NAME + "-") DIRECTORIO_PATH_CONFIG = os.path.join(config.get_data_path(), 'channels') PATH_LIST_CHANNELS_JSON = os.path.join(config.get_data_path(), "list_channels.json") PATH_LIST_SERVERS_JSON = os.path.join(config.get_data_path(), "list_servers.json") try: logger.info("[updater.py] get_platform=" + config.get_platform()) logger.info("[updater.py] get_system_platform=" + config.get_system_platform()) REMOTE_FILE = "http://blog.tvalacarta.info/descargas/" + PLUGIN_NAME if config.get_platform() == "xbmcdharma" and config.get_system_platform( ) == "xbox": # Añadida a la opcion : si plataforma xbmcdharma es "True", no debe ser con la plataforma de la xbox # porque seria un falso "True", ya que el xbmc en las xbox no son dharma por lo tanto no existen los addons
#!/usr/bin/env python # coding: utf-8 from pathlib import Path import numpy as np import pandas as pd import config DATA_PATH = config.get_data_path() CUST_TRAIN_FILE = DATA_PATH / 'customer_train.csv' CUST_TEST_FILE = DATA_PATH / 'customer_test.csv' CUST_TRAIN_SAVE = DATA_PATH / 'customer_train_processed.csv' CUST_TEST_SAVE = DATA_PATH / 'customer_test_processed.csv' def process(cust): mar_statuses = cust['marital_status_cd'].unique() mar2id = dict(zip(mar_statuses, range(len(mar_statuses)))) cust['marital_status_cd'] = cust['marital_status_cd'].map(mar2id) cust['gender_cd'] = cust['gender_cd'].map({'M': 0, 'F': 1}) for i in range(7): cust[f'product_{i}'] = cust[f'product_{i}'].map({'OPN': 1, 'UTL': 2, 'CLS': 3}) prod_cols = [f'product_{i}' for i in range(7)] cust['prod_not_nan'] = (cust[prod_cols] > 0).sum(axis=1) cust['prod_sum_opn'] = (cust[prod_cols] == 1).sum(axis=1) cust['prod_sum_utl'] = (cust[prod_cols] == 2).sum(axis=1)
import urlparse, urllib2, urllib, re import os, sys, time, datetime, io import codecs, json import scrapertools, config, logger from core.item import Item from distutils.version import StrictVersion PLUGIN_NAME = "pelisalacarta-3-9X" ROOT_DIR = config.get_runtime_path() REMOTE_VERSION_FILE = "http://blog.tvalacarta.info/descargas/" + PLUGIN_NAME + "-version.xml" LOCAL_VERSION_FILE = os.path.join(ROOT_DIR, "version.xml") URL_BASE_REPOSITORIO = "http://xbmc-tvalacarta.googlecode.com/svn/trunk/" + PLUGIN_NAME LOCAL_FILE = os.path.join(ROOT_DIR, PLUGIN_NAME + "-") DIRECTORIO_PATH_CONFIG = os.path.join(config.get_data_path(), "channels") PATH_LIST_CHANNELS_JSON = os.path.join(config.get_data_path(), "list_channels.json") PATH_LIST_SERVERS_JSON = os.path.join(config.get_data_path(), "list_servers.json") try: logger.info("[updater.py] get_platform=" + config.get_platform()) logger.info("[updater.py] get_system_platform=" + config.get_system_platform()) REMOTE_FILE = "http://blog.tvalacarta.info/descargas/" + PLUGIN_NAME if config.get_platform() == "xbmcdharma" and config.get_system_platform() == "xbox": # Añadida a la opcion : si plataforma xbmcdharma es "True", no debe ser con la plataforma de la xbox # porque seria un falso "True", ya que el xbmc en las xbox no son dharma por lo tanto no existen los addons REMOTE_FILE = "" elif config.get_platform() == "xbmc": REMOTE_FILE += "-xbmc-plugin-"
def play(item, ItemVideo):
    """Play the resolved video for *item* with the configured player mode.

    Fetches the subtitle file (if any) into the data folder, builds the
    XBMC list item, then hands the media URL to the player selected by the
    "player_mode" setting: "0" direct playlist, "1" setResolvedUrl (no-op
    here), "2" built-in PlayMedia, "3" download_and_play.

    @param item: channel item carrying title/thumbnail/plot/subtitle metadata
    @param ItemVideo: resolved video; .url holds the media url at index 1
        and an optional wait time at index 2
    """
    import xbmc
    import xbmcgui
    import xbmcplugin

    if not ItemVideo == None:
        mediaurl = ItemVideo.url[1]
        if len(ItemVideo.url) > 2:
            wait_time = ItemVideo.url[2]
        else:
            wait_time = 0

        if wait_time > 0:
            # NOTE(review): "server" is not defined in this function — this
            # call would raise NameError whenever wait_time > 0; confirm the
            # intended value upstream.
            handle_wait(wait_time, server, "Cargando vídeo...")

        xlistitem = xbmcgui.ListItem(item.title, iconImage="DefaultVideo.png",
                                     thumbnailImage=item.thumbnail, path=mediaurl)
        xlistitem.setInfo("video", {"Title": item.title, "Plot": item.plot,
                                    "Studio": item.channel, "Genre": item.category})

        if item.subtitle != "":
            # Download the subtitle to a fixed temp file in the data folder,
            # replacing any previous one
            import os
            ficherosubtitulo = os.path.join(config.get_data_path(), 'subtitulo.srt')
            if os.path.exists(ficherosubtitulo):
                os.remove(ficherosubtitulo)
            from core import scrapertools
            data = scrapertools.cache_page(item.subtitle)
            fichero = open(ficherosubtitulo, "w")
            fichero.write(data)
            fichero.close()

        if config.get_setting("player_mode") == "3":
            # download_and_play
            import download_and_play
            download_and_play.download_and_play(mediaurl, "download_and_play.tmp",
                                                config.get_setting("downloadpath"))
        elif config.get_setting("player_mode") == "0" or (
                config.get_setting("player_mode") == "3" and mediaurl.startswith("rtmp")):
            # Direct: queue in the video playlist, pick the player core from
            # the "player_type" setting and play
            playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
            playlist.clear()
            playlist.add(mediaurl, xlistitem)

            playersettings = config.get_setting('player_type')
            player_type = xbmc.PLAYER_CORE_AUTO
            if playersettings == "0":
                player_type = xbmc.PLAYER_CORE_AUTO
                logger.info("[xbmctools.py] PLAYER_CORE_AUTO")
            elif playersettings == "1":
                player_type = xbmc.PLAYER_CORE_MPLAYER
                logger.info("[xbmctools.py] PLAYER_CORE_MPLAYER")
            elif playersettings == "2":
                player_type = xbmc.PLAYER_CORE_DVDPLAYER
                logger.info("[xbmctools.py] PLAYER_CORE_DVDPLAYER")

            xbmcPlayer = xbmc.Player(player_type)
            xbmcPlayer.play(playlist)
        elif config.get_setting("player_mode") == "1":
            # setResolvedUrl — handled by the caller, nothing to do here
            pass
        elif config.get_setting("player_mode") == "2":
            # Built-in
            xbmc.executebuiltin("PlayMedia(" + mediaurl + ")")
import urllib import os import sys import downloadtools import config import logger import samba from item import Item CHANNELNAME = "favoritos" DEBUG = True BOOKMARK_PATH = config.get_setting("bookmarkpath") if not BOOKMARK_PATH.upper().startswith("SMB://"): if BOOKMARK_PATH == "": BOOKMARK_PATH = os.path.join(config.get_data_path(), "bookmarks") if not os.path.exists(BOOKMARK_PATH): logger.debug("[favoritos.py] Path de bookmarks no existe, se crea: " + BOOKMARK_PATH) os.mkdir(BOOKMARK_PATH) logger.info("[favoritos.py] path=" + BOOKMARK_PATH) def isGeneric(): return True def mainlist(item): logger.info("[favoritos.py] mainlist") itemlist = []
def downloadpagewithcookies(url):
    """Fetch *url* keeping cookies in <data_path>/cookies.lwp.

    Uses cookielib when available (ClientCookie as fallback, plain urllib2
    otherwise), installs a cookie-aware opener, performs the request with
    browser-like headers and returns the raw response body.

    @param url: address to download
    @type url: str
    @return: page contents
    @rtype: str
    """
    # ---------------------------------
    # Install the cookies
    # ---------------------------------
    ficherocookies = os.path.join(config.get_data_path(), 'cookies.lwp')
    logger.info("[scrapertools.py] Cookiefile=" + ficherocookies)

    cj = None
    ClientCookie = None
    cookielib = None

    # Prefer cookielib, fall back to ClientCookie, finally plain urllib2
    try:
        import cookielib
    except ImportError:
        try:
            import ClientCookie
        except ImportError:
            # Neither cookie module is available
            urlopen = urllib2.urlopen
            Request = urllib2.Request
        else:
            urlopen = ClientCookie.urlopen
            Request = ClientCookie.Request
            cj = ClientCookie.LWPCookieJar()
    else:
        urlopen = urllib2.urlopen
        Request = urllib2.Request
        cj = cookielib.LWPCookieJar()

    if cj is not None:
        if os.path.isfile(ficherocookies):
            # FIX: a corrupt/unreadable cookie file used to abort the whole
            # download with an exception; delete it and continue, matching
            # the behaviour of the other download helpers in this module.
            try:
                cj.load(ficherocookies)
            except:
                logger.info("[scrapertools.py] El fichero de cookies existe pero es ilegible, se borra")
                os.remove(ficherocookies)

        # Install the cookie processor in whichever module we ended up using
        if cookielib is not None:
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            urllib2.install_opener(opener)
        else:
            opener = ClientCookie.build_opener(ClientCookie.HTTPCookieProcessor(cj))
            ClientCookie.install_opener(opener)

    theurl = url

    # fake a user agent, some websites (like google) don't like automated exploration
    txheaders = {
        'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Host': 'www.meristation.com',
        'Accept-Language': 'es-es,es;q=0.8,en-us;q=0.5,en;q=0.3',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
        'Keep-Alive': '300',
        'Connection': 'keep-alive'}

    req = Request(theurl, None, txheaders)
    handle = urlopen(req)
    cj.save(ficherocookies)  # save the cookies again

    data = handle.read()
    handle.close()
    return data
def __init__(self, parent=None, handler=None, keyhandler=None,
             start_timer=True):
    """Build the untethered-measurement window.

    Lays out the RGB/XYZ preview panels, the navigation and measurement
    controls and the results grid, wires keyboard and timer handlers and
    finally shows the frame (starting the poll timer unless *start_timer*
    is False).

    parent      -- parent window
    handler     -- callback bound to the poll timer (when given)
    keyhandler  -- external key handler, stored on the instance
    start_timer -- start the poll timer right after showing
    """
    BaseFrame.__init__(self, parent, wx.ID_ANY,
                       lang.getstr("measurement.untethered"),
                       style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL,
                       name="untetheredframe")
    self.SetIcons(get_icon_bundle([256, 48, 32, 16], appname))
    # Top-level sizer: growable content row on top, results grid below
    self.sizer = wx.FlexGridSizer(2, 1, 0, 0)
    self.sizer.AddGrowableCol(0)
    self.sizer.AddGrowableRow(0)
    self.sizer.AddGrowableRow(1)
    self.panel = wx_Panel(self)
    self.SetSizer(self.sizer)
    self.sizer.Add(self.panel, 1, wx.EXPAND)
    self.panel.SetBackgroundColour(BGCOLOUR)
    # 3 rows x 2 cols: labels / preview panels / button rows
    panelsizer = wx.FlexGridSizer(3, 2, 8, 8)
    panelsizer.AddGrowableCol(0)
    panelsizer.AddGrowableCol(1)
    panelsizer.AddGrowableRow(1)
    self.panel.SetSizer(panelsizer)
    # Labels above the two preview panels
    self.label_RGB = wx.StaticText(self.panel, wx.ID_ANY, " ")
    self.label_RGB.SetForegroundColour(FGCOLOUR)
    panelsizer.Add(self.label_RGB, 0, wx.TOP | wx.LEFT | wx.EXPAND,
                   border=8)
    self.label_XYZ = wx.StaticText(self.panel, wx.ID_ANY, " ")
    self.label_XYZ.SetForegroundColour(FGCOLOUR)
    panelsizer.Add(self.label_XYZ, 0, wx.TOP | wx.RIGHT | wx.EXPAND,
                   border=8)
    if sys.platform == "darwin":
        style = wx.BORDER_THEME
    else:
        style = wx.BORDER_SIMPLE
    # RGB / XYZ preview panels with a checkerboard placeholder bitmap
    self.panel_RGB = BitmapBackgroundPanel(self.panel, size=(256, 256),
                                           style=style)
    self.panel_RGB.scalebitmap = (True, True)
    self.panel_RGB.SetBitmap(
        getbitmap("theme/checkerboard-32x32x5-333-444"))
    panelsizer.Add(self.panel_RGB, 1, wx.LEFT | wx.EXPAND, border=8)
    self.panel_XYZ = BitmapBackgroundPanel(self.panel, size=(256, 256),
                                           style=style)
    self.panel_XYZ.scalebitmap = (True, True)
    self.panel_XYZ.SetBitmap(
        getbitmap("theme/checkerboard-32x32x5-333-444"))
    panelsizer.Add(self.panel_XYZ, 1, wx.RIGHT | wx.EXPAND, border=8)
    # Navigation row: back / index label / next / auto-measure checkbox
    sizer = wx.BoxSizer(wx.HORIZONTAL)
    self.back_btn = FlatShadedButton(self.panel,
                                     bitmap=geticon(10, "back"),
                                     label="", fgcolour=FGCOLOUR)
    self.back_btn.Bind(wx.EVT_BUTTON, self.back_btn_handler)
    sizer.Add(self.back_btn, 0, wx.LEFT | wx.RIGHT, border=8)
    self.label_index = wx.StaticText(self.panel, wx.ID_ANY, " ")
    self.label_index.SetForegroundColour(FGCOLOUR)
    sizer.Add(self.label_index, 0, wx.ALIGN_CENTER_VERTICAL)
    self.next_btn = FlatShadedButton(self.panel,
                                     bitmap=geticon(10, "play"),
                                     label="", fgcolour=FGCOLOUR)
    self.next_btn.Bind(wx.EVT_BUTTON, self.next_btn_handler)
    sizer.Add(self.next_btn, 0, wx.LEFT, border=8)
    sizer.Add((12, 1), 1)
    self.measure_auto_cb = CustomCheckBox(self.panel, wx.ID_ANY,
                                          lang.getstr("auto"))
    self.measure_auto_cb.SetForegroundColour(FGCOLOUR)
    self.measure_auto_cb.Bind(wx.EVT_CHECKBOX,
                              self.measure_auto_ctrl_handler)
    sizer.Add(self.measure_auto_cb, 0,
              wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT)
    panelsizer.Add(sizer, 0, wx.BOTTOM | wx.EXPAND, border=8)
    # Action row: measure / sound toggle / finish
    sizer = wx.BoxSizer(wx.HORIZONTAL)
    self.measure_btn = FlatShadedButton(self.panel,
                                        bitmap=geticon(10, "play"),
                                        label=lang.getstr("measure"),
                                        fgcolour=FGCOLOUR)
    self.measure_btn.Bind(wx.EVT_BUTTON, self.measure_btn_handler)
    sizer.Add(self.measure_btn, 0, wx.RIGHT, border=6)
    # Sound when measuring
    # Needs to be stereo!
    self.measurement_sound = audio.Sound(get_data_path("beep.wav"))
    self.commit_sound = audio.Sound(get_data_path("camera_shutter.wav"))
    bitmap = self.get_sound_on_off_btn_bitmap()
    self.sound_on_off_btn = FlatShadedButton(self.panel, bitmap=bitmap,
                                             fgcolour=FGCOLOUR)
    self.sound_on_off_btn.SetToolTipString(
        lang.getstr("measurement.play_sound"))
    self.sound_on_off_btn.Bind(wx.EVT_BUTTON,
                               self.measurement_play_sound_handler)
    sizer.Add(self.sound_on_off_btn, 0)
    sizer.Add((12, 1), 1)
    self.finish_btn = FlatShadedButton(self.panel,
                                       label=lang.getstr("finish"),
                                       fgcolour=FGCOLOUR)
    self.finish_btn.Bind(wx.EVT_BUTTON, self.finish_btn_handler)
    sizer.Add(self.finish_btn, 0, wx.RIGHT, border=8)
    panelsizer.Add(sizer, 0, wx.BOTTOM | wx.EXPAND, border=8)
    # Results grid: R G B, two swatch columns (3/4), L* a* b*, extra column
    self.grid = CustomGrid(self, -1, size=(536, 256))
    self.grid.DisableDragColSize()
    self.grid.DisableDragRowSize()
    self.grid.SetScrollRate(0, 5)
    self.grid.SetCellHighlightROPenWidth(0)
    self.grid.SetColLabelSize(self.grid.GetDefaultRowSize())
    self.grid.SetDefaultCellAlignment(wx.ALIGN_CENTER, wx.ALIGN_CENTER)
    self.grid.SetRowLabelAlignment(wx.ALIGN_RIGHT, wx.ALIGN_CENTER)
    self.grid.draw_horizontal_grid_lines = False
    self.grid.draw_vertical_grid_lines = False
    self.grid.style = ""
    self.grid.CreateGrid(0, 9)
    self.grid.SetRowLabelSize(62)
    for i in range(9):
        if i in (3, 4):
            # Swatch columns are square: as wide as a default row is tall
            size = self.grid.GetDefaultRowSize()
            if i == 4:
                attr = wx.grid.GridCellAttr()
                attr.SetBackgroundColour(wx.Colour(0, 0, 0, 0))
                self.grid.SetColAttr(i, attr)
        else:
            size = 62
        self.grid.SetColSize(i, size)
    for i, label in enumerate(
            ["R", "G", "B", "", "", "L*", "a*", "b*", ""]):
        self.grid.SetColLabelValue(i, label)
    self.grid.SetCellHighlightPenWidth(0)
    self.grid.SetDefaultCellBackgroundColour(
        self.grid.GetLabelBackgroundColour())
    # Cap the grid font size for readability
    font = self.grid.GetDefaultCellFont()
    if font.PointSize > 11:
        font.PointSize = 11
        self.grid.SetDefaultCellFont(font)
    self.grid.SetSelectionMode(wx.grid.Grid.wxGridSelectRows)
    self.grid.EnableEditing(False)
    self.grid.EnableGridLines(False)
    self.grid.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK,
                   self.grid_left_click_handler)
    self.grid.Bind(wx.grid.EVT_GRID_SELECT_CELL,
                   self.grid_left_click_handler)
    self.sizer.Add(self.grid, 1, wx.EXPAND)
    self.Fit()
    self.SetMinSize(self.GetSize())
    self.keyhandler = keyhandler
    self.id_to_keycode = {}
    if sys.platform == "darwin":
        # Use an accelerator table for tab, space, 0-9, A-Z, numpad,
        # navigation keys and processing keys
        keycodes = [wx.WXK_TAB, wx.WXK_SPACE]
        keycodes.extend(list(range(ord("0"), ord("9"))))
        keycodes.extend(list(range(ord("A"), ord("Z"))))
        keycodes.extend(numpad_keycodes)
        keycodes.extend(nav_keycodes)
        keycodes.extend(processing_keycodes)
        for keycode in keycodes:
            self.id_to_keycode[wx.Window.NewControlId()] = keycode
        accels = []
        for id, keycode in self.id_to_keycode.items():
            self.Bind(wx.EVT_MENU, self.key_handler, id=id)
            accels.append((wx.ACCEL_NORMAL, keycode, id))
            if keycode == wx.WXK_TAB:
                accels.append((wx.ACCEL_SHIFT, keycode, id))
        self.SetAcceleratorTable(wx.AcceleratorTable(accels))
    else:
        self.Bind(wx.EVT_CHAR_HOOK, self.key_handler)
        self.Bind(wx.EVT_KEY_DOWN, self.key_handler)
    # Event handlers
    self.Bind(wx.EVT_CLOSE, self.OnClose, self)
    self.Bind(wx.EVT_MOVE, self.OnMove, self)
    self.Bind(wx.EVT_SIZE, self.OnResize, self)
    self.timer = wx.Timer(self)
    if handler:
        self.Bind(wx.EVT_TIMER, handler, self.timer)
    self.Bind(wx.EVT_WINDOW_DESTROY, self.OnDestroy, self)
    # Final initialization steps
    for child in self.GetAllChildren():
        if (sys.platform == "win32" and sys.getwindowsversion() >= (6, )
                and isinstance(child, wx.Panel)):
            # No need to enable double buffering under Linux and Mac OS X.
            # Under Windows, enabling double buffering on the panel seems
            # to work best to reduce flicker.
            child.SetDoubleBuffered(True)
    self.logger = get_file_logger("untethered")
    self._setup()
    self.Show()
    if start_timer:
        self.start_timer()
# pelisalacarta - XBMC Plugin # Conector para Cinshare # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/ #------------------------------------------------------------ import urlparse,urllib2,urllib,re import os.path import sys import xbmc import os import scrapertools import unpackerjs import socket import config COOKIEFILE = os.path.join (config.get_data_path() , "cookies.lwp") def geturl(urlvideo): xbmc.output("[cinshare.py] url="+urlvideo) # --------------------------------------- # Inicializa la libreria de las cookies # --------------------------------------- ficherocookies = COOKIEFILE try: os.remove(ficherocookies) except: pass # the path and filename to save your cookies in cj = None ClientCookie = None
def downloadpageGzip(url):
    # Downloads `url` with cookie support, advertising gzip encoding, and returns
    # the decompressed body (or the raw body if gunzipping fails).
    # Legacy Python 2 code: relies on module-level urllib2, urlparse, time,
    # config, logger and the DEBUG flag.

    # Initialise the cookie library
    ficherocookies = os.path.join( config.get_data_path(), 'cookies.dat' )
    if (DEBUG==True):
        logger.info("Cookiefile="+ficherocookies)
    inicio = time.clock()

    cj = None
    ClientCookie = None
    cookielib = None

    # Let's see if cookielib is available
    try:
        import cookielib
    except ImportError:
        # If importing cookielib fails
        # let's try ClientCookie
        try:
            import ClientCookie
        except ImportError:
            # ClientCookie isn't available either; fall back to plain urllib2
            # (no cookie jar at all in that case).
            urlopen = urllib2.urlopen
            Request = urllib2.Request
        else:
            # imported ClientCookie
            urlopen = ClientCookie.urlopen
            Request = ClientCookie.Request
            cj = ClientCookie.MozillaCookieJar()
    else:
        # importing cookielib worked
        urlopen = urllib2.urlopen
        Request = urllib2.Request
        cj = cookielib.MozillaCookieJar()
        # This is a subclass of FileCookieJar
        # that has useful load and save methods

    # ---------------------------------
    # Install the cookies
    # ---------------------------------
    if cj is not None:
        # we successfully imported
        # one of the two cookie handling modules
        if os.path.isfile(ficherocookies):
            # if we have a cookie file already saved
            # then load the cookies into the Cookie Jar
            try:
                cj.load(ficherocookies)
            except:
                # Unreadable cookie file: delete it and continue with an empty jar.
                if (DEBUG==True):
                    logger.info("[scrapertools.py] El fichero de cookies existe pero es ilegible, se borra")
                os.remove(ficherocookies)

        # Now we need to get our Cookie Jar
        # installed in the opener;
        # for fetching URLs
        if cookielib is not None:
            # if we use cookielib
            # then we get the HTTPCookieProcessor
            # and install the opener in urllib2
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            urllib2.install_opener(opener)
        else:
            # if we use ClientCookie
            # then we get the HTTPCookieProcessor
            # and install the opener in ClientCookie
            opener = ClientCookie.build_opener(ClientCookie.HTTPCookieProcessor(cj))
            ClientCookie.install_opener(opener)

    #print "-------------------------------------------------------"
    theurl = url
    # an example url that sets a cookie,
    # try different urls here and see the cookie collection you can make !
    #txheaders = {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
    #             'Referer':'http://www.megavideo.com/?s=signup'}
    import httplib
    parsedurl = urlparse.urlparse(url)
    if (DEBUG==True):
        logger.info("parsedurl="+str(parsedurl))

    # Browser-like headers; 'Accept-Encoding: gzip,deflate' is what makes the
    # server return the gzipped payload this function then decompresses.
    # The Referer is rebuilt as scheme://netloc of the requested URL.
    txheaders =  {
        'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
        'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language':'es-es,es;q=0.8,en-us;q=0.5,en;q=0.3',
        'Accept-Charset':'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
        'Accept-Encoding':'gzip,deflate',
        'Keep-Alive':'300',
        'Connection':'keep-alive',
        'Referer':parsedurl[0]+"://"+parsedurl[1]}
    if (DEBUG==True):
        logger.info(str(txheaders))

    # fake a user agent, some websites (like google) don't like automated exploration
    req = Request(theurl, None, txheaders)
    handle = urlopen(req)
    # NOTE(review): cj.save is called unconditionally — if neither cookielib nor
    # ClientCookie imported, cj is None and this would raise; confirm intent.
    cj.save(ficherocookies)                     # save the cookies again
    data=handle.read()
    handle.close()

    fin = time.clock()
    if (DEBUG==True):
        logger.info("[scrapertools.py] Descargado 'Gzipped data' en %d segundos " % (fin-inicio+1))

    # Decompress the gzipped payload; on any failure, return the raw bytes.
    try:
        fin = inicio
        import StringIO
        compressedstream = StringIO.StringIO(data)
        import gzip
        gzipper = gzip.GzipFile(fileobj=compressedstream)
        data1 = gzipper.read()
        gzipper.close()
        fin = time.clock()
        if (DEBUG==True):
            logger.info("[scrapertools.py] 'Gzipped data' descomprimido en %d segundos " % (fin-inicio+1))
        return data1
    except:
        return data
import time
import traceback
import ujson
import webbrowser
import requests
from config import get_console_handler, get_data_path, FB_ACCESS_TOKEN

# NOTE(review): `logging` and `os` are used below but not imported in this
# chunk — presumably imported earlier in the file; verify.

# Base endpoint of the Facebook Graph API.
FACEBOOK_GRAPH_URL = "https://graph.facebook.com/"

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(get_console_handler())

# Local JSON database of posts already seen.
POST_DATABASE_PATH = os.path.join(get_data_path(), "posts_db.json")
# Polling interval (seconds, presumably — TODO confirm at the call site).
REQUEST_FREQUENCY = 10
# Facebook event being monitored.
EVENT_ID = 166530800395978
TIMEOUT = 5
ERROR_URL = "http://bluegg.co.uk/404"
# French keywords marking a post as a ticket-selling offer.
SELLING_KEYWORDS = ["vend", "à vendre", "en vente"]
TICKETSWAP_KEYWORDS = ["Jeremy Underground le 30 avril 2016", "Jeremy Underground on 30 avril"]
TICKETSWAP_NEGATIVE_KEYWORDS = ["I'm looking", "Je recherche"]

def get_feed_url(event_id):
def __init__(self):
    # Builds the main journal window: views, toolbar, histogram, day-navigation
    # buttons, a "loading" spinner page, and all signal wiring.
    # Legacy PyGTK / Python 2 code (gtk, gobject, py2 map-for-side-effects).
    super(PortalWindow, self).__init__()
    # Important
    self._request_size()
    self.store = STORE
    self.day_iter = self.store.today
    self.pages_loaded = 0
    self.current_zoom = 1
    self.in_erase_mode = False
    self.view = ViewContainer(self.store)
    self.toolbar = Toolbar()
    # Default view implementations; the first one signals readiness.
    default_views = (MultiViewContainer(), ThumbViewContainer(), TimelineViewContainer())
    default_views[0].connect("view-ready", self._on_view_ready)
    # Py2 idiom: map() used purely for side effects (registers views / buttons).
    map(self.view._register_default_view, default_views)
    map(self.toolbar.add_new_view_button, self.view.tool_buttons[::-1])
    self.preferences_dialog = PreferencesDialog(parent=self)
    ContextMenu.set_parent_window(self)
    ContextMenuMolteplicity.set_parent_window(self)
    self.histogram = HistogramWidget()
    self.histogram.set_store(self.store)
    # Day navigation: backward always enabled, forward starts insensitive
    # (you cannot navigate past today).
    self.backward_button, ev_backward_button = DayButton.new(0)
    self.forward_button, ev_forward_button = DayButton.new(1, sensitive=False)
    self.nicebar = NiceBar()
    self.nicebar_timeout = None
    # use a table for the spinner (otherwise the spinner is massive!)
    spinner_table = gtk.Table(3, 3, False)
    label = gtk.Label()
    label.set_markup(_("<span size=\"larger\"><b>Loading Journal...</b></span>"))
    vbox = gtk.VBox(False, 5)
    pix = gtk.gdk.pixbuf_new_from_file(get_data_path("zeitgeist-logo.svg"))
    pix = pix.scale_simple(100, 100, gtk.gdk.INTERP_BILINEAR)
    zlogo = gtk.image_new_from_pixbuf(pix)
    vbox.pack_start(zlogo, False, False)
    vbox.pack_start(label, True)
    spinner_table.attach(vbox, 1, 2, 1, 2, gtk.EXPAND, gtk.EXPAND)
    # Zoom slider (range 0..3) plus zoom-in/out icons.
    self.hscale = gtk.HScale(gtk.Adjustment(1.0, 0.0, 3.0, 1.0, 1.0, 0.0))
    self.hscale.set_size_request(120, -1)
    self.hscale.set_draw_value(False)
    al = gtk.Alignment(yalign=0.5)
    al.set_padding(0, 0, 8, 8)
    al.add(self.hscale)
    im_in = gtk.image_new_from_stock(gtk.STOCK_ZOOM_IN, gtk.ICON_SIZE_MENU)
    im_out = gtk.image_new_from_stock(gtk.STOCK_ZOOM_OUT, gtk.ICON_SIZE_MENU)
    self.throbber_popup_button = ThrobberPopupButton()
    # Widget placement
    # NOTE: `vbox` is rebound here — the spinner vbox above is already packed.
    vbox = gtk.VBox(); hbox = gtk.HBox();self.scale_box = gtk.HBox();self.histogramhbox = gtk.HBox()
    vbox_general = gtk.VBox();scale_toolbar_box = gtk.HBox()
    hbox.pack_start(ev_backward_button, False, False); hbox.pack_start(self.view, True, True, 6)
    hbox.pack_end(ev_forward_button, False, False);
    self.scale_box.pack_start(im_out,False,False);self.scale_box.pack_start(al, False, False)
    self.scale_box.pack_start(im_in,False,False);self.scale_box.pack_end(gtk.SeparatorToolItem(),False,False)
    scale_toolbar_box.pack_start(self.toolbar);
    scale_toolbar_box.pack_end(self.throbber_popup_button,False,False);
    scale_toolbar_box.pack_end(self.scale_box, False, False);
    vbox.pack_start(scale_toolbar_box, False, False);
    vbox.pack_start(self.nicebar,False,False);vbox.pack_start(hbox, True, True, 5);
    self.histogramhbox.pack_end(self.histogram, True, True, 32);
    self.histogramhbox.set_sensitive(False)
    # Notebook page 0 = loading spinner, page 1 = the real UI; tabs hidden so
    # it acts as a simple page switcher.
    self.spinner_notebook = gtk.Notebook()
    self.spinner_notebook.set_show_tabs(False)
    self.spinner_notebook.set_show_border(False)
    self.spinner_notebook.append_page(spinner_table)
    self.spinner_notebook.append_page(vbox)
    vbox_general.pack_start(self.spinner_notebook)
    vbox_general.pack_end(self.histogramhbox, False, False)
    self.add(vbox_general)
    vbox_general.show_all()
    self.scale_box.hide()
    self.show()
    self.nicebar.hide()
    SearchBox.hide()
    #Tray Icon
    self.tray_manager = TrayIconManager(self)
    # Settings
    self.view.set_day(self.store.today)
    self.view.set_zoom_slider(self.hscale)
    # Signal connections
    self.view.connect("new-view-added", lambda w, v: self.toolbar.add_new_view_button(v.button, len(self.view.tool_buttons)))
    self.connect("destroy", self.quit)
    self.connect("delete-event", self.on_delete)
    self.toolbar.connect("previous", self.previous)
    self.toolbar.connect("jump-to-today", lambda w: self.set_date(datetime.date.today()))
    self.toolbar.connect("next", self.next)
    self.hscale.connect("value-changed", self._on_zoom_changed)
    self.backward_button.connect("clicked", self.previous)
    self.forward_button.connect("clicked", self.next)
    self.forward_button.connect("jump-to-today", lambda w: self.set_date(datetime.date.today()))
    self.histogram.connect("date-changed", lambda w, date: self.set_date(date))
    self.view.connect("view-button-clicked", self.on_view_button_click)
    self.store.connect("update", self.histogram.histogram.set_store)
    SearchBox.connect("search", self._on_search)
    self.throbber_popup_button.connect("toggle-erase-mode", self._on_toggle_erase_mode)
    SearchBox.connect("clear", self._on_search_clear)
    # Window configuration
    self.set_icon_name("ucl-study-journal")
    self.set_icon_list(
        *[gtk.gdk.pixbuf_new_from_file(get_icon_path(f)) for f in (
            "hicolor/16x16/apps/ucl-study-journal.png",
            "hicolor/24x24/apps/ucl-study-journal.png",
            "hicolor/32x32/apps/ucl-study-journal.png",
            "hicolor/48x48/apps/ucl-study-journal.png",
            "hicolor/256x256/apps/ucl-study-journal.png")])
    # Defer heavy setup until the main loop is idle so the window shows quickly.
    gobject.idle_add(self.setup)
    gobject.idle_add(self.load_plugins)
def get_channel_setting(name, channel):
    """ Return the stored value of the requested parameter.

    Returns the value of parameter 'name' from channel 'channel''s own
    configuration. Looks for settings_channels/<channel>_data.json under
    the addon data path; if the parameter is not stored yet, the defaults
    declared in channels/<channel>.xml are merged in and the JSON file is
    (re)written so the next lookup hits the cache.

    @param name: parameter name
    @type name: str
    @param channel: channel name
    @type channel: str
    @return: the value of parameter 'name', or None if it cannot be resolved
    @rtype: str, None
    """
    # Create the settings folder on first use
    if not os.path.exists(
            os.path.join(config.get_data_path(), "settings_channels")):
        os.mkdir(os.path.join(config.get_data_path(), "settings_channels"))

    file_settings = os.path.join(config.get_data_path(), "settings_channels",
                                 channel + "_data.json")
    dict_settings = {}
    # Keep the whole file dict around so that keys other than 'settings'
    # survive a rewrite (the old code rebuilt dict_file from scratch and
    # silently dropped them).
    dict_file = {}

    if os.path.exists(file_settings):
        # Read the saved configuration from ../settings_channels/<channel>_data.json
        try:
            with open(file_settings, "r") as handle:
                dict_file = jsontools.load_json(handle.read())
            if isinstance(dict_file, dict) and 'settings' in dict_file:
                dict_settings = dict_file['settings']
        except EnvironmentError:
            logger.info("ERROR al leer el archivo: {0}".format(file_settings))

    if not dict_settings or name not in dict_settings:
        # Fall back to the defaults declared in ../channels/<channel>.xml
        from core import channeltools
        try:
            list_controls, default_settings = channeltools.get_channel_controls_settings(
                channel)
        except Exception:
            # Best effort: a channel without (or with a broken) XML simply has
            # no defaults.
            default_settings = {}

        if name in default_settings:
            # The parameter exists in channel.xml: persist the defaults merged
            # with whatever was already stored (stored values win).
            default_settings.update(dict_settings)
            dict_settings = default_settings
            dict_file['settings'] = dict_settings
            # Write ../settings_channels/<channel>_data.json
            json_data = jsontools.dump_json(dict_file).encode("utf-8")
            try:
                with open(file_settings, "w") as handle:
                    handle.write(json_data)
            except EnvironmentError:
                logger.info(
                    "[config.py] ERROR al salvar el archivo: {0}".format(
                        file_settings))

    # Return the locally stored value of 'name' if present, else None
    return dict_settings.get(name)
import sys
import os
from sklearn.decomposition import TruncatedSVD
from sklearn.svm import SVC
import numpy as np
import joblib
from config import tags, get_data_path, get_model_path


def train(train_data, reducer_model_path, classifier_model_path,
          n_components=300, C=10.0):
    """Train a TruncatedSVD reducer plus an SVC classifier and persist both.

    The hyperparameters are exposed as keyword arguments (defaults match the
    previous hard-coded values, so existing callers are unaffected).

    :param train_data: path to an .npz file with arrays "xs" (features)
                       and "ys" (labels)
    :param reducer_model_path: destination for the joblib-dumped TruncatedSVD
    :param classifier_model_path: destination for the joblib-dumped SVC
    :param n_components: dimensionality kept by the SVD reduction
    :param C: SVC regularisation strength
    """
    # Do not shadow the path argument with the loaded archive.
    data = np.load(train_data)
    train_xs, train_ys = data["xs"], data["ys"]
    # Reduce dimensionality before fitting the (expensive) kernel SVM.
    reducer = TruncatedSVD(n_components=n_components)
    train_xs = reducer.fit_transform(train_xs)
    classifier = SVC(C=C, gamma="scale", probability=True)
    classifier.fit(train_xs, train_ys)
    joblib.dump(reducer, reducer_model_path)
    joblib.dump(classifier, classifier_model_path)


if __name__ == '__main__':
    # Expects exactly one CLI argument: a tag type declared in config.tags.
    if len(sys.argv) != 2 or sys.argv[1] not in tags:
        raise ValueError("""usage: 情绪效价,情绪用户""")
    tag_type = sys.argv[1]
    origin_file_path, train_npz_path, test_npz_path = get_data_path(tag_type)
    _, reducer_model_path, classifier_model_path = get_model_path(tag_type)
    train(train_npz_path, reducer_model_path, classifier_model_path)
def play_torrent(item, xlistitem, mediaurl):
    # Plays a torrent either through an external client plugin, the MCT client,
    # or the built-in libtorrent client (with a buffering/progress dialog loop).
    # Runs inside Kodi: depends on xbmc, config, logger and the dialog_* helpers.
    logger.info()

    # Available options for playing torrents
    torrent_options = list()
    torrent_options.append(["Cliente interno (necesario libtorrent)"])
    torrent_options.append(["Cliente interno MCT (necesario libtorrent)"])
    torrent_options.extend(torrent_client_installed(show_tuple=True))

    torrent_client = config.get_setting("torrent_client", server="torrent")
    # NOTE(review): `torrent_client - 1 <= len(torrent_options)` permits an index
    # one past the end of the list; and the `torrent_client == 0` branch below is
    # unreachable because the enclosing condition already requires torrent_client
    # to be truthy. Confirm intended semantics before changing.
    if torrent_client and torrent_client - 1 <= len(torrent_options):
        if torrent_client == 0:
            seleccion = dialog_select(
                "Abrir torrent con...",
                [opcion[0] for opcion in torrent_options])
        else:
            seleccion = torrent_client - 1
    else:
        # No preconfigured client: ask the user, unless there is only one option.
        if len(torrent_options) > 1:
            seleccion = dialog_select(
                "Abrir torrent con...",
                [opcion[0] for opcion in torrent_options])
        else:
            seleccion = 0

    # External plugins (indexes >= 2 come from torrent_client_installed)
    if seleccion > 1:
        mediaurl = urllib.quote_plus(item.url)
        xbmc.executebuiltin("PlayMedia(" + torrent_options[seleccion][1] % mediaurl + ")")

    # Built-in MCT client
    if seleccion == 1:
        from platformcode import mct
        mct.play(mediaurl, xlistitem, subtitle=item.subtitle, item=item)

    # Built-in player (libtorrent)
    if seleccion == 0:
        import time
        played = False
        debug = (config.get_setting("debug") == True)

        # Import the torrent client
        from btserver import Client

        client_tmp_path = config.get_setting("downloadpath")
        if not client_tmp_path:
            client_tmp_path = config.get_data_path()

        # Start the client:
        c = Client(url=mediaurl, is_playing_fnc=xbmc.Player().isPlaying,
                   wait_time=None, timeout=10,
                   temp_path=os.path.join(client_tmp_path, "alfa-torrent"),
                   print_status=debug)

        # Show the progress dialog
        progreso = dialog_progress("Alfa - Torrent", "Iniciando...")

        # Loop until the dialog is cancelled or the client is closed
        while not c.closed:
            try:
                # Get the torrent status
                s = c.status
                if debug:
                    # Build the three info lines for the dialog (verbose form)
                    txt = '%.2f%% de %.1fMB %s | %.1f kB/s' % \
                          (s.progress_file, s.file_size, s.str_state, s._download_rate)
                    txt2 = 'S: %d(%d) P: %d(%d) | DHT:%s (%d) | Trakers: %d' % \
                           (s.num_seeds, s.num_complete, s.num_peers, s.num_incomplete,
                            s.dht_state, s.dht_nodes, s.trackers)
                    txt3 = 'Origen Peers TRK: %d DHT: %d PEX: %d LSD %d ' % \
                           (s.trk_peers, s.dht_peers, s.pex_peers, s.lsd_peers)
                else:
                    txt = '%.2f%% de %.1fMB %s | %.1f kB/s' % \
                          (s.progress_file, s.file_size, s.str_state, s._download_rate)
                    txt2 = 'S: %d(%d) P: %d(%d)' % (
                        s.num_seeds, s.num_complete, s.num_peers, s.num_incomplete)
                    try:
                        txt3 = 'Deteniendo automaticamente en: %ss' % (int(
                            s.timeout))
                    except:
                        # s.timeout may be None / non-numeric while connecting
                        txt3 = ''
                progreso.update(s.buffer, txt, txt2, txt3)

                time.sleep(0.5)

                if progreso.iscanceled():
                    progreso.close()
                    if s.buffer == 100:
                        # Buffer full: offer to start playback instead of cancelling
                        if dialog_yesno("Alfa - Torrent",
                                        "¿Deseas iniciar la reproduccion?"):
                            played = False
                            progreso = dialog_progress("Alfa - Torrent", "")
                            progreso.update(s.buffer, txt, txt2, txt3)
                        else:
                            progreso = dialog_progress("Alfa - Torrent", "")
                            break
                    else:
                        # Still buffering: confirm the cancellation
                        if dialog_yesno("Alfa - Torrent",
                                        "¿Deseas cancelar el proceso?"):
                            progreso = dialog_progress("Alfa - Torrent", "")
                            break
                        else:
                            progreso = dialog_progress("Alfa - Torrent", "")
                            progreso.update(s.buffer, txt, txt2, txt3)

                # If the buffer is full and playback has not started yet, start it
                if s.buffer == 100 and not played:
                    # Close the progress dialog
                    progreso.close()
                    # Get the torrent's playlist
                    videourl = c.get_play_list()
                    # Start the player
                    playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
                    playlist.clear()
                    playlist.add(videourl, xlistitem)
                    xbmc_player = xbmc.Player()
                    xbmc_player.play(playlist)
                    # Mark as played so it is not started again
                    played = True
                    # If this is a video-library file, mark it as watched
                    if item.strm_path:
                        from platformcode import xbmc_videolibrary
                        xbmc_videolibrary.mark_auto_as_watched(item)
                    # Wait for the player to close
                    while xbmc.Player().isPlaying():
                        time.sleep(1)
                    # Once closed, show the dialog again
                    progreso = dialog_progress("Alfa - Torrent", "")
                    progreso.update(s.buffer, txt, txt2, txt3)
            except:
                # Any unexpected error aborts the loop (logged with traceback)
                import traceback
                logger.error(traceback.format_exc())
                break

        progreso.update(100, "Terminando y eliminando datos", " ", " ")

        # Stop the client
        if not c.closed:
            c.stop()

        # And close the progress dialog
        progreso.close()
#------------------------------------------------------------ # pelisalacarta - XBMC Plugin # Conector para divxden # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/ #------------------------------------------------------------ import urlparse, urllib2, urllib, re import os.path import sys import xbmc import os import scrapertools import unpackerjs import config COOKIEFILE = os.path.join(config.get_data_path(), "cookies.lwp") def geturl(urlvideo): xbmc.output("[divxlink.py] url=" + urlvideo) # --------------------------------------- # Inicializa la libreria de las cookies # --------------------------------------- ficherocookies = COOKIEFILE try: os.remove(ficherocookies) except: pass # the path and filename to save your cookies in cj = None
def get_resource(url, source=True):
    """Fetch *url* with a local cache and return its contents or a file URL.

    The cache path is derived from the URL (protocol and "www." stripped,
    domain reversed: domain.com/path -> com.domain.path). If the network
    fetch fails, a bundled local copy from the data directory is used.

    Relies on module-level globals: `cachepath`, `force`, `cache`,
    `_safe_print`, `get_data_path`, `safe_unicode` — TODO confirm semantics.

    @param url: resource URL to fetch
    @param source: if True (and not a .swf), return the body, wrapped in
        <style>/<script> tags for .css/.js; otherwise return a file:/// URL
        pointing at the cached copy
    @return: str (wrapped source), bytes (raw body), str (file URL), or None
        if the resource could not be found
    """
    baseurl, basename = os.path.split(url)
    # Strip protocol (raw strings: "\w" is an invalid escape in a plain literal)
    cache_uri = re.sub(r"^\w+://", "", baseurl)
    # Strip www
    cache_uri = re.sub(r"^(?:www\.)?", "", cache_uri)
    # domain.com -> com.domain
    domain, path = cache_uri.split("/", 1)
    cache_uri = "/".join([".".join(reversed(domain.split("."))), path])
    # com.domain/path -> com.domain.path
    cache_uri = re.sub(r"^([^/]+)/", r"\1.", cache_uri)
    cachedir = os.path.join(cachepath, os.path.join(*cache_uri.split("/")))
    if not os.path.isdir(cachedir):
        _safe_print("Creating cache directory:", cachedir)
        os.makedirs(cachedir)
    cachefilename = os.path.join(cachedir, basename)
    # The cache is read/written in binary mode, so work with bytes throughout.
    body = b""
    if not force and os.path.isfile(cachefilename):
        _safe_print("Using cached file:", cachefilename)
        with open(cachefilename, "rb") as cachefile:
            body = cachefile.read()
    if not body.strip():
        # Try https first, then fall back to plain http.
        for url in (url, url.replace("https://", "http://")):
            _safe_print("Requesting:", url)
            try:
                response = urllib.request.urlopen(url)
            except (socket.error, urllib.error.URLError,
                    http.client.HTTPException) as exception:
                _safe_print(exception)
            else:
                body = response.read()
                response.close()
                break
    if not body.strip():
        # Fallback to local copy
        url = get_data_path("x3d-viewer/" + basename)
        if not url:
            _safe_print("Error: Resource not found:", basename)
            return
        with open(url, "rb") as resource_file:
            body = resource_file.read()
    if body.strip():
        if cache and (force or not os.path.isfile(cachefilename)):
            with open(cachefilename, "wb") as cachefile:
                cachefile.write(body)
        if source and not basename.endswith(".swf"):
            if basename.endswith(".css"):
                # Decode before interpolating: "%s" % bytes would embed the
                # literal "b'...'" representation under Python 3.
                return "<style>%s</style>" % body.decode("UTF-8", "replace")
            elif basename.endswith(".js"):
                # NOTE(review): no closing </script> tag here — presumably the
                # caller appends it; confirm.
                return "<script>%s" % body.decode("UTF-8", "replace")
            else:
                return body
        else:
            # Build a file:/// URL. Encoding to bytes here (as the old code
            # did) would raise TypeError under Python 3 when concatenated
            # with the str prefix.
            return "file:///" + safe_unicode(cachefilename).lstrip("/").replace(
                os.path.sep, "/")
    else:
        _safe_print("Error: Empty document:", url)
        if os.path.isfile(cachefilename):
            _safe_print("Removing", cachefilename)
            os.remove(cachefilename)
import re
import os
import sys
import scrapertools
import config
import logger
import json
from core.item import Item
import guitools

# Git branch to pull updates from; overridable via the "branch" setting.
if config.get_setting("branch"):
    branch = config.get_setting("branch")
else:
    branch = "master"

# Local metadata files stored in the addon data directory.
channelspath = os.path.join(config.get_data_path(), "Channels.json")
serverspath = os.path.join(config.get_data_path(), "Servers.json")
lastupdatepath = os.path.join(config.get_data_path(), "update.txt")

# GitHub API endpoint (directory listings) and raw-content download base.
giturl = "https://api.github.com/repos/divadres/pelisalacarta/contents"
downloadurl = "https://raw.githubusercontent.com/divadres/pelisalacarta/"+branch
headers = [["User-Agent", "pelisalacarta"]]


def checkforupdates():
    # Checks for plugin updates when the "updatecheck2" setting is enabled.
    # NOTE(review): this definition is truncated at this point in the file.
    import time
    logger.info("checkforupdates")

    # Plugin updates
    if config.get_setting("updatecheck2") == "true":
        logger.info("Comprobando actualizaciones de pelisalcarta")
        if os.path.isfile(lastupdatepath):