Example #1
def downloadpageWithResult(url, post=None, headers=None, follow_redirects=True, timeout=None, header_to_get=None):
    response = httptools.downloadpage(url, post=post, headers=headers, follow_redirects=follow_redirects, timeout=timeout)

    if header_to_get:
        return response.headers.get(header_to_get)
    else:
        return response.data, response.code
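A minimal usage sketch for the wrapper above; the URL, timeout and header name are placeholders, and httptools is assumed to be imported at module level as in the surrounding project.
# Hypothetical call sites: the wrapper returns either (body, status_code)
# or a single response header, depending on header_to_get.
data, code = downloadpageWithResult("http://example.com/", timeout=10)
location = downloadpageWithResult("http://example.com/", follow_redirects=False,
                                  header_to_get="location")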
Example #2
def check():
    logger.info("deportesalacarta.channels.update_sports Comprobando versión")
    try:
        # Read the file with the installed version
        global bin
        fichero = open(LOCAL_XML_FILE, "r")
        data = fichero.read()
        fichero.close()
        version_local = scrapertools.find_single_match(
            data, "<version>([^<]+)</version>").strip()

        url_repo = ""
        server = ""
        if float(version_local) > 1.15:
            for i in range(3):
                bin = base64.b64decode(bin)

            data = eval(httptools.downloadpage(bin, hide=True).data)
            version_publicada = data["version"]
            message = data["changes"]
            url_repo = data["link"]
            server = data["server"]
        else:
            data = scrapertools.downloadpage(REMOTE_VERSION_FILE)
            version_publicada = scrapertools.find_single_match(
                data, "<version>([^<]+)</version>").strip()
            message = scrapertools.find_single_match(
                data, "<changes>([^<]+)</changes>").strip()
            logger.info(
                "deportesalacarta.channels.update_sports Versión en el repositorio: %s"
                % version_publicada)

        logger.info(
            "deportesalacarta.channels.update_sports Versión local: %s" %
            version_local)
        if float(version_publicada) > float(version_local):
            logger.info(
                "deportesalacarta.channels.update_sports Nueva versión encontrada"
            )
            return True, version_publicada, message, url_repo, server
        else:
            logger.info(
                "deportesalacarta.channels.update_sports No existe versión actualizada"
            )
            return False, "", "", "", ""
    except:
        import traceback
        logger.error("deportesalacarta.platformcode.launcher " +
                     traceback.format_exc())
        return False, "", "", "", ""
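A hedged usage sketch for check(); the caller below is hypothetical and only shows how the returned 5-tuple would be unpacked.
# Hypothetical caller: only act when a newer version was found.
updated, version_publicada, message, url_repo, server = check()
if updated:
    logger.info("New version %s available: %s" % (version_publicada, message))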
Example #3
def actualiza(item):
    logger.info("deportesalacarta.channels.update_sports actualiza")

    local_folder = os.path.join(xbmc.translatePath("special://home"), "addons")
    error = False
    if not item.url:
        url = "https://github.com/CmosGit/Mod_pelisalacarta_deportes/raw/addon/plugin.video.deportesalacarta-%s.zip" % item.version
    else:
        import servertools
        urls, puede, msg = servertools.resolve_video_urls_for_playing(
            item.server, item.url, "", False, True)
        if puede:
            data_ = httptools.downloadpage(urls[0], hide=True).data
            url = scrapertools.find_single_match(
                data_, '"downloadUrl"\s*:\s*"([^"]+)"')
            if not url:
                url = scrapertools.find_single_match(
                    data_, '<a id="download_button".*?href="([^"]+)"')
            if not item.server and not url:
                try:
                    name, value = scrapertools.find_single_match(
                        data_,
                        'method="post">.*?name="([^"]+)" value="([^"]+)"')
                    post = "%s=%s" % (name, value)
                    data_ = httptools.downloadpage(urls[0], post,
                                                   hide=True).data
                    url = scrapertools.find_single_match(
                        data_, '"downloadUrl"\s*:\s*"([^"]+)"')
                except:
                    pass

            if not url:
                urls, puede, msg = servertools.resolve_video_urls_for_playing(
                    item.server, base64.b64decode(item.url))
                url = urls[0][1]

    progreso = platformtools.dialog_progress("Progreso de la actualización",
                                             "Descargando...")
    filename = 'deportesalacarta-%s.zip' % item.version
    localfilename = filetools.join(config.get_data_path(), filename)
    try:
        result = downloadtools.downloadfile(url, localfilename, [], False,
                                            True, False)
        progreso.update(50, "Descargando archivo", "Descargando...")
        # Unzip the downloaded file
        logger.info(
            "deportesalacarta.channels.configuracion descomprime fichero...")
        from core import ziptools
        unzipper = ziptools.ziptools()
        logger.info("deportesalacarta.channels.configuracion destpathname=%s" %
                    local_folder)
        unzipper.extract(localfilename, local_folder, update=True)
        progreso.close()
    except:
        import traceback
        logger.info("Detalle del error: %s" % traceback.format_exc())
        # Delete the downloaded zip
        try:
            filetools.remove(localfilename)
        except:
            pass
        progreso.close()
        platformtools.dialog_ok(
            "Error", "Se ha producido un error extrayendo el archivo")
        return

    # Delete the downloaded zip
    logger.info("deportesalacarta.channels.configuracion borra fichero...")
    try:
        filetools.remove(localfilename)
    except:
        pass
    logger.info("deportesalacarta.channels.configuracion ...fichero borrado")

    platformtools.dialog_notification(
        "Actualizado correctamente",
        "Versión %s instalada con éxito" % item.version)

    xbmc.executebuiltin("Container.Refresh")
Example #4
def anti_cloudflare(url, headers=None, post=None):
    # anti_cloudflare handling is already built into httptools by default
    response = httptools.downloadpage(url, post=post, headers=headers)
    return response.data
Example #5
def read_body_and_headers(url, post=None, headers=None, follow_redirects=False, timeout=None):
    response = httptools.downloadpage(url, post=post, headers=headers, follow_redirects=follow_redirects, timeout=timeout)
    return response.data, response.headers
Example #6
def get_headers_from_response(url, post=None, headers=None, follow_redirects=False):
    response = httptools.downloadpage(url, post=post, headers=headers, only_headers=True,
                                      follow_redirects=follow_redirects)
    return response.headers.items()
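A small usage sketch for get_headers_from_response(); the URL is a placeholder, and the loop only shows that the headers come back as (name, value) pairs.
# Hypothetical call: iterate over the returned header pairs.
for name, value in get_headers_from_response("http://example.com/"):
    logger.info("%s: %s" % (name, value))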
Example #7
def get_header_from_response(url, header_to_get="", post=None, headers=None, follow_redirects=False):
    header_to_get = header_to_get.lower()
    response = httptools.downloadpage(url, post=post, headers=headers, only_headers=True,
                                      follow_redirects=follow_redirects)
    return response.headers.get(header_to_get)
Example #8
def getLocationHeaderFromResponse(url):
    response = httptools.downloadpage(url, only_headers=True, follow_redirects=False)
    return response.headers.get("location")
Example #9
def downloadpageGzip(url):
    response = httptools.downloadpage(url, add_referer=True)
    return response.data
Example #10
def downloadpageWithoutCookies(url):
    response = httptools.downloadpage(url, cookies=False)
    return response.data
Example #11
def Enlaces(self, Nam, URLL="", THUMB="", historial=""):

    # Strip punctuation, HTML entities and accented characters from the name
    # so it can be used as part of a filename.
    NN = Nam
    for old, new in [("¡", ""), ("¿", ""), ("?", ""), (":", ""), ("º", ""),
                     ("ª", ""), ('"', ""), ("'", ""), ("(", ""), (")", ""),
                     ("á", "a"), ("Á", "A"), ("é", "e"), ("É", "E"),
                     ("í", "i"), ("Í", "I"), ("ó", "o"), ("Ó", "O"),
                     ("ú", "u"), ("Ú", "U"), ("ñ", "n"), ("Ñ", "N"),
                     ("&ntilde;", "n"), ("&quot;", ""), ("&#039;", "")]:
        NN = NN.replace(old, new)
    ENN = URLL
    IMG = THUMB

    Categ = RutaTMP + NN + ".xml"

    if "###" in ENN:
        id = ENN.split("###")[1].split(";")[0]
        type = ENN.split("###")[1].split(";")[1]
        ENN = ENN.split("###")[0]

    itemlist = []
    it1 = []
    it2 = []

    url_targets = ENN

    data_js = httptools.downloadpage(
        "http://ps3plusteam.ddns.net/hdfull/jquery.hdfull.view.min.js",
        headers={
            'referer': 'http://ps3plusteam.ddns.net/'
        }).data
    key = scrapertools.find_single_match(
        data_js, r'JSON.parse\(atob.*?substrings\((.*?)\)')

    data_js = httptools.downloadpage(
        "http://ps3plusteam.ddns.net/hdfull/providers.js",
        headers={
            'referer': 'http://ps3plusteam.ddns.net/'
        }).data

    decoded = jhexdecode(data_js).replace("'", '"')
    providers_pattern = r'p\[(\d+)\]= {"t":"([^"]+)","d":".*?","e":.function.*?,"l":.function.*?return "([^"]+)".*?};'
    providers = scrapertools.find_multiple_matches(decoded, providers_pattern)
    provs = {}
    for provider, e, l in providers:
        provs[provider] = [e, l]

    data = agrupa_datos(ENN)
    data_obf = scrapertools.find_single_match(data, r"var ad\s*=\s*'([^']+)'")

    data_decrypt = jsontools.load(
        obfs(base64.b64decode(data_obf), 126 - int(key)))

    FF = open(Categ, 'w')
    FF.write(
        '<?xml version="1.0" encoding="iso-8859-1"?>\n<items>\n<playlist_name><![CDATA['
        + NN + ']]></playlist_name>\n\n')

    Conteo = 0
    matches = []
    for match in data_decrypt:
        if match['provider'] in provs:
            try:
                embed = provs[match['provider']][0]
                url = provs[match['provider']][1] + match['code']
                matches.append([match['lang'], match['quality'], url, embed])
            except:
                pass

    for idioma, calidad, url, embed in matches:
        if embed == 'd':
            option = "Descargar"
            option1 = 2

            if idioma == "ESP":
                if url.find('uptobox') != -1:
                    Conteo = Conteo + 1
                    FF.write("<channel>\n")
                    FF.write("    <title><![CDATA[Ver en gamovideo " +
                             NN.encode('utf8') + " " + calidad +
                             "]]></title>\n")
                    FF.write('    <description><![CDATA[' + IMG +
                             ']]></description>\n')
                    FF.write('    <playlist_url><![CDATA[' + url +
                             ']]></playlist_url>\n')
                    FF.write(
                        '    <stream_url><![CDATA[http://ps3plusteam.ddns.net/teamps3plus/pro/uptobox.txt]]></stream_url>\n'
                    )
                    FF.write(
                        '    <img_src><![CDATA[http://ps3plusteam.ddns.net/ps3plus/images/letras/uptobox.png]]></img_src>\n'
                    )
                    FF.write('    <tipo><![CDATA[hdfullLinks]]></tipo>\n')
                    FF.write('</channel>\n\n')
        else:
            option = "Ver"
            option1 = 1

            if idioma == "ESP":
                Conteo = Conteo + 1
                if url.find('vidoza') != -1:
                    FF.write("<channel>\n")
                    FF.write("    <title><![CDATA[Ver en vidoza " +
                             NN.encode('utf8') + " " + calidad +
                             "]]></title>\n")
                    FF.write('    <description><![CDATA[' + IMG +
                             ']]></description>\n')
                    FF.write('    <playlist_url><![CDATA[' + url +
                             ']]></playlist_url>\n')
                    FF.write(
                        '    <stream_url><![CDATA[http://ps3plusteam.ddns.net/teamps3plus/pro/vidoza.txt]]></stream_url>\n'
                    )
                    FF.write(
                        '    <img_src><![CDATA[http://ps3plusteam.ddns.net/ps3plus/images/letras/vidoza.png]]></img_src>\n'
                    )
                    FF.write('    <tipo><![CDATA[hdfullLinks]]></tipo>\n')
                    FF.write('</channel>\n\n')
                if url.find('gamovideo') != -1:
                    Conteo = Conteo + 1
                    buscaID = re.findall(r'com/(.*)', url)
                    buscaID = buscaID[0]
                    FF.write("<channel>\n")
                    FF.write("    <title><![CDATA[Ver en gamovideo " +
                             NN.encode('utf8') + " " + calidad +
                             "]]></title>\n")
                    FF.write('    <description><![CDATA[' + IMG +
                             ']]></description>\n')
                    FF.write(
                        '    <playlist_url><![CDATA[http://gamovideo.com/embed-'
                        + buscaID + '-640x360.html]]></playlist_url>\n')
                    FF.write(
                        '    <stream_url><![CDATA[http://ps3plusteam.ddns.net/teamps3plus/props3/gamo.txt]]></stream_url>\n'
                    )
                    FF.write(
                        '    <img_src><![CDATA[http://ps3plusteam.ddns.net/ps3plus/images/letras/gamovideo.png]]></img_src>\n'
                    )
                    FF.write('    <tipo><![CDATA[hdfullLinks]]></tipo>\n')
                    FF.write('</channel>\n\n')

    FF.write('<prev_page_url text="CH- ATRAS"><![CDATA[' + historial +
             ']]></prev_page_url>\n</items>')
    FF.close()

    if Conteo == 0:
        return None

    return Categ