Example #1
def streamlink(request=request):
    '''Parse query string, set options and get stream.'''
    try:
        # Get arguments passed with query string
        args = request.args
        # Available options
        if 'help' in args:
            return Response(Streamlink.set_option.__doc__,
                            content_type='text/plain')
        # The URL is either the first query argument or set explicitly with the 'url' key.
        if 'url' not in args:
            url = next(iter(args))
        else:
            url = args['url']

        # Split into the URL itself (url[0]) and the stream name (url[1]), if present.
        url = url.split()
        session = Streamlink()
        plugin = session.resolve_url(url[0])
        # Use the remaining arguments to set other options.
        for key in args:
            if re.match('[0-9]+$', args[key]):
                value = int(args[key])
            else:
                value = args[key]
            # Set session options described by help
            session.set_option(key, value)
            # Set plugin options if required (usually username and password)
            plugin.set_option(key, value)
        # Fetch the streams for the given URL
        streams = session.streams(url[0])
        # pick the stream
        if len(url) > 1:
            stream = streams[url[1]]
        else:
            # If specific stream is not provided in args, output list of available streams.
            return Response('Available streams: ' + str(list(streams.keys())) +
                            '\n',
                            content_type='text/plain')

        # Stream generator
        url_root = request.url_root

        def generate(fd):
            chunk = True
            # Iterate over stream
            with fd:
                last = time()
                while chunk:
                    now = time()
                    # Ping the server periodically to keep the connection alive
                    if now - last > pause:
                        urlopen(url_root)
                        last = now
                    # Read a chunk of the stream and yield it to the client
                    chunk = fd.read(buff_size)
                    yield chunk

        if 'link' in args:
            # Redirect client to stream url
            redirect_url = stream.url
            response = Response('', content_type='')
            response.headers['Location'] = redirect_url
            response.status_code = 302
            return response
        else:
            # Streaming to client
            # Open file like object of stream
            fd = stream.open()
            return Response(generate(fd), content_type='video/mpeg')
    except Exception as exception:
        error = 'Exception {0}: {1}\n'.format(
            type(exception).__name__, exception)
        return Response(error, content_type='text/plain')
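The handler above relies on module-level names that the snippet does not show (request, Response, pause, buff_size, re, urlopen, time). Below is a minimal wiring sketch, assuming a Flask app; the route name and the pause/buff_size values are illustrative, not taken from the original.

# Minimal wiring sketch (assumption: the handler above lives in a Flask module;
# the route, pause and buff_size values are illustrative, not from the original).
import re
from time import time
from urllib.request import urlopen

from flask import Flask, Response, request
from streamlink import Streamlink

app = Flask(__name__)
pause = 10          # seconds between keepalive requests to our own root URL (assumed)
buff_size = 8192    # bytes read from the stream per chunk (assumed)

# ... the streamlink() handler from the example above goes here ...

app.add_url_rule('/stream', 'streamlink', streamlink, methods=['GET'])

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)

With this wiring, requesting /stream?url=<page URL> lists the available stream names; appending a stream name after the URL (separated by a space) streams it, and adding link=1 redirects the client to the stream URL instead.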
Example #2
    def get_session(self, options=None, *args, **kwargs):
        return Streamlink(options)
Example #3
    def test_set_and_get_locale(self):
        session = Streamlink()
        session.set_option("locale", "en_US")
        self.assertEqual(session.localization.country.alpha2, "US")
        self.assertEqual(session.localization.language.alpha2, "en")
        self.assertEqual(session.localization.language_code, "en_US")
Example #4
File: main.py Project: Genzo45/NAP
def setup_streamlink():
    """Creates the Streamlink session."""
    global streamlink

    streamlink = Streamlink()
Example #5
def parser_helper():
    session = Streamlink()
    parser = build_parser()
    setup_plugin_args(session, parser)
    return parser
Example #6
    def setUp(self):
        self.session = Streamlink()
        self.plugin = Camsoda("https://www.camsoda.com/stream-name")
Example #7
    def subject(self, load_plugins=True):
        session = Streamlink()
        if load_plugins:
            session.load_plugins(self.plugin_path)

        return session
Example #8
import os
import time

import cv2
import numpy
from streamlink import Streamlink

streamer = Streamlink()


def grabframes(url: str, limit=1e3) -> numpy.ndarray:
    plugin = streamer.resolve_url(url)
    stream = plugin.streams().get("best")
    if stream is None:
        print("Failed to get stream. Try again.")
        return
    fd = stream.open()
    data = bytearray()
    while len(data) < 1e6:
        data += fd.read(1024)
    fname = "stream.bin"
    with open(fname, "wb") as outfile:
        outfile.write(data)

    # Close the stream once enough data has been buffered to disk
    fd.close()

    capture = cv2.VideoCapture(fname)
    imgs = []
    while capture.isOpened() and len(imgs) < limit:
        img = capture.read()[1]
        if img is None:
            break
        imgs.append(img)
    capture.release()

    return numpy.asarray(imgs)
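A possible way to call it, assuming the completed function returns a numpy array as its annotation suggests; the URL and the frame cap are placeholders, not from the original.

# Usage sketch: the URL and the frame cap are illustrative placeholders.
if __name__ == "__main__":
    frames = grabframes("https://www.twitch.tv/somechannel", limit=100)
    if frames is not None and len(frames) > 0:
        print("Grabbed", len(frames), "frames of shape", frames.shape[1:])
        cv2.imwrite("first_frame.png", frames[0])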
Example #9
    def test_https_proxy_set_only(self):
        session = Streamlink()
        session.set_option("https-proxy", "https://testhttpsproxy.com")

        self.assertFalse("http" in session.http.proxies)
        self.assertEqual("https://testhttpsproxy.com", session.http.proxies['https'])
Example #10
def session():
    return Streamlink()
Example #11
def setup_streamlink():
    streamlink_cli.main.streamlink = Streamlink()
    streamlink_cli.main.streamlink.load_plugins(PluginPath)
    return streamlink_cli.main.streamlink
Example #12
def filmonhls():
    with freezegun.freeze_time(datetime.datetime(2000, 1, 1, 0, 0, 0, 0)), \
         patch("streamlink.plugins.filmon.FilmOnHLS.url", new_callable=PropertyMock) as url:
        url.return_value = "http://filmon.tv/test.m3u8"
        session = Streamlink()
        yield FilmOnHLS(session, channel="test")
Example #13
def start_watching(channel,
                   client_id,
                   proxy=None,
                   thread_id=None,
                   on_exit=None):
    """Simulates watching the Stream."""

    if thread_id is None:
        thread_id = proxy or "local"

    session = init_session(proxy)

    run = False
    try:
        # set cookies for watching stream
        session.head(f"https://twitch.tv/{channel}",
                     allow_redirects=True,
                     timeout=TIMEOUT)
        token = get_token(session, channel, client_id)

        if "error" in token:
            print("error in token:", token["message"])
            # exit thread
            sys.exit(1)

        # got the stream url from twitch
        url = get_stream_url(session, channel, token)
        run = True
    except OSError as e:
        if "403 Forbidden" not in str(e):
            from traceback import print_exc

            # some generic error occurred
            print_exc()
            sys.exit(1)

        # proxy is rejecting twitch requests
        # using streamlink library to obtain URL
        print(
            f"error getting stream url in thread {thread_id}. try streamlink")

        stream_session = Streamlink()
        custom_headers = {"Client-ID": client_id}
        stream_session.set_option("http-headers", custom_headers)
        streams = stream_session.streams(f"http://twitch.tv/{channel}")
        url = streams["worst"].url

        run = True

    all_batch_urls = dict()
    sorted_time = set()
    last_playlist = time.time() - 100
    last_segmet = None
    while run:
        try:
            # it should update the viewer count only by using prefetch urls
            # the browser is also only requesting these
            pref_url = request_prefetch_url(session, channel, url)
            if pref_url is None:
                raise Exception("could not get prefetch url")

            request_segment(session, channel, pref_url)

            # TODO reimplement this auto watcher once the bot is working

            # if (last_playlist < time.time() - 15):
            #     # update batch urls
            #     video_urls = request_playlist(session, channel, url)
            #     all_batch_urls.update(video_urls)
            #     sorted_time = sorted(set(all_batch_urls.keys()))

            #     if last_segmet is not None:
            #         # remove all batches already seen
            #         items_to_remove = set()
            #         for batch_time in sorted_time:
            #             if batch_time > last_segmet:
            #                 continue

            #             # mark item to remove
            #             items_to_remove.add(batch_time)

            #         # actually remove item
            #         for item in items_to_remove:
            #             sorted_time.remove(item)
            #             del all_batch_urls[item]

            #     last_playlist = time.time()

            #     if len(sorted_time) > 0:
            #         # remove the current batch from our lists
            #         earliest_batch_time = sorted_time[0]
            #         batch_url = all_batch_urls[earliest_batch_time]
            #         sorted_time.remove(earliest_batch_time)
            #         del all_batch_urls[earliest_batch_time]

            #         # set last watch time
            #         last_segmet = earliest_batch_time

            #         # watch the batch
            #         request_segment(session, channel, batch_url)

            time.sleep(2)
        except requests.exceptions.Timeout as e:
            print(f"timeout in thread: {thread_id}")
            break
        except Exception as e:
            print(f"error in thread: {thread_id}")
            from traceback import print_exc

            print_exc()
            break

    if on_exit is not None:
        on_exit(thread_id)

    sys.exit(0)
Example #14
from streamlink import Streamlink

session = Streamlink()
streams = session.streams("https://www.twitch.tv/valkia")

fd = streams['audio_only'].open()
f = open("audio.ts", "ab")

# Read the stream in fixed-size chunks (fd.read() expects an integer byte count)
for i in range(1, 10):
    data1 = fd.read(1024 * 1024)
    f.write(data1)
    print("Written to file")

f.close()
fd.close()
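A more defensive variant of the same idea (a sketch, not part of the original example): read until the stream ends and let context managers close both the stream and the output file.

from contextlib import closing

from streamlink import Streamlink

session = Streamlink()
streams = session.streams("https://www.twitch.tv/valkia")

# Read until the stream ends; closing() guards the stream's file-like object
with closing(streams['audio_only'].open()) as fd, open("audio.ts", "ab") as f:
    while True:
        data = fd.read(1024 * 1024)
        if not data:
            break
        f.write(data)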
Example #15
def session():
    from streamlink import Streamlink
    return Streamlink()
Example #16
def _init_record(stream_url, callback_url):
    filename = str(uuid.uuid4())
    requests.post(
        callback_url,
        json={
            "event": "record_initialized",
            "id": filename,
            "timestamp": datetime().strftime("%Y-%m-%d %H:%m:%s"),
        },
    )
    try:
        session = Streamlink()
        streams = session.streams(stream_url)
        source_stream = streams["source"]
        stream_fd = source_stream.open()
        os.makedirs("records", exist_ok=True)

        total_byte_accepted = 0
        SIZE_OF_CHUNK = 1024
        # report to callback when 1M data is accepted
        bytes_for_next_callback = 1024 * 1024
        with open("./records/{}".format(filename), "wb") as record_fd:
            while True:
                data = stream_fd.read(SIZE_OF_CHUNK)
                if not data:  # stream ended
                    break
                record_fd.write(data)
                total_byte_accepted += len(data)
                bytes_for_next_callback -= len(data)

                if bytes_for_next_callback <= 0:
                    requests.post(
                        callback_url,
                        json={
                            "event": "downloading",
                            "id": filename,
                            "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                            "total_bytes": total_byte_accepted,
                        },
                    )
                    bytes_for_next_callback = 1024 * 1024

        requests.post(
            callback_url,
            json={
                "event": "downloaded",
                "id": filename,
                "timestamp": datetime().strftime("%Y-%m-%d %H:%m:%s"),
                "total_bytes": total_byte_accepted,
            },
        )

        upload_file("./records/{}".format(filename), filename, "blackbox-statics")

        requests.post(
            callback_url,
            json={
                "event": "uploaded",
                "id": filename,
                "timestamp": datetime().strftime("%Y-%m-%d %H:%m:%s"),
                "total_bytes": total_byte_accepted,
            },
        )
    except Exception as err:
        requests.post(
            callback_url,
            json={
                "event": "error",
                "id": filename,
                "timestamp": datetime().strftime("%Y-%m-%d %H:%m:%s"),
                "error": str(err),
            },
        )
Example #17
def get_session():
    s = Streamlink()
    s.load_plugins(PluginPath)
    return s
Example #18
def main_play(HTTPBase, redirect=False):
    # parse url query data
    old_data = parse_qsl(urlparse(HTTPBase.path).query)
    arglist = []
    for k, v in old_data:
        arglist += ['--{0}'.format(unquote(k)), unquote(v)]

    parser = build_parser()
    args = setup_args(parser, arglist, ignore_unknown=True)

    # create a new session for every request
    session = Streamlink()

    log.info('User-Agent: {0}'.format(HTTPBase.headers.get(
        'User-Agent', '???')))
    log.info('Client: {0}'.format(HTTPBase.client_address))
    log.info('Address: {0}'.format(HTTPBase.address_string()))

    setup_plugins(session, args)
    setup_plugin_args(session, parser)
    # call setup args again once the plugin specific args have been added
    args = setup_args(parser, arglist, ignore_unknown=True)
    args = setup_config_args(session, args, parser, arglist)
    logger.root.setLevel(args.loglevel)
    setup_http_session(session, args)

    if args.url:
        setup_options(session, args)

        try:
            plugin = session.resolve_url(args.url)
            setup_plugin_options(session, args, plugin)
            log.info('Found matching plugin {0} for URL {1}', plugin.module,
                     args.url)

            plugin_args = []
            for parg in plugin.arguments:
                value = plugin.get_option(parg.dest)
                if value:
                    plugin_args.append((parg, value))

            if plugin_args:
                log.debug('Plugin specific arguments:')
                for parg, value in plugin_args:
                    log.debug(' {0}={1} ({2})'.format(
                        parg.argument_name(plugin.module),
                        value if not parg.sensitive else ('*' * 8), parg.dest))

            if redirect is True:
                streams = session.streams(args.url,
                                          stream_types=['hls', 'http'])
            else:
                streams = session.streams(
                    args.url,
                    stream_types=args.stream_types,
                    sorting_excludes=args.stream_sorting_excludes)
        except NoPluginError:
            log.error('No plugin can handle URL: {0}', args.url)
            HTTPBase._headers(404, 'text/html', connection='close')
            return
        except PluginError as err:
            log.error('PluginError {0}', str(err))
            HTTPBase._headers(404, 'text/html', connection='close')
            return

        if not streams:
            log.error('No playable streams found on this URL: {0}', args.url)
            HTTPBase._headers(404, 'text/html', connection='close')
            return

        if args.default_stream and not args.stream:
            args.stream = args.default_stream

        if not args.stream:
            args.stream = ['best']

        stream_ended = False
        validstreams = format_valid_streams(plugin, streams)
        for stream_name in args.stream:
            if stream_name in streams:
                log.info('Available streams: {0}', validstreams)
                # Decide what to do with the selected stream.

                stream_name = resolve_stream_name(streams, stream_name)
                stream = streams[stream_name]

                # Find any streams with a '_alt' suffix and attempt
                # to use these in case the main stream is not usable.
                alt_streams = list(
                    filter(lambda k: stream_name + '_alt' in k,
                           sorted(streams.keys())))

                for stream_name in [stream_name] + alt_streams:
                    stream = streams[stream_name]
                    stream_type = type(stream).shortname()

                    log.info('Opening stream: {0} ({1})', stream_name,
                             stream_type)

                    if isinstance(stream, (RTMPStream)):
                        log.info('RTMP streams '
                                 'might not work on every platform.')
                    elif isinstance(stream, (MuxedStream, DASHStream)):
                        log.info('FFmpeg streams (dash, muxed) '
                                 'might not work on every platform.')

                    # 301
                    if redirect is True:
                        log.info('301 - URL: {0}'.format(stream.url))
                        HTTPBase.send_response(301)
                        HTTPBase.send_header('Location', stream.url)
                        HTTPBase.end_headers()
                        log.info('301 - done')
                        stream_ended = True
                        break

                    # play
                    try:
                        fd = stream.open()
                    except StreamError as err:
                        log.error('Could not open stream: {0}'.format(err))
                        continue

                    cache = 4096
                    HTTPBase._headers(200, 'video/unknown')
                    try:
                        log.debug('Pre-buffering {0} bytes'.format(cache))
                        while True:
                            buff = fd.read(cache)
                            if not buff:
                                log.error('No Data for buff!')
                                break
                            HTTPBase.wfile.write(buff)
                        HTTPBase.wfile.close()
                    except socket.error as e:
                        if isinstance(e.args, tuple):
                            if e.errno == errno.EPIPE:
                                # remote peer disconnected
                                log.info('Detected remote disconnect')
                            else:
                                log.error(str(e))
                        else:
                            log.error(str(e))

                    fd.close()
                    log.info('Stream ended')
                    fd = None
                    stream_ended = True

                    break

                if not stream_ended:
                    HTTPBase._headers(404, 'text/html', connection='close')
                return

            err = ('The specified stream(s) \'{0}\' could not be '
                   'found'.format(', '.join(args.stream)))

            log.error('{0}.\n       Available streams: {1}', err, validstreams)
            HTTPBase._headers(404, 'text/html', connection='close')
            return

        else:
            HTTPBase._headers(404, 'text/html', connection='close')
            log.error('No URL provided.')
            return
Example #19
    def subject(url):
        session = Streamlink()
        Twitch.bind(session, "tests.plugins.test_twitch")
        plugin = Twitch(url)
        return plugin.get_author(), plugin.get_title(), plugin.get_category()
Example #20
    def test_check_cmd_none(self, mock_cmd):
        s = StreamProcess(Streamlink())
        mock_cmd.return_value = None
        self.assertRaises(StreamError, s._check_cmd)
Example #21
    def setUp(self):
        self.session = Streamlink()
        self.session.http = MagicMock(HTTPSession)
        self.session.http.headers = {}
Example #22
    def test_check_cmd_cat(self, which, mock_cmd):
        s = StreamProcess(Streamlink())
        mock_cmd.return_value = "test"
        self.assertEqual("test", s._check_cmd())
Example #23
    def setUp(self):
        self.session = Streamlink()
Example #24
    def test_check_cmd_nofound(self, which, mock_cmd):
        s = StreamProcess(Streamlink())
        mock_cmd.return_value = "test"
        which.return_value = None
        self.assertRaises(StreamError, s._check_cmd)
Example #25
def setup_streamlink():
    """Creates the Streamlink session."""
    global streamlink

    streamlink = Streamlink({"user-input-requester": ConsoleUserInputRequester(console)})
Example #26
    def test_check_cmdline(self, which, mock_cmd):
        s = StreamProcess(Streamlink(), params=dict(help=True))
        mock_cmd.return_value = "test"
        which.return_value = "test"
        self.assertEqual("test --help", s.cmdline())
Example #27
    def setUp(self):
        self.session = Streamlink()
        self.session.load_plugins(self.PluginPath)
Example #28
    def test_check_cmdline_long(self, which, mock_cmd):
        s = StreamProcess(Streamlink(), params=dict(out_file="test file.txt"))
        mock_cmd.return_value = "test"
        which.return_value = "test"
        self.assertEqual("test --out-file \"test file.txt\"", s.cmdline())
Example #29
    def test_https_proxy_default(self):
        session = Streamlink()
        session.set_option("http-proxy", "http://testproxy.com")

        self.assertEqual("http://testproxy.com", session.http.proxies['http'])
        self.assertEqual("http://testproxy.com", session.http.proxies['https'])
Example #30
def startRecording(model):
    global notonline
    global recording

    try:
        model = model.lower()
        resp = requests.get('https://www.cam4.com/' + model,
                            headers={
                                'user-agent': 'UserAgent'
                            }).text.splitlines()
        videoPlayUrl = ""
        videoAppUrl = ""
        for line in resp:
            if "videoPlayUrl" in line:
                for part in line.split("&"):
                    if "videoPlayUrl" in part and videoPlayUrl == "":
                        videoPlayUrl = part[13:]
                    elif "videoAppUrl" in part and videoAppUrl == "":
                        videoAppUrl = part.split("//")[1]

        if videoAppUrl == "" and videoAppUrl == "":
            notonline.append(model)
            return

        if model in notonline:
            notonline.remove(model)

        session = Streamlink()
        session.set_option('http-headers',
                           "referer=https://www.cam4.com/{}".format(model))

        streams = session.streams(
            "hlsvariant://https://{}/amlst:{}_aac/playlist.m3u8?referer=www.cam4.com&timestamp={}"
            .format(videoAppUrl, videoPlayUrl, str(int(time.time() * 1000))))

        stream = streams["best"]
        fd = stream.open()
        ts = time.time()
        st = datetime.datetime.fromtimestamp(ts).strftime("%Y.%m.%d_%H.%M.%S")
        _uuid = uuid.uuid4()

        file = os.path.join(
            setting['save_directory'], model,
            "{st}_{model}_{_uuid}.mp4".format(model=model, st=st, _uuid=_uuid))
        os.makedirs(os.path.join(setting['save_directory'], model),
                    exist_ok=True)
        with open(file, 'wb') as f:
            recording.append(model)
            while True:
                try:
                    data = fd.read(1024)
                    f.write(data)
                except Exception:
                    break
        if setting['postProcessingCommand']:
            processingQueue.put({'model': model, 'path': file})
    except Exception as e:
        notonline.append(model)
        if model in recording:
            recording.remove(model)
    finally:
        if model not in notonline:
            notonline.append(model)
        if model in recording:
            recording.remove(model)