Example #1
class BandwidthEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling requests for bandwidth accounting data.
    """
    def setup_routes(self) -> None:
        self.app.add_routes([web.get('/statistics', self.get_statistics)])
        self.app.add_routes([web.get('/history', self.get_history)])

    @docs(tags=["Bandwidth"],
          summary="Return statistics about the bandwidth community.",
          responses={
              200: {
                  "schema":
                  schema(
                      BandwidthStatisticsResponse={
                          'statistics':
                          schema(
                              BandwidthStatistics={
                                  'id': String,
                                  'num_peers_helped': Integer,
                                  'num_peers_helped_by': Integer,
                                  'total_taken': Integer,
                                  'total_given': Integer
                              })
                      })
              }
          })
    async def get_statistics(self, request) -> RESTResponse:
        if not self.session.bandwidth_community:
            return RESTResponse({"error": "Bandwidth community not found"},
                                status=HTTP_NOT_FOUND)
        return RESTResponse(
            {'statistics': self.session.bandwidth_community.get_statistics()})

    @docs(tags=["Bandwidth"],
          summary="Return a list of the balance history.",
          responses={
              200: {
                  "schema":
                  schema(
                      BandwidthHistoryResponse={
                          "history": [
                              schema(BandwidthHistoryItem={
                                  "timestamp": Integer,
                                  "balance": Integer
                              })
                          ]
                      })
              }
          })
    async def get_history(self, request) -> RESTResponse:
        if not self.session.bandwidth_community:
            return RESTResponse({"error": "Bandwidth community not found"},
                                status=HTTP_NOT_FOUND)
        return RESTResponse(
            {'history': self.session.bandwidth_community.database.get_history()})
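# --- Usage sketch (not part of the endpoint above) ----------------------------
# A minimal client-side illustration of how the two routes could be queried with
# aiohttp. The base URL and the "/bandwidth" prefix are assumptions; the actual
# port and mount point depend on how the Tribler REST API is configured.
import asyncio
from aiohttp import ClientSession

async def fetch_bandwidth_data(base_url="http://localhost:8085"):
    async with ClientSession() as session:
        async with session.get(f"{base_url}/bandwidth/statistics") as response:
            stats = await response.json()    # {'statistics': {...}} on success
        async with session.get(f"{base_url}/bandwidth/history") as response:
            history = await response.json()  # {'history': [{'timestamp': ..., 'balance': ...}, ...]}
    return stats, history

# asyncio.run(fetch_bandwidth_data())  # run against a live Tribler core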
Example #2
class ShutdownEndpoint(RESTEndpoint):
    """
    With this endpoint you can shut down Tribler.
    """

    def __init__(self, shutdown_callback):
        super().__init__()
        self.shutdown_callback = shutdown_callback

    def setup_routes(self):
        self.app.add_routes([web.put('', self.shutdown)])

    @docs(
        tags=["General"],
        summary="Shutdown Tribler.",
        responses={
            200: {
                "schema": schema(TriblerShutdownResponse={
                    'shutdown': Boolean
                })
            }
        }
    )
    async def shutdown(self, request):
        self.shutdown_callback()
        return RESTResponse({"shutdown": True})
Example #3
class StateEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling all requests regarding the state of Tribler.
    """
    def __init__(self, session):
        super().__init__(session)
        self.tribler_state = STATE_STARTING
        self.last_exception = None
        self.sentry_event = None

        self.session.notifier.add_observer(NTFY.UPGRADER_STARTED,
                                           self.on_tribler_upgrade_started)
        self.session.notifier.add_observer(NTFY.UPGRADER_DONE,
                                           self.on_tribler_upgrade_finished)
        self.session.notifier.add_observer(NTFY.TRIBLER_STARTED,
                                           self.on_tribler_started)

    def setup_routes(self):
        self.app.add_routes([web.get('', self.get_state)])

    def on_tribler_upgrade_started(self, *_):
        self.tribler_state = STATE_UPGRADING

    def on_tribler_upgrade_finished(self, *_):
        self.tribler_state = STATE_STARTING

    def on_tribler_started(self, *_):
        self.tribler_state = STATE_STARTED

    def on_tribler_exception(self, exception_text, sentry_event):
        self.tribler_state = STATE_EXCEPTION
        self.last_exception = exception_text
        self.sentry_event = sentry_event

    @docs(
        tags=["General"],
        summary="Return the current state of the Tribler core.",
        responses={
            200: {
                "schema":
                schema(
                    TriblerStateResponse={
                        'state':
                        (String,
                         'One of four states: STARTING, UPGRADING, STARTED, EXCEPTION'
                         ),
                        'last_exception':
                        String,
                        'readable_state':
                        String
                    })
            }
        })
    async def get_state(self, request):
        return RESTResponse({
            "state": self.tribler_state,
            "last_exception": self.last_exception,
            "readable_state": self.session.readable_status
        })
Example #4
class UpgraderEndpoint(RESTEndpoint):
    """
    With this endpoint you can control the DB upgrade process of Tribler.
    """
    def setup_routes(self):
        self.app.add_routes([web.post('', self.skip_upgrade)])

    @docs(tags=["Upgrader"],
          summary="Skip the DB upgrade process, if it is running.",
          responses={
              200: {
                  "schema":
                  schema(UpgraderResponse={'skip_db_upgrade': Boolean}),
                  'examples': {
                      "skip_db_upgrade": True
                  }
              },
              HTTP_NOT_FOUND: {
                  'schema': HandledErrorSchema
              },
              HTTP_BAD_REQUEST: {
                  'schema': HandledErrorSchema
              }
          })
    @json_schema(
        schema(
            UpgraderRequest={
                'skip_db_upgrade': (
                    Boolean, 'Whether to skip the DB upgrade process or not'),
            }))
    async def skip_upgrade(self, request):
        parameters = await request.json()
        if SKIP_DB_UPGRADE_STR not in parameters:
            return RESTResponse({"error": "attribute to change is missing"},
                                status=HTTP_BAD_REQUEST)
        elif not self.session.upgrader:
            return RESTResponse({"error": "upgrader is not running"},
                                status=HTTP_NOT_FOUND)

        if self.session.upgrader and parameters[SKIP_DB_UPGRADE_STR]:
            self.session.upgrader.skip()

        return RESTResponse({SKIP_DB_UPGRADE_STR: True})
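# --- Usage sketch (not part of the endpoint above) ----------------------------
# The endpoint expects a JSON body containing the SKIP_DB_UPGRADE_STR key
# (presumably "skip_db_upgrade", matching the request and response schemas). The
# "/upgrader" prefix and the port are assumptions about how the endpoint is mounted.
import asyncio
from aiohttp import ClientSession

async def skip_db_upgrade(base_url="http://localhost:8085"):
    async with ClientSession() as session:
        async with session.post(f"{base_url}/upgrader",
                                json={"skip_db_upgrade": True}) as response:
            # {'skip_db_upgrade': True} on success, or an {'error': ...} dict otherwise
            return await response.json()

# asyncio.run(skip_db_upgrade())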
Example #5
class RemoteQueryEndpoint(MetadataEndpointBase):
    """
    This endpoint is responsible for searching in channels and torrents present in the local Tribler database.
    It also fires a remote search in the IPv8 channel community.
    """

    def setup_routes(self):
        self.app.add_routes([web.put('', self.create_remote_search_request)])

    def sanitize_parameters(self, parameters):
        sanitized = super(RemoteQueryEndpoint, self).sanitize_parameters(parameters)
        sanitized.update({'uuid': parameters['uuid'], 'channel_pk': unhexlify(parameters.get('channel_pk', ""))})
        return sanitized

    @docs(
        tags=['Metadata'],
        summary="Perform a search for a given query.",
        responses={
            200: {
                'schema': schema(RemoteSearchResponse={
                    'success': Boolean
                })
            }
        }
    )
    @querystring_schema(RemoteQueryParameters)
    async def create_remote_search_request(self, request):
        # Query remote results from the GigaChannel Community v1.0.
        # v1.0 does not support searching for text limited by public key.
        # GigaChannel v1.0 search community sends requests for channel contents by putting channel's public key
        # into the text filter field. To communicate with older clients we have to shape the request accordingly.
        # Results are returned over the Events endpoint.
        try:
            sanitized = self.sanitize_parameters(request.query)
            if sanitized["txt_filter"] and sanitized["channel_pk"]:
                return RESTResponse({"error": "Remote search by text and pk is not supported"}, status=HTTP_BAD_REQUEST)
        except (ValueError, KeyError) as e:
            return RESTResponse({"error": "Error processing request parameters: %s" % e}, status=HTTP_BAD_REQUEST)

        self.session.gigachannel_community.send_search_request(
            sanitized['txt_filter'] or ('"%s"*' % hexlify(sanitized['channel_pk'])),
            metadata_type=sanitized.get('metadata_type'),
            sort_by=sanitized['sort_by'],
            sort_asc=sanitized['sort_desc'],
            hide_xxx=sanitized['hide_xxx'],
            uuid=sanitized['uuid'],
        )

        return RESTResponse({"success": True})
Example #6
class ShutdownEndpoint(RESTEndpoint):
    """
    With this endpoint you can shut down Tribler.
    """

    def __init__(self, session):
        super(ShutdownEndpoint, self).__init__(session)
        self.process_checker = ProcessChecker()

    def setup_routes(self):
        self.app.add_routes([web.put('', self.shutdown)])

    @docs(
        tags=["General"],
        summary="Shutdown Tribler.",
        responses={
            200: {
                "schema": schema(TriblerShutdownResponse={
                    'shutdown': Boolean
                })
            }
        }
    )
    async def shutdown(self, request):
        async def shutdown():
            try:
                keep_loop_running = await self.session.shutdown()
            except Exception as e:
                self._logger.error(e)
                keep_loop_running = False

            self.process_checker.remove_lock_file()
            # Flush the logs to the file before exiting
            for handler in logging.getLogger().handlers:
                handler.flush()
            if not keep_loop_running:
                get_event_loop().stop()

        ensure_future(shutdown())
        return RESTResponse({"shutdown": True})
Example #7
class RemoteQueryEndpoint(MetadataEndpointBase):
    """
    This endpoint fires a remote search in the IPv8 GigaChannel Community.
    """

    def setup_routes(self):
        self.app.add_routes([web.put('', self.create_remote_search_request)])

    def sanitize_parameters(self, parameters):
        sanitized = super().sanitize_parameters(parameters)

        # Convert frozenset to string
        if "metadata_type" in sanitized:
            sanitized["metadata_type"] = [str(mt) for mt in sanitized["metadata_type"] if mt]
        if "channel_pk" in parameters:
            sanitized["channel_pk"] = parameters["channel_pk"]
        if "origin_id" in parameters:
            sanitized["origin_id"] = parameters["origin_id"]

        return sanitized

    @docs(
        tags=['Metadata'],
        summary="Perform a search for a given query.",
        responses={200: {'schema': schema(RemoteSearchResponse={'request_uuid': String()})}},
    )
    @querystring_schema(RemoteQueryParameters)
    async def create_remote_search_request(self, request):
        # Query remote results from the GigaChannel Community.
        # Results are returned over the Events endpoint.
        try:
            sanitized = self.sanitize_parameters(request.query)
        except (ValueError, KeyError) as e:
            return RESTResponse({"error": f"Error processing request parameters: {e}"}, status=HTTP_BAD_REQUEST)

        request_uuid = self.session.gigachannel_community.send_search_request(**sanitized)
        return RESTResponse({"request_uuid": str(request_uuid)})
Example #8
class SettingsEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling all requests regarding settings and configuration.
    """
    def __init__(self,
                 tribler_config: TriblerConfig,
                 download_manager: DownloadManager = None):
        super().__init__()
        self.tribler_config = tribler_config
        self.download_manager = download_manager

    def setup_routes(self):
        self.app.add_routes([
            web.get('', self.get_settings),
            web.post('', self.update_settings)
        ])

    @docs(
        tags=["General"],
        summary="Return all the session settings that can be found in Tribler.",
        responses={200: {
            "schema": schema(GetTriblerSettingsResponse={})
        }},
        description="This endpoint returns all the session settings that can be found in Tribler.\n\n"
                    "It also returns the runtime-determined ports.")
    async def get_settings(self, request):
        self._logger.info(f'Get settings. Request: {request}')
        return RESTResponse({
            "settings": self.tribler_config.dict(),
            "ports": list(default_network_utils.ports_in_use)
        })

    @docs(tags=["General"],
          summary="Update Tribler settings.",
          responses={
              200: {
                  "schema":
                  schema(UpdateTriblerSettingsResponse={'modified': Boolean})
              }
          })
    @json_schema(schema(UpdateTriblerSettingsRequest={}))
    async def update_settings(self, request):
        settings_dict = await request.json()
        await self.parse_settings_dict(settings_dict)
        self.tribler_config.write()
        return RESTResponse({"modified": True})

    async def parse_setting(self, section, option, value):
        """
        Set a specific Tribler setting.
        """
        # if section in self.config.config and option in self.config.config[section]:
        self.tribler_config.__getattribute__(section).__setattr__(
            option, value)
        # else:
        #     raise ValueError(f"Section {section} with option {option} does not exist")

        # Perform some actions when specific keys are set
        if section == "libtorrent" and (option == "max_download_rate"
                                        or option == "max_upload_rate"):
            if self.download_manager:
                self.download_manager.update_max_rates_from_config()

    async def parse_settings_dict(self, settings_dict, depth=1, root_key=None):
        """
        Parse the settings dictionary.
        """
        for key, value in settings_dict.items():
            if isinstance(value, dict):
                await self.parse_settings_dict(value,
                                               depth=depth + 1,
                                               root_key=key)
            else:
                await self.parse_setting(root_key, key, value)
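# --- Illustration (hypothetical, standalone) -----------------------------------
# parse_settings_dict walks the nested settings dict and calls parse_setting with
# (section, option, value) for every leaf value, where the section is the key of
# the dict that immediately encloses the leaf. A plain-Python sketch of the same
# traversal:
def flatten_settings(settings_dict, root_key=None):
    pairs = []
    for key, value in settings_dict.items():
        if isinstance(value, dict):
            pairs.extend(flatten_settings(value, root_key=key))
        else:
            pairs.append((root_key, key, value))
    return pairs

# flatten_settings({"libtorrent": {"max_download_rate": 0, "max_upload_rate": 0}})
# -> [('libtorrent', 'max_download_rate', 0), ('libtorrent', 'max_upload_rate', 0)]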
Example #9
class EventsEndpoint(RESTEndpoint, TaskManager):
    """
    Important events in Tribler are returned over the events endpoint. This connection is held open. Each event is
    pushed over this endpoint in the form of a JSON dictionary. Each JSON dictionary contains a type field that
    indicates the type of the event. Events follow the server-sent events format, so each event is terminated by a blank line.
    """

    def __init__(self, session):
        RESTEndpoint.__init__(self, session)
        TaskManager.__init__(self)
        self.events_responses = []
        self.app.on_shutdown.append(self.on_shutdown)

        # We need to know that Tribler completed its startup sequence
        self.tribler_started = False
        self.session.notifier.add_observer(NTFY.TRIBLER_STARTED, self._tribler_started)

        for event_type, event_lambda in reactions_dict.items():
            self.session.notifier.add_observer(event_type,
                                               lambda *args, el=event_lambda, et=event_type:
                                               self.write_data({"type": et.value, "event": el(*args)}))

        def on_circuit_removed(circuit, *args):
            if isinstance(circuit, Circuit):
                event = {
                    "circuit_id": circuit.circuit_id,
                    "bytes_up": circuit.bytes_up,
                    "bytes_down": circuit.bytes_down,
                    "uptime": time.time() - circuit.creation_time
                }
                self.write_data({"type": NTFY.TUNNEL_REMOVE.value, "event": event})

        # Tribler tunnel circuit has been removed
        self.session.notifier.add_observer(NTFY.TUNNEL_REMOVE, on_circuit_removed)

    async def on_shutdown(self, _):
        await self.shutdown_task_manager()

    def _tribler_started(self):
        self.tribler_started = True

    def setup_routes(self):
        self.app.add_routes([web.get('', self.get_events)])

    @task
    async def write_data(self, message):
        """
        Write data over the event socket if it's open.
        """
        if not self.events_responses:
            return
        try:
            message = json.dumps(message)
        except UnicodeDecodeError:
            # The message contains invalid characters; fix them
            self._logger.error("Event contains non-unicode characters, fixing")
            message = json.dumps(fix_unicode_dict(message))
        message_bytes = b'data: ' + message.encode('utf-8') + b'\n\n'
        for request in self.events_responses:
            await request.write(message_bytes)

    # An exception has occurred in Tribler. The event includes a readable string of the error.
    def on_tribler_exception(self, exception_text):
        self.write_data({"type": NTFY.TRIBLER_EXCEPTION.value, "event": {"text": exception_text}})

    @docs(
        tags=["General"],
        summary="Open an EventStream for receiving Tribler events.",
        responses={
            200: {
                "schema": schema(EventsResponse={'type': String,
                                                 'event': Dict})
            }
        }
    )
    async def get_events(self, request):
        """
        .. http:get:: /events

        A GET request to this endpoint will open the event connection.

            **Example request**:

                .. sourcecode:: none

                    curl -X GET http://localhost:8085/events
        """

        # Setting content-type to text/event-stream to ensure browsers will handle the content properly
        response = RESTStreamResponse(status=200,
                                      reason='OK',
                                      headers={'Content-Type': 'text/event-stream',
                                               'Cache-Control': 'no-cache',
                                               'Connection': 'keep-alive'})
        await response.prepare(request)
        # FIXME: Proper start check!
        await response.write(b'data: ' + json.dumps({"type": NTFY.EVENTS_START.value,
                                                     "event": {"tribler_started": self.tribler_started,
                                                               "version": version_id}}).encode('utf-8') + b'\n\n')
        self.events_responses.append(response)
        try:
            while True:
                await self.register_anonymous_task('event_sleep', lambda: None, delay=3600)
        except CancelledError:
            self.events_responses.remove(response)
            return response
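# --- Usage sketch (not part of the endpoint above) ----------------------------
# A minimal consumer of the event stream. Each event arrives as a "data: <json>"
# line followed by a blank line (server-sent events framing), so it suffices to
# strip the prefix and JSON-decode every non-empty line. Port 8085 matches the
# curl example in the docstring above; treat it as an assumption.
import asyncio
import json
from aiohttp import ClientSession

async def consume_events(url="http://localhost:8085/events"):
    async with ClientSession() as session:
        async with session.get(url) as response:
            async for line in response.content:  # yields raw, b'\n'-terminated lines
                line = line.strip()
                if line.startswith(b"data: "):
                    event = json.loads(line[len(b"data: "):])
                    print(event["type"], event["event"])

# asyncio.run(consume_events())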
Example #10
class EventsEndpoint(RESTEndpoint, TaskManager):
    """
    Important events in Tribler are returned over the events endpoint. This connection is held open. Each event is
    pushed over this endpoint in the form of a JSON dictionary. Each JSON dictionary contains a type field that
    indicates the type of the event. Events follow the server-sent events format, so each event is terminated by a blank line.
    """

    def __init__(self, notifier: Notifier, public_key: str = None):
        RESTEndpoint.__init__(self)
        TaskManager.__init__(self)
        self.events_responses: List[RESTStreamResponse] = []
        self.app.on_shutdown.append(self.on_shutdown)
        self.notifier = None
        self.undelivered_error: Optional[dict] = None
        self.connect_notifier(notifier)
        self.public_key = public_key

    def connect_notifier(self, notifier: Notifier):
        self.notifier = notifier

        for event_type, event_lambda in reactions_dict.items():
            self.notifier.add_observer(event_type,
                                       lambda *args, el=event_lambda, et=event_type:
                                       self.write_data({"type": et.value, "event": el(*args)}))

        def on_circuit_removed(circuit, *args):
            if isinstance(circuit, Circuit):
                event = {
                    "circuit_id": circuit.circuit_id,
                    "bytes_up": circuit.bytes_up,
                    "bytes_down": circuit.bytes_down,
                    "uptime": time.time() - circuit.creation_time
                }
                self.write_data({"type": NTFY.TUNNEL_REMOVE.value, "event": event})

        # Tribler tunnel circuit has been removed
        self.notifier.add_observer(NTFY.TUNNEL_REMOVE, on_circuit_removed)

    async def on_shutdown(self, _):
        await self.shutdown_task_manager()

    def setup_routes(self):
        self.app.add_routes([web.get('', self.get_events)])

    def initial_message(self) -> dict:
        return {
            "type": NTFY.EVENTS_START.value,
            "event": {"public_key": self.public_key, "version": version_id}
        }

    def error_message(self, reported_error: ReportedError) -> dict:
        return {
            "type": NTFY.TRIBLER_EXCEPTION.value,
            "event": asdict(reported_error),
        }

    def encode_message(self, message: dict) -> bytes:
        try:
            message = json.dumps(message)
        except UnicodeDecodeError:
            # The message contains invalid characters; fix them
            self._logger.error("Event contains non-unicode characters, fixing")
            message = json.dumps(fix_unicode_dict(message))
        return b'data: ' + message.encode('utf-8') + b'\n\n'

    def has_connection_to_gui(self):
        return bool(self.events_responses)

    @task
    async def write_data(self, message):
        """
        Write data over the event socket if it's open.
        """
        if not self.has_connection_to_gui():
            return

        message_bytes = self.encode_message(message)
        for response in self.events_responses:
            await response.write(message_bytes)

    # An exception has occurred in Tribler. The event includes a readable
    # string of the error and a Sentry event.
    def on_tribler_exception(self, reported_error: ReportedError):
        message = self.error_message(reported_error)
        if self.has_connection_to_gui():
            self.write_data(message)
        elif not self.undelivered_error:
            # If there are several undelivered errors, we store the first error as more important and skip other
            self.undelivered_error = message

    @docs(
        tags=["General"],
        summary="Open an EventStream for receiving Tribler events.",
        responses={
            200: {
                "schema": schema(EventsResponse={'type': String,
                                                 'event': Dict})
            }
        }
    )
    async def get_events(self, request):
        """
        .. http:get:: /events

        A GET request to this endpoint will open the event connection.

            **Example request**:

                .. sourcecode:: none

                    curl -X GET http://localhost:52194/events
        """

        # Setting content-type to text/event-stream to ensure browsers will handle the content properly
        response = RESTStreamResponse(status=200,
                                      reason='OK',
                                      headers={'Content-Type': 'text/event-stream',
                                               'Cache-Control': 'no-cache',
                                               'Connection': 'keep-alive'})
        await response.prepare(request)
        await response.write(self.encode_message(self.initial_message()))

        if self.undelivered_error:
            error = self.undelivered_error
            self.undelivered_error = None
            await response.write(self.encode_message(error))

        self.events_responses.append(response)

        try:
            while True:
                await self.register_anonymous_task('event_sleep', lambda: None, delay=3600)
        except CancelledError:
            self.events_responses.remove(response)
            return response
Example #11
class StatisticsEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling requests regarding statistics in Tribler.
    """
    def setup_routes(self):
        self.app.add_routes([
            web.get('/tribler', self.get_tribler_stats),
            web.get('/ipv8', self.get_ipv8_stats)
        ])

    @docs(tags=["General"],
          summary="Return general statistics of Tribler.",
          responses={
              200: {
                  "schema":
                  schema(
                      TriblerStatisticsResponse={
                          'statistics':
                          schema(
                              TriblerStatistics={
                                  'num_channels':
                                  Integer,
                                  'database_size':
                                  Integer,
                                  'torrent_queue_stats': [
                                      schema(
                                          TorrentQueueStats={
                                              'failed': Integer,
                                              'total': Integer,
                                              'type': String,
                                              'pending': Integer,
                                              'success': Integer
                                          })
                                  ],
                              })
                      })
              }
          })
    async def get_tribler_stats(self, request):
        return RESTResponse(
            {'tribler_statistics': self.session.get_tribler_statistics()})

    @docs(tags=["General"],
          summary="Return general statistics of IPv8.",
          responses={
              200: {
                  "schema":
                  schema(
                      IPv8StatisticsResponse={
                          'statistics':
                          schema(IPv8Statistics={
                              'total_up': Integer,
                              'total_down': Integer
                          })
                      })
              }
          })
    async def get_ipv8_stats(self, request):
        """

            .. sourcecode:: javascript

                {
                    "ipv8_statistics": {
                        "total_up": 3424324,
                        "total_down": 859484
                    }
                }
        """
        return RESTResponse(
            {'ipv8_statistics': self.session.get_ipv8_statistics()})
Example #12
class SearchEndpoint(MetadataEndpointBase):
    """
    This endpoint is responsible for searching in channels and torrents present in the local Tribler database.
    """
    def setup_routes(self):
        self.app.add_routes([
            web.get('', self.search),
            web.get('/completions', self.completions)
        ])

    @staticmethod
    def get_uuid(parameters):
        return parameters['uuid'] if 'uuid' in parameters else None

    @docs(tags=['Metadata'],
          summary="Perform a search for a given query.",
          responses={
              200: {
                  'schema':
                  schema(
                      SearchResponse={
                          'torrents': [
                              schema(
                                  Torrent={
                                      'commit_status': Integer,
                                      'num_leechers': Integer,
                                      'date': Integer,
                                      'relevance_score': Integer,
                                      'id': Integer,
                                      'size': Integer,
                                      'category': String,
                                      'public_key': String,
                                      'name': String,
                                      'last_tracker_check': Integer,
                                      'infohash': String,
                                      'num_seeders': Integer,
                                      'type': String,
                                  })
                          ],
                          'chant_dirty':
                          Boolean
                      })
              }
          })
    @querystring_schema(MetadataParameters)
    async def search(self, request):
        try:
            sanitized = self.sanitize_parameters(request.query)
        except (ValueError, KeyError):
            return RESTResponse(
                {"error": "Error processing request parameters"},
                status=HTTP_BAD_REQUEST)

        if not sanitized["txt_filter"]:
            return RESTResponse({"error": "Filter parameter missing"},
                                status=HTTP_BAD_REQUEST)

        include_total = request.query.get('include_total', '')

        def search_db():
            with db_session:
                pony_query = self.session.mds.MetadataNode.get_entries(
                    **sanitized)
                total = self.session.mds.MetadataNode.get_total_count(
                    **sanitized) if include_total else None
                search_results = [r.to_simple_dict() for r in pony_query]
            # The DB must be disconnected explicitly if the query runs on a thread
            self.session.mds._db.disconnect()
            return search_results, total

        try:
            search_results, total = await asyncio.get_event_loop().run_in_executor(
                None, search_db)
        except Exception as e:
            self._logger.error("Error while performing DB search: %s", e)
            return RESTResponse(status=HTTP_BAD_REQUEST)

        response_dict = {
            "results": search_results,
            "first": sanitized["first"],
            "last": sanitized["last"],
            "sort_by": sanitized["sort_by"],
            "sort_desc": sanitized["sort_desc"],
        }
        if total is not None:
            response_dict.update({"total": total})

        return RESTResponse(response_dict)

    @docs(tags=['Metadata'],
          summary="Return auto-completion suggestions for a given query.",
          parameters=[{
              'in': 'query',
              'name': 'q',
              'description': 'Search query',
              'type': 'string',
              'required': True
          }],
          responses={
              200: {
                  'schema': schema(CompletionsResponse={
                      'completions': [String],
                  }),
                  'examples': {
                      'completions': ['pioneer one', 'pioneer movie']
                  }
              }
          })
    async def completions(self, request):
        args = request.query
        if 'q' not in args:
            return RESTResponse({"error": "query parameter missing"},
                                status=HTTP_BAD_REQUEST)

        keywords = args['q'].strip().lower()
        # TODO: add XXX filtering for completion terms
        results = self.session.mds.TorrentMetadata.get_auto_complete_terms(
            keywords, max_terms=5)
        return RESTResponse({"completions": results})
Example #13
class RemoteQueryEndpoint(MetadataEndpointBase):
    """
    This endpoint fires a remote search in the IPv8 GigaChannel Community.
    """
    def __init__(self, gigachannel_community: GigaChannelCommunity, *args,
                 **kwargs):
        MetadataEndpointBase.__init__(self, *args, **kwargs)
        self.gigachannel_community = gigachannel_community

    def setup_routes(self):
        self.app.add_routes([web.put('', self.create_remote_search_request)])
        self.app.add_routes(
            [web.get('/channels_peers', self.get_channels_peers)])

    def sanitize_parameters(self, parameters):
        sanitized = super().sanitize_parameters(parameters)

        if "channel_pk" in parameters:
            sanitized["channel_pk"] = unhexlify(parameters["channel_pk"])
        if "origin_id" in parameters:
            sanitized["origin_id"] = int(parameters["origin_id"])

        return sanitized

    @docs(
        tags=['Metadata'],
        summary="Perform a search for a given query.",
        responses={
            200: {
                'schema':
                schema(RemoteSearchResponse={'request_uuid': String()})
            }
        },
    )
    @querystring_schema(RemoteQueryParameters)
    async def create_remote_search_request(self, request):
        self._logger.info('Create remote search request')
        # Query remote results from the GigaChannel Community.
        # Results are returned over the Events endpoint.
        try:
            sanitized = self.sanitize_parameters(request.query)
        except (ValueError, KeyError) as e:
            return RESTResponse(
                {"error": f"Error processing request parameters: {e}"},
                status=HTTP_BAD_REQUEST)
        self._logger.info(f'Parameters: {sanitized}')

        request_uuid, peers_list = self.gigachannel_community.send_search_request(
            **sanitized)
        peers_mid_list = [hexlify(p.mid) for p in peers_list]

        return RESTResponse({
            "request_uuid": str(request_uuid),
            "peers": peers_mid_list
        })

    async def get_channels_peers(self, _):
        # Get debug stats for peers serving channels
        current_time = time.time()
        result = []
        mapping = self.gigachannel_community.channels_peers
        with db_session:
            for id_tuple, peers in mapping._channels_dict.items():  # pylint:disable=W0212
                channel_pk, channel_id = id_tuple
                chan = self.mds.ChannelMetadata.get(public_key=channel_pk,
                                                    id_=channel_id)

                peers_list = []
                for p in peers:
                    peers_list.append(
                        (hexlify(p.mid), int(current_time - p.last_response)))

                chan_dict = {
                    "channel_name": chan.title if chan else None,
                    "channel_pk": hexlify(channel_pk),
                    "channel_id": channel_id,
                    "peers": peers_list,
                }
                result.append(chan_dict)

        return RESTResponse({"channels_list": result})
Example #14
class TrustchainEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling requests for trustchain data.
    """
    def setup_routes(self):
        self.app.add_routes([
            web.get('/statistics', self.get_statistics),
            web.get('/bootstrap', self.bootstrap)
        ])

    @docs(tags=["TrustChain"],
          summary="Return statistics about the trustchain community.",
          responses={
              200: {
                  "schema":
                  schema(
                      TrustchainStatisticsResponse={
                          'statistics':
                          schema(
                              TrustchainStatistics={
                                  'id': String,
                                  'peers_that_pk_helped': Integer,
                                  'peers_that_helped_pk': Integer,
                                  'total_blocks': Integer,
                                  'total_down': Integer,
                                  'total_up': Integer
                              })
                      })
              }
          })
    async def get_statistics(self, request):
        if 'MB' not in self.session.wallets:
            return RESTResponse({"error": "TrustChain community not found"},
                                status=HTTP_NOT_FOUND)
        return RESTResponse(
            {'statistics': recursive_unicode(self.session.wallets['MB'].get_statistics())})

    @docs(
        tags=["TrustChain"],
        summary="Generate a new identity and transfer bandwidth tokens to it.",
        parameters=[{
            'in': 'query',
            'name': 'amount',
            'description': 'Specifies how many tokens should be transferred to the new identity',
            'type': 'integer',
            'required': True
        }],
        responses={
            200: {
                "schema":
                schema(
                    TrustchainBootstrapResponse={
                        'private_key':
                        String,
                        'transaction':
                        schema(BootstrapTransaction={
                            'down': Integer,
                            'up': Integer
                        }),
                        'block':
                        schema(BootstrapBlock={
                            'block_hash': String,
                            'sequence_number': String
                        })
                    })
            }
        })
    async def bootstrap(self, request):
        if 'MB' not in self.session.wallets:
            return RESTResponse({"error": "bandwidth wallet not found"},
                                status=HTTP_NOT_FOUND)
        bandwidth_wallet = self.session.wallets['MB']

        available_tokens = bandwidth_wallet.get_bandwidth_tokens()

        args = request.query
        if 'amount' in args:
            try:
                amount = int(args['amount'])
            except ValueError:
                return RESTResponse(
                    {"error": "Provided token amount is not a number"},
                    status=HTTP_BAD_REQUEST)

            if amount <= 0:
                return RESTResponse(
                    {"error": "Provided token amount is zero or negative"},
                    status=HTTP_BAD_REQUEST)
        else:
            amount = available_tokens

        if amount <= 0 or amount > available_tokens:
            return RESTResponse(
                {"error": "Not enough bandwidth tokens available"},
                status=HTTP_BAD_REQUEST)

        result = bandwidth_wallet.bootstrap_new_identity(amount)
        result['private_key'] = result['private_key'].decode('utf-8')
        result['block']['block_hash'] = result['block']['block_hash'].decode(
            'utf-8')
        return RESTResponse(result)
Example #15
class TorrentInfoEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling all requests regarding torrent info in Tribler.
    """
    def setup_routes(self):
        self.app.add_routes([web.get('', self.get_torrent_info)])

    @docs(
        tags=["Libtorrent"],
        summary="Return metainfo from a torrent found at a provided URI.",
        parameters=[{
            'in': 'query',
            'name': 'uri',
            'description': 'URI for which to return torrent information. This URI can either represent '
                           'a file location, a magnet link or an HTTP(S) URL.',
            'type': 'string',
            'required': True
        }],
        responses={
            200: {
                'description':
                'Return a hex-encoded json-encoded string with torrent metainfo',
                "schema": schema(GetMetainfoResponse={'metainfo': String})
            }
        })
    async def get_torrent_info(self, request):
        args = request.query

        hops = None
        if 'hops' in args:
            try:
                hops = int(args['hops'])
            except ValueError:
                return RESTResponse(
                    {"error": f"wrong value of 'hops' parameter: {repr(args['hops'])}"},
                    status=HTTP_BAD_REQUEST)

        if 'uri' not in args or not args['uri']:
            return RESTResponse({"error": "uri parameter missing"},
                                status=HTTP_BAD_REQUEST)

        uri = args['uri']
        if uri.startswith('file:'):
            try:
                filename = url2pathname(uri[5:])
                tdef = TorrentDef.load(filename)
                metainfo = tdef.get_metainfo()
            except (TypeError, RuntimeError):
                return RESTResponse(
                    {"error": "error while decoding torrent file"},
                    status=HTTP_INTERNAL_SERVER_ERROR)
        elif uri.startswith('http'):
            try:
                async with ClientSession(raise_for_status=True) as session:
                    response = await session.get(uri)
                    response = await response.read()
            except (ServerConnectionError, ClientResponseError) as e:
                return RESTResponse({"error": str(e)},
                                    status=HTTP_INTERNAL_SERVER_ERROR)

            if response.startswith(b'magnet'):
                _, infohash, _ = parse_magnetlink(response)
                # Initialise metainfo so it is bound even when the magnet link lacks an infohash
                metainfo = None
                if infohash:
                    metainfo = await self.session.dlmgr.get_metainfo(
                        infohash, timeout=60, hops=hops, url=response)
            else:
                metainfo = bdecode_compat(response)
        elif uri.startswith('magnet'):
            infohash = parse_magnetlink(uri)[1]
            if infohash is None:
                return RESTResponse({"error": "missing infohash"},
                                    status=HTTP_BAD_REQUEST)
            metainfo = await self.session.dlmgr.get_metainfo(infohash,
                                                             timeout=60,
                                                             hops=hops,
                                                             url=uri)
        else:
            return RESTResponse({"error": "invalid uri"},
                                status=HTTP_BAD_REQUEST)

        if not metainfo:
            return RESTResponse({"error": "metainfo error"},
                                status=HTTP_INTERNAL_SERVER_ERROR)

        if not isinstance(metainfo, dict) or b'info' not in metainfo:
            self._logger.warning("Received metainfo is not a valid dictionary")
            return RESTResponse({"error": "invalid response"},
                                status=HTTP_INTERNAL_SERVER_ERROR)

        # Add the torrent to GigaChannel as a free-for-all entry, so others can search it
        self.session.mds.TorrentMetadata.add_ffa_from_dict(
            tdef_to_metadata_dict(TorrentDef.load_from_dict(metainfo)))

        # TODO(Martijn): store the stuff in a database!!!
        # TODO(Vadim): this means cache the downloaded torrent in a binary storage, like LevelDB
        infohash = hashlib.sha1(bencode(metainfo[b'info'])).digest()

        download = self.session.dlmgr.downloads.get(infohash)
        metainfo_request = self.session.dlmgr.metainfo_requests.get(
            infohash, [None])[0]
        download_is_metainfo_request = download == metainfo_request

        # Check if the torrent is already in the downloads
        encoded_metainfo = deepcopy(metainfo)

        # FIXME: json.dumps garbles binary data that is used by the 'pieces' field
        # However, this is fine as long as the GUI does not use this field.
        encoded_metainfo[b'info'][b'pieces'] = hexlify(
            encoded_metainfo[b'info'][b'pieces']).encode('utf-8')
        encoded_metainfo = hexlify(
            json.dumps(recursive_unicode(encoded_metainfo, ignore_errors=True),
                       ensure_ascii=False).encode('utf-8'))
        return RESTResponse({
            "metainfo": encoded_metainfo,
            "download_exists": download and not download_is_metainfo_request
        })
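# --- Usage sketch (not part of the endpoint above) ----------------------------
# The "metainfo" field of the response is a hex-encoded, JSON-encoded string, so a
# client has to reverse both steps. Note that the b'pieces' value was hexlified
# separately before encoding (see the FIXME above) and therefore stays a hex string.
import json
from binascii import unhexlify

def decode_metainfo_field(metainfo_hex: str) -> dict:
    return json.loads(unhexlify(metainfo_hex).decode('utf-8'))

# metainfo = decode_metainfo_field(response_dict["metainfo"])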
Example #16
class TrustViewEndpoint(RESTEndpoint):
    def __init__(self, bandwidth_db: BandwidthDatabase):
        super().__init__()
        self.bandwidth_db = bandwidth_db

    def setup_routes(self):
        self.app.add_routes([web.get('', self.get_view)])

    @cached_property
    def trust_graph(self) -> TrustGraph:
        trust_graph = TrustGraph(self.bandwidth_db.my_pub_key,
                                 self.bandwidth_db)
        trust_graph.compose_graph_data()
        return trust_graph

    @docs(tags=["TrustGraph"],
          summary="Return the trust graph.",
          parameters=[],
          responses={
              200: {
                  "schema":
                  schema(
                      GraphResponse={
                          'root_public_key':
                          String,
                          'graph':
                          schema(
                              Graph={
                                  'node':
                                  schema(
                                      Node={
                                          'id': Integer,
                                          'key': String,
                                          'pos': [Float],
                                          'sequence_number': Integer,
                                          'total_up': Integer,
                                          'total_down': Integer
                                      }),
                                  'edge':
                                  List(List(Integer))
                              }),
                          'bootstrap':
                          schema(
                              Bootstrap={
                                  'download': Integer,
                                  'upload': Integer,
                                  'progress': Float
                              }),
                          'num_tx':
                          Integer
                      })
              }
          })
    async def get_view(self, request):
        refresh_graph = int(request.query.get('refresh', '0'))
        if refresh_graph:
            self.trust_graph.compose_graph_data()

        graph_data = self.trust_graph.compute_node_graph()

        return RESTResponse({
            'root_public_key': hexlify(self.bandwidth_db.my_pub_key),
            'graph': graph_data,
            'bootstrap': 0,
            'num_tx': len(graph_data['edge'])
        })
Example #17
class StatisticsEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling requests regarding statistics in Tribler.
    """
    def __init__(self,
                 ipv8: IPv8 = None,
                 metadata_store: MetadataStore = None):
        super().__init__()
        self.mds = metadata_store
        self.ipv8 = ipv8

    def setup_routes(self):
        self.app.add_routes([
            web.get('/tribler', self.get_tribler_stats),
            web.get('/ipv8', self.get_ipv8_stats)
        ])

    @docs(tags=["General"],
          summary="Return general statistics of Tribler.",
          responses={
              200: {
                  "schema":
                  schema(
                      TriblerStatisticsResponse={
                          'statistics':
                          schema(
                              TriblerStatistics={
                                  'num_channels':
                                  Integer,
                                  'database_size':
                                  Integer,
                                  'torrent_queue_stats': [
                                      schema(
                                          TorrentQueueStats={
                                              'failed': Integer,
                                              'total': Integer,
                                              'type': String,
                                              'pending': Integer,
                                              'success': Integer
                                          })
                                  ],
                              })
                      })
              }
          })
    async def get_tribler_stats(self, request):
        stats_dict = {}
        if self.mds:
            db_size = self.mds.get_db_file_size()
            stats_dict = {
                "db_size": db_size,
                "num_channels": self.mds.get_num_channels(),
                "num_torrents": self.mds.get_num_torrents()
            }

        return RESTResponse({'tribler_statistics': stats_dict})

    @docs(tags=["General"],
          summary="Return general statistics of IPv8.",
          responses={
              200: {
                  "schema":
                  schema(
                      IPv8StatisticsResponse={
                          'statistics':
                          schema(IPv8Statistics={
                              'total_up': Integer,
                              'total_down': Integer
                          })
                      })
              }
          })
    async def get_ipv8_stats(self, request):
        """

            .. sourcecode:: javascript

                {
                    "ipv8_statistics": {
                        "total_up": 3424324,
                        "total_down": 859484
                    }
                }
        """
        stats_dict = {}
        if self.ipv8:
            stats_dict = {
                "total_up": self.ipv8.endpoint.bytes_up,
                "total_down": self.ipv8.endpoint.bytes_down,
                # "session_uptime": time.time() - self.ipv8_start_time
            }
        return RESTResponse({'ipv8_statistics': stats_dict})
Example #18
class LibTorrentEndpoint(RESTEndpoint):
    """
    Endpoint for getting information about libtorrent sessions and settings.
    """

    def __init__(self, download_manager: DownloadManager):
        super().__init__()
        self.download_manager = download_manager

    def setup_routes(self):
        self.app.add_routes([web.get('/settings', self.get_libtorrent_settings),
                             web.get('/session', self.get_libtorrent_session_info)])

    @docs(
        tags=["Libtorrent"],
        summary="Return Libtorrent session settings.",
        parameters=[{
            'in': 'query',
            'name': 'hop',
            'description': 'The hop count of the session for which to return settings',
            'type': 'string',
            'required': False
        }],
        responses={
            200: {
                'description': 'Return a dictionary with key-value pairs from the Libtorrent session settings',
                "schema": schema(LibtorrentSessionResponse={'hop': Integer,
                                                            'settings': schema(LibtorrentSettings={})})
            }
        }
    )
    async def get_libtorrent_settings(self, request):
        args = request.query
        hop = 0
        if 'hop' in args and args['hop']:
            hop = int(args['hop'])

        if hop not in self.download_manager.ltsessions:
            return RESTResponse({'hop': hop, "settings": {}})

        lt_session = self.download_manager.ltsessions[hop]
        if hop == 0:
            lt_settings = self.download_manager.get_session_settings(lt_session)
            lt_settings['peer_fingerprint'] = hexlify(lt_settings['peer_fingerprint'])
        else:
            lt_settings = lt_session.get_settings()

        return RESTResponse({'hop': hop, "settings": lt_settings})

    @docs(
        tags=["Libtorrent"],
        summary="Return Libtorrent session information.",
        parameters=[{
            'in': 'query',
            'name': 'hop',
            'description': 'The hop count of the session for which to return information',
            'type': 'string',
            'required': False
        }],
        responses={
            200: {
                'description': 'Return a dictionary with key-value pairs from the Libtorrent session information',
                "schema": schema(LibtorrentinfoResponse={'hop': Integer,
                                                         'settings': schema(LibtorrentInfo={})})
            }
        }
    )
    async def get_libtorrent_session_info(self, request):
        session_stats = Future()

        def on_session_stats_alert_received(alert):
            if not session_stats.done():
                session_stats.set_result(alert.values)

        args = request.query
        hop = 0
        if 'hop' in args and args['hop']:
            hop = int(args['hop'])

        if hop not in self.download_manager.ltsessions or \
                not hasattr(self.download_manager.ltsessions[hop], "post_session_stats"):
            return RESTResponse({'hop': hop, 'session': {}})

        self.download_manager.session_stats_callback = on_session_stats_alert_received
        self.download_manager.ltsessions[hop].post_session_stats()
        stats = await session_stats
        return RESTResponse({'hop': hop, 'session': stats})
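# --- Pattern sketch (hypothetical, standalone) ---------------------------------
# get_libtorrent_session_info bridges libtorrent's callback-style alerts to
# async/await with a one-shot Future: the request handler awaits the Future while
# the alert callback resolves it. The names below are illustrative only.
import asyncio

async def wait_for_alert(request_stats):
    loop = asyncio.get_running_loop()
    result = loop.create_future()

    def on_alert(values):
        if not result.done():   # ignore duplicate alerts
            result.set_result(values)

    request_stats(on_alert)     # register the callback and trigger the stats request
    return await result         # suspend until the callback delivers the values

async def _demo():
    def fake_request(callback):
        # simulate an alert arriving shortly after the stats request was posted
        asyncio.get_running_loop().call_later(0.1, callback, {"net.recv_bytes": 12345})
    print(await wait_for_alert(fake_request))

# asyncio.run(_demo())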
Example #19
class SettingsEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling all requests regarding settings and configuration.
    """
    def setup_routes(self):
        self.app.add_routes([
            web.get('', self.get_settings),
            web.post('', self.update_settings)
        ])

    @docs(
        tags=["General"],
        summary="Return all the session settings that can be found in Tribler.",
        responses={200: {
            "schema": schema(GetTriblerSettingsResponse={})
        }},
        description="This endpoint returns all the session settings that can be found in Tribler.\n\n"
                    "It also returns the runtime-determined ports, i.e. the ports for the SOCKS5 servers. "
                    "Please note that a port with a value of -1 in the settings means that the port is "
                    "randomly assigned at startup."
    )
    async def get_settings(self, request):
        return RESTResponse({
            "settings": self.session.config.config,
            "ports": self.session.config.selected_ports
        })

    @docs(tags=["General"],
          summary="Update Tribler settings.",
          responses={
              200: {
                  "schema":
                  schema(UpdateTriblerSettingsResponse={'modified': Boolean})
              }
          })
    @json_schema(schema(UpdateTriblerSettingsRequest={}))
    async def update_settings(self, request):
        settings_dict = await request.json()
        await self.parse_settings_dict(settings_dict)
        self.session.config.write()
        return RESTResponse({"modified": True})

    async def parse_setting(self, section, option, value):
        """
        Set a specific Tribler setting. Throw a ValueError if this setting is not available.
        """
        if section in self.session.config.config and option in self.session.config.config[section]:
            self.session.config.config[section][option] = value
        else:
            raise ValueError(
                f"Section {section} with option {option} does not exist")

        # Perform some actions when specific keys are set
        if section == "libtorrent" and (option == "max_download_rate"
                                        or option == "max_upload_rate"):
            self.session.dlmgr.update_max_rates_from_config()

    async def parse_settings_dict(self, settings_dict, depth=1, root_key=None):
        """
        Parse the settings dictionary.
        """
        for key, value in settings_dict.items():
            if isinstance(value, dict):
                await self.parse_settings_dict(value,
                                               depth=depth + 1,
                                               root_key=key)
            else:
                await self.parse_setting(root_key, key, value)
Example #20
class TrustViewEndpoint(RESTEndpoint):
    def __init__(self, session):
        super().__init__(session)
        self.logger = logging.getLogger(self.__class__.__name__)

        self.bandwidth_db = None
        self.trust_graph = None
        self.public_key = None

    def setup_routes(self):
        self.app.add_routes([web.get('', self.get_view)])

    def initialize_graph(self):
        if self.session.bandwidth_community:
            self.bandwidth_db = self.session.bandwidth_community.database
            self.public_key = self.session.bandwidth_community.my_pk
            self.trust_graph = TrustGraph(self.public_key, self.bandwidth_db)

            # Start bootstrap download if not already done
            if not self.session.bootstrap:
                self.session.start_bootstrap_download()

    @docs(tags=["TrustGraph"],
          summary="Return the trust graph.",
          parameters=[],
          responses={
              200: {
                  "schema":
                  schema(
                      GraphResponse={
                          'root_public_key':
                          String,
                          'graph':
                          schema(
                              Graph={
                                  'node':
                                  schema(
                                      Node={
                                          'id': Integer,
                                          'key': String,
                                          'pos': [Float],
                                          'sequence_number': Integer,
                                          'total_up': Integer,
                                          'total_down': Integer
                                      }),
                                  'edge':
                                  List(List(Integer))
                              }),
                          'bootstrap':
                          schema(
                              Bootstrap={
                                  'download': Integer,
                                  'upload': Integer,
                                  'progress': Float
                              }),
                          'num_tx':
                          Integer
                      })
              }
          })
    async def get_view(self, request):
        if not self.trust_graph:
            self.initialize_graph()
            self.trust_graph.compose_graph_data()

        refresh_graph = int(request.query.get('refresh', '0'))
        if refresh_graph:
            self.trust_graph.compose_graph_data()

        graph_data = self.trust_graph.compute_node_graph()

        return RESTResponse({
            'root_public_key': hexlify(self.public_key),
            'graph': graph_data,
            'bootstrap': self.get_bootstrap_info(),
            'num_tx': len(graph_data['edge'])
        })

    def get_bootstrap_info(self):
        if self.session.bootstrap.download and self.session.bootstrap.download.get_state(
        ):
            state = self.session.bootstrap.download.get_state()
            return {
                'download': state.get_total_transferred(DOWNLOAD),
                'upload': state.get_total_transferred(UPLOAD),
                'progress': state.get_progress(),
            }
        return {'download': 0, 'upload': 0, 'progress': 0}
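
The response schema above describes the graph as a list of nodes plus an edge list of index pairs. The sketch below shows one way a client might turn such a payload into an adjacency list; the sample data is made up and only mirrors the documented shape.

from collections import defaultdict

# Made-up payload in the shape documented by GraphResponse above.
payload = {
    "root_public_key": "ab" * 32,
    "graph": {
        "node": [
            {"id": 0, "key": "ab" * 32, "pos": [0.0, 0.0], "total_up": 10, "total_down": 2},
            {"id": 1, "key": "cd" * 32, "pos": [0.5, 0.3], "total_up": 4, "total_down": 7},
        ],
        "edge": [[0, 1]],
    },
    "bootstrap": {"download": 0, "upload": 0, "progress": 0},
    "num_tx": 1,
}

nodes = {node["id"]: node for node in payload["graph"]["node"]}
adjacency = defaultdict(set)
for src, dst in payload["graph"]["edge"]:
    adjacency[src].add(dst)
    adjacency[dst].add(src)

for node_id, neighbours in sorted(adjacency.items()):
    print(nodes[node_id]["key"][:8], "->", sorted(neighbours))
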
Example #21
class ChannelsEndpoint(ChannelsEndpointBase):
    def setup_routes(self):
        self.app.add_routes([
            web.get('', self.get_channels),
            web.get(r'/{channel_pk:\w*}/{channel_id:\w*}',
                    self.get_channel_contents),
            web.get(r'/{channel_pk:\w*}/{channel_id:\w*}/description',
                    self.get_channel_description),
            web.put(r'/{channel_pk:\w*}/{channel_id:\w*}/description',
                    self.put_channel_description),
            web.get(r'/{channel_pk:\w*}/{channel_id:\w*}/thumbnail',
                    self.get_channel_thumbnail),
            web.put(r'/{channel_pk:\w*}/{channel_id:\w*}/thumbnail',
                    self.put_channel_thumbnail),
            web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/copy',
                     self.copy_channel),
            web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/channels',
                     self.create_channel),
            web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/collections',
                     self.create_collection),
            web.put(r'/{channel_pk:\w*}/{channel_id:\w*}/torrents',
                    self.add_torrent_to_channel),
            web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/commit',
                     self.post_commit),
            web.get(r'/{channel_pk:\w*}/{channel_id:\w*}/commit',
                    self.is_channel_dirty),
            web.get('/popular_torrents', self.get_popular_torrents_channel),
        ])

    def add_download_progress_to_metadata_list(self, contents_list):
        for torrent in contents_list:
            if torrent['type'] == REGULAR_TORRENT:
                dl = self.session.dlmgr.get_download(
                    unhexlify(torrent['infohash']))
                if dl is not None and dl.tdef.infohash not in self.session.dlmgr.metainfo_requests:
                    torrent['progress'] = dl.get_state().get_progress()

    def get_channel_from_request(self, request):
        channel_pk = (self.session.mds.my_key.pub().key_to_bin()[10:]
                      if request.match_info['channel_pk'] == 'mychannel' else
                      unhexlify(request.match_info['channel_pk']))
        channel_id = int(request.match_info['channel_id'])
        return channel_pk, channel_id

    @docs(
        tags=['Metadata'],
        summary='Get a list of all channels known to the system.',
        responses={
            200: {
                'schema':
                schema(
                    GetChannelsResponse={
                        'results': [ChannelSchema],
                        'first': Integer(),
                        'last': Integer(),
                        'sort_by': String(),
                        'sort_desc': Integer(),
                        'total': Integer(),
                    })
            }
        },
    )
    async def get_channels(self, request):
        sanitized = self.sanitize_parameters(request.query)
        sanitized[
            'subscribed'] = None if 'subscribed' not in request.query else bool(
                int(request.query['subscribed']))
        include_total = request.query.get('include_total', '')
        sanitized.update({"origin_id": 0})
        sanitized['metadata_type'] = CHANNEL_TORRENT

        with db_session:
            channels = self.session.mds.get_entries(**sanitized)
            total = self.session.mds.get_total_count(
                **sanitized) if include_total else None
            channels_list = []
            for channel in channels:
                channel_dict = channel.to_simple_dict()
                # Add progress info for those channels that are still being processed
                if channel.subscribed:
                    if channel_dict["state"] == CHANNEL_STATE.UPDATING.value:
                        try:
                            progress = self.session.mds.compute_channel_update_progress(
                                channel)
                            channel_dict["progress"] = progress
                        except (ZeroDivisionError, FileNotFoundError) as e:
                            self._logger.error(
                                "Error %s when calculating channel update progress. Channel data: %s-%i %i/%i",
                                e,
                                hexlify(channel.public_key),
                                channel.id_,
                                channel.start_timestamp,
                                channel.local_version,
                            )
                    elif channel_dict[
                            "state"] == CHANNEL_STATE.METAINFO_LOOKUP.value:
                        if not self.session.dlmgr.metainfo_requests.get(
                                bytes(channel.infohash)
                        ) and self.session.dlmgr.download_exists(
                                bytes(channel.infohash)):
                            channel_dict[
                                "state"] = CHANNEL_STATE.DOWNLOADING.value

                channels_list.append(channel_dict)
        response_dict = {
            "results": channels_list,
            "first": sanitized["first"],
            "last": sanitized["last"],
            "sort_by": sanitized["sort_by"],
            "sort_desc": int(sanitized["sort_desc"]),
        }
        if total is not None:
            response_dict.update({"total": total})
        return RESTResponse(response_dict)

    @docs(
        tags=['Metadata'],
        summary=
        'Get a list of the channel\'s contents (torrents/channels/etc.).',
        responses={
            200: {
                'schema':
                schema(
                    GetChannelContentsResponse={
                        'results': [Dict()],
                        'first': Integer(),
                        'last': Integer(),
                        'sort_by': String(),
                        'sort_desc': Integer(),
                        'total': Integer(),
                    })
            }
        },
    )
    async def get_channel_contents(self, request):
        sanitized = self.sanitize_parameters(request.query)
        include_total = request.query.get('include_total', '')
        channel_pk, channel_id = self.get_channel_from_request(request)
        sanitized.update({"channel_pk": channel_pk, "origin_id": channel_id})
        remote = sanitized.pop("remote", None)

        total = None

        remote_failed = False
        if remote:
            try:
                contents_list = await self.session.gigachannel_community.remote_select_channel_contents(
                    **sanitized)
            except (RequestTimeoutException, NoChannelSourcesException,
                    CancelledError):
                remote_failed = True

        if not remote or remote_failed:
            with db_session:
                contents = self.session.mds.get_entries(**sanitized)
                contents_list = [c.to_simple_dict() for c in contents]
                total = self.session.mds.get_total_count(
                    **sanitized) if include_total else None
        self.add_download_progress_to_metadata_list(contents_list)
        response_dict = {
            "results": contents_list,
            "first": sanitized['first'],
            "last": sanitized['last'],
            "sort_by": sanitized['sort_by'],
            "sort_desc": int(sanitized['sort_desc']),
        }
        if total is not None:
            response_dict.update({"total": total})

        return RESTResponse(response_dict)

    async def get_channel_description(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        with db_session:
            channel_description = self.session.mds.ChannelDescription.select(
                lambda g: g.public_key == channel_pk and g.origin_id ==
                channel_id).first()

        response_dict = loads(channel_description.json_text) if (
            channel_description is not None) else {}
        return RESTResponse(response_dict)

    async def put_channel_description(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        request_parsed = await request.json()
        updated_json_text = dumps(
            {"description_text": request_parsed["description_text"]})
        with db_session:
            channel_description = self.session.mds.ChannelDescription.select(
                lambda g: g.public_key == channel_pk and g.origin_id ==
                channel_id).first()
            if channel_description is not None:
                channel_description.update_properties(
                    {"json_text": updated_json_text})
            else:
                channel_description = self.session.mds.ChannelDescription(
                    public_key=channel_pk,
                    origin_id=channel_id,
                    json_text=updated_json_text,
                    status=NEW)
        return RESTResponse(loads(channel_description.json_text))

    async def get_channel_thumbnail(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        with db_session:
            obj = self.session.mds.ChannelThumbnail.select(
                lambda g: g.public_key == channel_pk and g.origin_id ==
                channel_id).first()
        return web.Response(
            body=obj.binary_data,
            content_type=obj.data_type) if obj else web.Response(status=400)

    async def put_channel_thumbnail(self, request):
        content_type = request.headers["Content-Type"]
        post_body = await request.read()
        channel_pk, channel_id = self.get_channel_from_request(request)
        obj_properties = {"binary_data": post_body, "data_type": content_type}
        with db_session:
            obj = self.session.mds.ChannelThumbnail.select(
                lambda g: g.public_key == channel_pk and g.origin_id ==
                channel_id, ).first()
            if obj is not None:
                obj.update_properties(obj_properties)
            else:
                self.session.mds.ChannelThumbnail(public_key=channel_pk,
                                                  origin_id=channel_id,
                                                  status=NEW,
                                                  **obj_properties)
        return web.Response(status=201)

    @docs(
        tags=['Metadata'],
        summary='Create a copy of an entry/entries from another channel.',
        parameters=[{
            'in': 'body',
            'name': 'entries',
            'description': 'List of entries to copy',
            'example': [{
                'public_key': '1234567890',
                'id': 123
            }],
            'required': True,
        }],
        responses={
            200: {
                'description': 'Returns a list of copied content'
            },
            HTTP_NOT_FOUND: {
                'schema': HandledErrorSchema,
                'example': {
                    "error": "Target channel not found"
                }
            },
            HTTP_BAD_REQUEST: {
                'schema': HandledErrorSchema,
                'example': {
                    "error": "Source entry not found"
                }
            },
        },
    )
    async def copy_channel(self, request):
        with db_session:
            channel_pk, channel_id = self.get_channel_from_request(request)
            personal_root = channel_id == 0 and channel_pk == self.session.mds.my_key.pub(
            ).key_to_bin()[10:]
            # TODO: better error handling
            target_collection = self.session.mds.CollectionNode.get(
                public_key=database_blob(channel_pk), id_=channel_id)
            try:
                request_parsed = await request.json()
            except (ContentTypeError, ValueError):
                return RESTResponse({"error": "Bad JSON"},
                                    status=HTTP_BAD_REQUEST)

            if not target_collection and not personal_root:
                return RESTResponse({"error": "Target channel not found"},
                                    status=HTTP_NOT_FOUND)
            results_list = []
            for entry in request_parsed:
                public_key, id_ = database_blob(unhexlify(
                    entry["public_key"])), entry["id"]
                source = self.session.mds.ChannelNode.get(
                    public_key=public_key, id_=id_)
                if not source:
                    return RESTResponse({"error": "Source entry not found"},
                                        status=HTTP_BAD_REQUEST)
                # We must upgrade Collections to Channels when moving them to root channel, and, vice-versa,
                # downgrade Channels to Collections when moving them into existing channels
                if isinstance(source, self.session.mds.CollectionNode):
                    src_dict = source.to_dict()
                    if channel_id == 0:
                        rslt = self.session.mds.ChannelMetadata.create_channel(
                            title=source.title)
                    else:
                        dst_dict = {'origin_id': channel_id, "status": NEW}
                        for k in self.session.mds.CollectionNode.nonpersonal_attributes:
                            dst_dict[k] = src_dict[k]
                        dst_dict.pop("metadata_type")
                        rslt = self.session.mds.CollectionNode(**dst_dict)
                    for child in source.actual_contents:
                        child.make_copy(rslt.id_)
                else:
                    rslt = source.make_copy(channel_id)
                results_list.append(rslt.to_simple_dict())
            return RESTResponse(results_list)

    @docs(
        tags=['Metadata'],
        summary='Create a new channel entry in the given channel.',
        responses={
            200: {
                'description': 'Returns the newly created channel',
                'schema': schema(CreateChannelResponse={'results': [Dict()]}),
            }
        },
    )
    async def create_channel(self, request):
        with db_session:
            _, channel_id = self.get_channel_from_request(request)
            request_parsed = await request.json()
            channel_name = request_parsed.get("name", "New channel")
            md = self.session.mds.ChannelMetadata.create_channel(
                channel_name, origin_id=channel_id)
            return RESTResponse({"results": [md.to_simple_dict()]})

    @docs(
        tags=['Metadata'],
        summary='Create a new collection entry in the given channel.',
        responses={
            200: {
                'description': 'Returns the newly created collection',
                'schema':
                schema(CreateCollectionResponse={'results': [Dict()]}),
            }
        },
    )
    async def create_collection(self, request):
        with db_session:
            _, channel_id = self.get_channel_from_request(request)
            request_parsed = await request.json()
            collection_name = request_parsed.get("name", "New collection")
            md = self.session.mds.CollectionNode(origin_id=channel_id,
                                                 title=collection_name,
                                                 status=NEW)
            return RESTResponse({"results": [md.to_simple_dict()]})

    @docs(
        tags=['Metadata'],
        summary='Add a torrent file to your own channel.',
        responses={
            200: {
                'schema':
                schema(
                    AddTorrentToChannelResponse={
                        'added': (
                            Integer,
                            'Number of torrents that were added to the channel')
                    })
            },
            HTTP_NOT_FOUND: {
                'schema': HandledErrorSchema,
                'example': {
                    "error": "Unknown channel"
                }
            },
            HTTP_BAD_REQUEST: {
                'schema': HandledErrorSchema,
                'example': {
                    "error": "unknown uri type"
                }
            },
        },
    )
    @json_schema(
        schema(
            AddTorrentToChannelRequest={
                'torrent': (String, 'Base64-encoded torrent file'),
                'uri': (String, 'Add a torrent from a magnet link or URL'),
                'torrents_dir': (
                    String, 'Add all .torrent files from a chosen directory'),
                'recursive':
                (Boolean,
                 'Toggle recursive scanning of the chosen directory for .torrent files'
                 ),
                'description': (String, 'Description for the torrent'),
            }))
    async def add_torrent_to_channel(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        with db_session:
            channel = self.session.mds.CollectionNode.get(
                public_key=database_blob(channel_pk), id_=channel_id)
        if not channel:
            return RESTResponse({"error": "Unknown channel"},
                                status=HTTP_NOT_FOUND)

        parameters = await request.json()

        extra_info = {}
        if parameters.get('description', None):
            extra_info = {'description': parameters['description']}

        # First, check whether a magnet link or URL was supplied
        if parameters.get('uri', None):
            uri = parameters['uri']
            if uri.startswith("http:") or uri.startswith("https:"):
                async with ClientSession() as session:
                    response = await session.get(uri)
                    data = await response.read()
                tdef = TorrentDef.load_from_memory(data)
            elif uri.startswith("magnet:"):
                _, xt, _ = parse_magnetlink(uri)
                if (xt and is_infohash(codecs.encode(xt, 'hex')) and
                    (self.session.mds.torrent_exists_in_personal_channel(xt)
                     or channel.copy_torrent_from_infohash(xt))):
                    return RESTResponse({"added": 1})

                meta_info = await self.session.dlmgr.get_metainfo(xt,
                                                                  timeout=30,
                                                                  url=uri)
                if not meta_info:
                    raise RuntimeError("Metainfo timeout")
                tdef = TorrentDef.load_from_dict(meta_info)
            else:
                return RESTResponse({"error": "unknown uri type"},
                                    status=HTTP_BAD_REQUEST)

            added = 0
            if tdef:
                channel.add_torrent_to_channel(tdef, extra_info)
                added = 1
            return RESTResponse({"added": added})

        torrents_dir = None
        if parameters.get('torrents_dir', None):
            torrents_dir = parameters['torrents_dir']
            if not path_util.isabs(torrents_dir):
                return RESTResponse(
                    {"error": "the torrents_dir should point to a directory"},
                    status=HTTP_BAD_REQUEST)

        recursive = False
        if parameters.get('recursive'):
            recursive = parameters['recursive']
            if not torrents_dir:
                return RESTResponse(
                    {
                        "error":
                        "the torrents_dir parameter should be provided when the recursive parameter is set"
                    },
                    status=HTTP_BAD_REQUEST,
                )

        if torrents_dir:
            torrents_list, errors_list = channel.add_torrents_from_dir(
                torrents_dir, recursive)
            return RESTResponse({
                "added": len(torrents_list),
                "errors": errors_list
            })

        if not parameters.get('torrent', None):
            return RESTResponse({"error": "torrent parameter missing"},
                                status=HTTP_BAD_REQUEST)

        # Try to parse the torrent data
        # Any errors will be handled by the error_middleware
        torrent = base64.b64decode(parameters['torrent'])
        torrent_def = TorrentDef.load_from_memory(torrent)
        channel.add_torrent_to_channel(torrent_def, extra_info)
        return RESTResponse({"added": 1})

    @docs(
        tags=['Metadata'],
        summary='Commit a channel.',
        responses={
            200: {
                'schema': schema(CommitResponse={'success': Boolean()})
            }
        },
    )
    async def post_commit(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        with db_session:
            if channel_id == 0:
                for t in self.session.mds.CollectionNode.commit_all_channels():
                    self.session.gigachannel_manager.updated_my_channel(
                        TorrentDef.load_from_dict(t))
            else:
                coll = self.session.mds.CollectionNode.get(
                    public_key=database_blob(channel_pk), id_=channel_id)
                if not coll:
                    return RESTResponse({"success": False},
                                        status=HTTP_NOT_FOUND)
                torrent_dict = coll.commit_channel_torrent()
                if torrent_dict:
                    self.session.gigachannel_manager.updated_my_channel(
                        TorrentDef.load_from_dict(torrent_dict))

        return RESTResponse({"success": True})

    @docs(
        tags=['Metadata'],
        summary='Check if a channel has uncommitted changes.',
        responses={
            200: {
                'schema': schema(IsChannelDirtyResponse={'dirty': Boolean()})
            }
        },
    )
    async def is_channel_dirty(self, request):
        channel_pk, _ = self.get_channel_from_request(request)
        with db_session:
            dirty = self.session.mds.MetadataNode.exists(
                lambda g: g.public_key == database_blob(
                    channel_pk) and g.status in DIRTY_STATUSES)
            return RESTResponse({"dirty": dirty})

    @docs(
        tags=['Metadata'],
        summary=
        'Get the list of most popular torrents. Functions as a pseudo-channel.',
        responses={
            200: {
                'schema':
                schema(GetChannelContentsResponse={
                    'results': [Dict()],
                    'first': Integer(),
                    'last': Integer(),
                })
            }
        },
    )
    async def get_popular_torrents_channel(self, request):
        sanitized = self.sanitize_parameters(request.query)
        sanitized["metadata_type"] = REGULAR_TORRENT
        sanitized["popular"] = True

        with db_session:
            contents = self.session.mds.get_entries(**sanitized)
            contents_list = [c.to_simple_dict() for c in contents]
        self.add_download_progress_to_metadata_list(contents_list)

        response_dict = {
            "results": contents_list,
            "first": sanitized['first'],
            "last": sanitized['last'],
        }

        return RESTResponse(response_dict)
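
A hypothetical client walk-through of a few of these routes is sketched below; the /channels prefix, port and X-Api-Key header are assumptions, and the magnet link is a placeholder. The 'public_key' and 'id' fields come from the request examples above, while 'mychannel' is the special value handled by get_channel_from_request().

import requests

BASE_URL = "http://localhost:8085/channels"   # assumed mount point and port
HEADERS = {"X-Api-Key": "<your-api-key>"}     # assumed authentication header

# List subscribed channels, including the total count.
channels = requests.get(
    BASE_URL,
    headers=HEADERS,
    params={"subscribed": 1, "include_total": 1, "first": 1, "last": 50},
).json()
print(channels.get("total"), "subscribed channels")

# Browse the contents of the first channel returned.
if channels["results"]:
    first = channels["results"][0]
    contents = requests.get(
        f"{BASE_URL}/{first['public_key']}/{first['id']}",
        headers=HEADERS,
        params={"first": 1, "last": 50},
    ).json()
    print(len(contents["results"]), "entries in", first.get("name", "?"))

# Add a magnet link to the personal channel root ('mychannel', channel id 0).
added = requests.put(
    f"{BASE_URL}/mychannel/0/torrents",
    headers=HEADERS,
    json={"uri": "magnet:?xt=urn:btih:" + "0" * 40},   # placeholder infohash
).json()
print(added)
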
Example #22
class CreateTorrentEndpoint(RESTEndpoint):
    """
    Create a torrent file from local files.
    See: http://www.bittorrent.org/beps/bep_0012.html
    """
    def setup_routes(self):
        self.app.add_routes([web.post('', self.create_torrent)])

    @docs(
        tags=["Libtorrent"],
        summary=
        "Create a torrent from local files and return it in base64 encoding.",
        parameters=[{
            'in': 'query',
            'name': 'download',
            'description':
            'Flag indicating whether or not to start downloading',
            'type': 'boolean',
            'required': False
        }],
        responses={
            200: {
                "schema":
                schema(CreateTorrentResponse={
                    'torrent': 'base64 encoded torrent file'
                }),
                "examples": {
                    'Success': {
                        "success": True
                    }
                }
            },
            HTTP_BAD_REQUEST: {
                "schema": HandledErrorSchema,
                "examples": {
                    "Error": {
                        "error": "files parameter missing"
                    }
                }
            }
        })
    @json_schema(
        schema(
            CreateTorrentRequest={
                'files': [String],
                'name': String,
                'description': String,
                'trackers': [String],
                'export_dir': String
            }))
    async def create_torrent(self, request):
        parameters = await request.json()
        params = {}

        if 'files' in parameters and parameters['files']:
            file_path_list = [
                ensure_unicode(f, 'utf-8') for f in parameters['files']
            ]
        else:
            return RESTResponse({"error": "files parameter missing"},
                                status=HTTP_BAD_REQUEST)

        if 'description' in parameters and parameters['description']:
            params['comment'] = parameters['description']

        if 'trackers' in parameters and parameters['trackers']:
            tracker_url_list = parameters['trackers']
            params['announce'] = tracker_url_list[0]
            params['announce-list'] = tracker_url_list

        name = 'unknown'
        if 'name' in parameters and parameters['name']:
            name = parameters['name']
            params['name'] = name

        export_dir = None
        if 'export_dir' in parameters and parameters['export_dir']:
            export_dir = Path(parameters['export_dir'])

        from tribler_core.version import version_id
        params['created by'] = '%s version: %s' % ('Tribler', version_id)

        params['nodes'] = False
        params['httpseeds'] = False
        params['encoding'] = False
        params['piece length'] = 0  # auto

        try:
            result = await self.session.dlmgr.create_torrent_file(
                file_path_list, recursive_bytes(params))
        except (IOError, UnicodeDecodeError, RuntimeError) as e:
            self._logger.exception(e)
            return return_handled_exception(request, e)

        metainfo_dict = bdecode_compat(result['metainfo'])

        if export_dir and export_dir.exists():
            save_path = export_dir / ("%s.torrent" % name)
            with open(save_path, "wb") as fd:
                fd.write(result['metainfo'])

        # Download this torrent if specified
        if request.query.get('download') == "1":
            download_config = DownloadConfig()
            download_config.set_dest_dir(result['base_path'] if len(
                file_path_list) == 1 else result['base_dir'])
            try:
                self.session.dlmgr.start_download(
                    tdef=TorrentDef(metainfo_dict), config=download_config)
            except DuplicateDownloadException:
                self._logger.warning(
                    "The created torrent is already being downloaded.")

        return RESTResponse(
            json.dumps({
                "torrent":
                base64.b64encode(result['metainfo']).decode('utf-8')
            }))
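
A hypothetical client call is sketched below; the /createtorrent prefix, port, X-Api-Key header, file paths and tracker URL are assumptions. The response carries the torrent file as base64, matching the handler above.

import base64
import requests

BASE_URL = "http://localhost:8085/createtorrent"   # assumed mount point and port
HEADERS = {"X-Api-Key": "<your-api-key>"}           # assumed authentication header

reply = requests.post(
    BASE_URL,
    headers=HEADERS,
    params={"download": 1},                          # also start seeding the new torrent
    json={
        "files": ["/home/user/videos/talk.mp4"],     # hypothetical local path
        "name": "talk",
        "description": "Conference talk recording",
        "trackers": ["udp://tracker.example.org:6969/announce"],  # hypothetical tracker
        "export_dir": "/home/user/torrents",         # hypothetical export directory
    },
).json()

# Decode the base64 payload and write the .torrent file locally.
torrent_bytes = base64.b64decode(reply["torrent"])
with open("talk.torrent", "wb") as handle:
    handle.write(torrent_bytes)
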
Example #23
class SearchEndpoint(MetadataEndpointBase):
    """
    This endpoint is responsible for searching in channels and torrents present in the local Tribler database.
    """
    def setup_routes(self):
        self.app.add_routes([
            web.get('', self.search),
            web.get('/completions', self.completions)
        ])

    @staticmethod
    def get_uuid(parameters):
        return parameters.get('uuid')

    @classmethod
    def sanitize_parameters(cls, parameters):
        sanitized = super().sanitize_parameters(parameters)
        if "max_rowid" in parameters:
            sanitized["max_rowid"] = int(parameters["max_rowid"])
        return sanitized

    @docs(
        tags=['Metadata'],
        summary="Perform a search for a given query.",
        responses={
            200: {
                'schema':
                schema(
                    SearchResponse={
                        'torrents': [
                            schema(
                                Torrent={
                                    'commit_status': Integer,
                                    'num_leechers': Integer,
                                    'date': Integer,
                                    'relevance_score': Integer,
                                    'id': Integer,
                                    'size': Integer,
                                    'category': String,
                                    'public_key': String,
                                    'name': String,
                                    'last_tracker_check': Integer,
                                    'infohash': String,
                                    'num_seeders': Integer,
                                    'type': String,
                                })
                        ],
                        'chant_dirty':
                        Boolean,
                    })
            }
        },
    )
    @querystring_schema(MetadataParameters)
    async def search(self, request):
        try:
            sanitized = self.sanitize_parameters(request.query)
        except (ValueError, KeyError):
            return RESTResponse(
                {"error": "Error processing request parameters"},
                status=HTTP_BAD_REQUEST)

        if not sanitized["txt_filter"]:
            return RESTResponse({"error": "Filter parameter missing"},
                                status=HTTP_BAD_REQUEST)

        include_total = request.query.get('include_total', '')

        mds: MetadataStore = self.session.mds

        def search_db():
            with db_session:
                pony_query = mds.get_entries(**sanitized)
                search_results = [r.to_simple_dict() for r in pony_query]
                if include_total:
                    total = mds.get_total_count(**sanitized)
                    max_rowid = mds.get_max_rowid()
                else:
                    total = max_rowid = None
            return search_results, total, max_rowid

        try:
            search_results, total, max_rowid = await mds.run_threaded(search_db)
        except Exception as e:  # pylint: disable=broad-except;  # pragma: no cover
            self._logger.error("Error while performing DB search: %s: %s",
                               type(e).__name__, e)
            return RESTResponse(status=HTTP_BAD_REQUEST)

        response_dict = {
            "results": search_results,
            "first": sanitized["first"],
            "last": sanitized["last"],
            "sort_by": sanitized["sort_by"],
            "sort_desc": sanitized["sort_desc"],
        }
        if include_total:
            response_dict.update(total=total, max_rowid=max_rowid)

        return RESTResponse(response_dict)

    @docs(
        tags=['Metadata'],
        summary="Return auto-completion suggestions for a given query.",
        parameters=[{
            'in': 'query',
            'name': 'q',
            'description': 'Search query',
            'type': 'string',
            'required': True
        }],
        responses={
            200: {
                'schema': schema(CompletionsResponse={
                    'completions': [String],
                }),
                'examples': {
                    'completions': ['pioneer one', 'pioneer movie']
                },
            }
        },
    )
    async def completions(self, request):
        args = request.query
        if 'q' not in args:
            return RESTResponse({"error": "query parameter missing"},
                                status=HTTP_BAD_REQUEST)

        keywords = args['q'].strip().lower()
        # TODO: add XXX filtering for completion terms
        results = self.session.mds.get_auto_complete_terms(keywords,
                                                           max_terms=5)
        return RESTResponse({"completions": results})
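
A hypothetical search session against this endpoint might look like the sketch below; the /search prefix, port and X-Api-Key header are assumptions, while txt_filter, include_total and q come from the handlers above.

import requests

BASE_URL = "http://localhost:8085/search"   # assumed mount point and port
HEADERS = {"X-Api-Key": "<your-api-key>"}   # assumed authentication header

# Ask for completions first, then run the full query.
completions = requests.get(
    f"{BASE_URL}/completions", headers=HEADERS, params={"q": "pioneer"}
).json()
print(completions["completions"])

results = requests.get(
    BASE_URL,
    headers=HEADERS,
    params={"txt_filter": "pioneer one", "include_total": 1, "first": 1, "last": 20},
).json()
print(results.get("total"), "matches;", len(results["results"]), "returned")
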
Example #24
class TagsEndpoint(RESTEndpoint):
    """
    Top-level endpoint for tags.
    """

    def __init__(self, db: TagDatabase, community: TagCommunity):
        super().__init__()
        self.db: TagDatabase = db
        self.community: TagCommunity = community

    @staticmethod
    def validate_infohash(infohash: str) -> Tuple[bool, Optional[RESTResponse]]:
        try:
            infohash = unhexlify(infohash)
            if len(infohash) != 20:
                return False, RESTResponse({"error": "Invalid infohash"}, status=HTTP_BAD_REQUEST)
        except binascii.Error:
            return False, RESTResponse({"error": "Invalid infohash"}, status=HTTP_BAD_REQUEST)

        return True, None

    def setup_routes(self):
        self.app.add_routes(
            [
                web.patch('/{infohash}', self.update_tags_entries),
                web.get('/{infohash}/suggestions', self.get_suggestions),
            ]
        )

    @docs(
        tags=["General"],
        summary="Update a particular torrent with tags.",
        responses={
            200: {
                "schema": schema(UpdateTagsResponse={'success': Boolean()})
            },
            HTTP_BAD_REQUEST: {
                "schema": HandledErrorSchema, 'example': {"error": "Invalid tag length"}},
        },
        description="This endpoint updates a particular torrent with the provided tags."
    )
    async def update_tags_entries(self, request):
        params = await request.json()
        infohash = request.match_info["infohash"]
        ih_valid, error_response = TagsEndpoint.validate_infohash(infohash)
        if not ih_valid:
            return error_response

        tags = {tag.lower() for tag in params["tags"]}

        # Validate whether the size of the tag is within the allowed range
        for tag in tags:
            if len(tag) < MIN_TAG_LENGTH or len(tag) > MAX_TAG_LENGTH:
                return RESTResponse({"error": "Invalid tag length"}, status=HTTP_BAD_REQUEST)

        self.modify_tags(unhexlify(infohash), tags)

        return RESTResponse({"success": True})

    @db_session
    def modify_tags(self, infohash: bytes, new_tags: Set[str]):
        """
        Modify the tags of a particular content item.
        """
        if not self.community:
            return

        # First, get the current tags and compute the diff between the old and new tags
        old_tags = set(self.db.get_tags(infohash))
        added_tags = new_tags - old_tags
        removed_tags = old_tags - new_tags

        # Create individual tag operations for the added/removed tags
        public_key = self.community.tags_key.pub().key_to_bin()
        for tag in added_tags.union(removed_tags):
            type_of_operation = TagOperationEnum.ADD if tag in added_tags else TagOperationEnum.REMOVE
            operation = TagOperation(infohash=infohash, operation=type_of_operation, clock=0,
                                     creator_public_key=public_key, tag=tag)
            operation.clock = self.db.get_clock(operation) + 1
            signature = self.community.sign(operation)
            self.db.add_tag_operation(operation, signature, is_local_peer=True)

    @docs(
        tags=["General"],
        summary="Get tag suggestions for a torrent with a particular infohash.",
        responses={
            200: {
                "schema": schema(SuggestedTagsResponse={'suggestions': List(String)})
            },
            HTTP_BAD_REQUEST: {
                "schema": HandledErrorSchema, 'example': {"error": "Invalid infohash"}},
        },
        description="This endpoint returns tag suggestions for a torrent with the given infohash."
    )
    async def get_suggestions(self, request):
        """
        Get tag suggestions for a particular torrent.
        """
        infohash = request.match_info["infohash"]
        ih_valid, error_response = TagsEndpoint.validate_infohash(infohash)
        if not ih_valid:
            return error_response

        with db_session:
            suggestions = self.db.get_suggestions(unhexlify(infohash))
            return RESTResponse({"suggestions": suggestions})
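
A hypothetical client interaction with this endpoint is sketched below; the /tags prefix, port and X-Api-Key header are assumptions, and the infohash is a placeholder. The infohash must be 40 hex characters, matching validate_infohash() above.

import requests

BASE_URL = "http://localhost:8085/tags"     # assumed mount point and port
HEADERS = {"X-Api-Key": "<your-api-key>"}   # assumed authentication header
INFOHASH = "0" * 40                         # placeholder 20-byte infohash in hex

# Replace the local tag set for this torrent (tags are lower-cased server-side).
reply = requests.patch(
    f"{BASE_URL}/{INFOHASH}",
    headers=HEADERS,
    json={"tags": ["linux", "distro"]},
).json()
print(reply)  # expected: {"success": true}

# Ask for suggested tags for the same torrent.
suggestions = requests.get(
    f"{BASE_URL}/{INFOHASH}/suggestions", headers=HEADERS
).json()
print(suggestions["suggestions"])
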
Example #25
class TrustViewEndpoint(RESTEndpoint):
    def __init__(self, session):
        super().__init__(session)
        self.logger = logging.getLogger(self.__class__.__name__)

        self.trustchain_db = None
        self.trust_graph = None
        self.public_key = None

    def setup_routes(self):
        self.app.add_routes([web.get('', self.get_view)])

    def initialize_graph(self):
        if self.session.trustchain_community:
            self.trustchain_db = self.session.trustchain_community.persistence
            self.public_key = self.session.trustchain_community.my_peer.public_key.key_to_bin(
            )
            self.trust_graph = TrustGraph(hexlify(self.public_key))

            # Start bootstrap download if not already done
            if not self.session.bootstrap:
                self.session.start_bootstrap_download()

    @docs(tags=["TrustChain"],
          summary="Return the trust graph.",
          parameters=[{
              'in': 'query',
              'name': 'depth',
              'description': 'Depth level (0 = all depths)',
              'enum': [0, 1, 2, 3, 4],
              'type': 'integer',
              'required': False
          }],
          responses={
              200: {
                  "schema":
                  schema(
                      GraphResponse={
                          'root_public_key':
                          String,
                          'graph':
                          schema(
                              Graph={
                                  'node':
                                  schema(
                                      Node={
                                          'id': Integer,
                                          'key': String,
                                          'pos': [Float],
                                          'sequence_number': Integer,
                                          'total_up': Integer,
                                          'total_down': Integer
                                      }),
                                  'edge':
                                  List(List(Integer))
                              }),
                          'bootstrap':
                          schema(
                              Bootstrap={
                                  'download': Integer,
                                  'upload': Integer,
                                  'progress': Float
                              }),
                          'num_tx':
                          Integer,
                          'depth':
                          Integer
                      })
              }
          })
    async def get_view(self, request):
        if not self.trust_graph:
            self.initialize_graph()

        def get_bandwidth_blocks(public_key, limit=5):
            return self.trustchain_db.get_latest_blocks(
                public_key, limit=limit, block_types=[b'tribler_bandwidth'])

        def get_friends(public_key, limit=5):
            return self.trustchain_db.get_connected_users(public_key,
                                                          limit=limit)

        depth = 0
        if 'depth' in request.query:
            depth = int(request.query['depth'])

        # If depth is zero or not provided then fetch all depth levels
        fetch_all = depth == 0

        try:
            if fetch_all:
                self.trust_graph.reset(hexlify(self.public_key))
            if fetch_all or depth == 1:
                self.trust_graph.add_blocks(
                    get_bandwidth_blocks(self.public_key, limit=100))
            if fetch_all or depth == 2:
                for friend in get_friends(self.public_key):
                    self.trust_graph.add_blocks(
                        get_bandwidth_blocks(unhexlify(friend['public_key']),
                                             limit=10))
            if fetch_all or depth == 3:
                for friend in get_friends(self.public_key):
                    self.trust_graph.add_blocks(
                        get_bandwidth_blocks(unhexlify(friend['public_key'])))
                    for fof in get_friends(unhexlify(friend['public_key'])):
                        self.trust_graph.add_blocks(
                            get_bandwidth_blocks(unhexlify(fof['public_key'])))
            if fetch_all or depth == 4:
                for user_block in self.trustchain_db.get_users():
                    self.trust_graph.add_blocks(
                        get_bandwidth_blocks(
                            unhexlify(user_block['public_key'])))
        except TrustGraphException as tgex:
            self.logger.warning(tgex)

        graph_data = self.trust_graph.compute_node_graph()

        return RESTResponse({
            'root_public_key': hexlify(self.public_key),
            'graph': graph_data,
            'bootstrap': self.get_bootstrap_info(),
            'num_tx': len(graph_data['edge']),
            'depth': depth,
        })

    def get_bootstrap_info(self):
        if self.session.bootstrap.download and self.session.bootstrap.download.get_state(
        ):
            state = self.session.bootstrap.download.get_state()
            return {
                'download': state.get_total_transferred(DOWNLOAD),
                'upload': state.get_total_transferred(UPLOAD),
                'progress': state.get_progress(),
            }
        return {'download': 0, 'upload': 0, 'progress': 0}
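
A hypothetical client might fetch the graph incrementally using the depth parameter documented above; the /trustview prefix, port and X-Api-Key header are assumptions.

import requests

BASE_URL = "http://localhost:8085/trustview"   # assumed mount point and port
HEADERS = {"X-Api-Key": "<your-api-key>"}      # assumed authentication header

# depth=0 (or omitting it) fetches every depth level in one go.
full_graph = requests.get(BASE_URL, headers=HEADERS, params={"depth": 0}).json()
print("full graph:", full_graph["num_tx"], "transactions")

# Alternatively, walk the graph one depth level at a time.
for depth in (1, 2, 3, 4):
    partial = requests.get(BASE_URL, headers=HEADERS, params={"depth": depth}).json()
    print(f"depth {depth}: {len(partial['graph']['node'])} nodes")
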
Example #26
class DebugEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for handling requests regarding debug information in Tribler.
    """
    def __init__(self,
                 state_dir: Path,
                 log_dir: Path,
                 tunnel_community: TunnelCommunity = None,
                 resource_monitor: ResourceMonitor = None):
        super().__init__()
        self.state_dir = state_dir
        self.log_dir = log_dir
        self.tunnel_community = tunnel_community
        self.resource_monitor = resource_monitor

    def setup_routes(self):
        self.app.add_routes([
            web.get('/circuits/slots', self.get_circuit_slots),
            web.get('/open_files', self.get_open_files),
            web.get('/open_sockets', self.get_open_sockets),
            web.get('/threads', self.get_threads),
            web.get('/cpu/history', self.get_cpu_history),
            web.get('/memory/history', self.get_memory_history),
            web.get('/log', self.get_log),
            web.get('/profiler', self.get_profiler_state),
            web.put('/profiler', self.start_profiler),
            web.delete('/profiler', self.stop_profiler)
        ])
        if HAS_MELIAE:
            self.app.add_routes(
                [web.get('/memory/dump', self.get_memory_dump)])

    @docs(tags=['Debug'],
          summary="Return information about the slots in the tunnel overlay.",
          responses={
              200: {
                  'schema':
                  schema(
                      CircuitSlotsResponse={
                          'slots': [
                              schema(CircuitSlot={
                                  'random': Integer,
                                  'competing': Integer
                              })
                          ]
                      })
              }
          })
    async def get_circuit_slots(self, request):
        return RESTResponse({
            "slots": {
                "random": self.tunnel_community.random_slots,
                "competing": self.tunnel_community.competing_slots
            }
        })

    @docs(tags=['Debug'],
          summary="Return information about files opened by Tribler.",
          responses={
              200: {
                  'schema':
                  schema(
                      OpenFilesResponse={
                          'open_files':
                          [schema(OpenFile={
                              'path': String,
                              'fd': Integer
                          })]
                      })
              }
          })
    async def get_open_files(self, request):
        my_process = psutil.Process()
        return RESTResponse({
            "open_files": [{
                "path": open_file.path,
                "fd": open_file.fd
            } for open_file in my_process.open_files()]
        })

    @docs(tags=['Debug'],
          summary="Return information about open sockets.",
          responses={
              200: {
                  'schema':
                  schema(
                      OpenSocketsResponse={
                          'open_sockets': [
                              schema(
                                  OpenSocket={
                                      'family': Integer,
                                      'status': String,
                                      'laddr': String,
                                      'raddr': String,
                                      'type': Integer
                                  })
                          ]
                      })
              }
          })
    async def get_open_sockets(self, request):
        my_process = psutil.Process()
        sockets = []
        for open_socket in my_process.connections():
            sockets.append({
                "family":
                open_socket.family,
                "status":
                open_socket.status,
                "laddr":
                ("%s:%d" % open_socket.laddr) if open_socket.laddr else "-",
                "raddr":
                ("%s:%d" % open_socket.raddr) if open_socket.raddr else "-",
                "type":
                open_socket.type
            })
        return RESTResponse({"open_sockets": sockets})

    @docs(tags=['Debug'],
          summary="Return information about running threads.",
          responses={
              200: {
                  'schema':
                  schema(
                      ThreadsResponse={
                          'threads': [
                              schema(
                                  Thread={
                                      'thread_id': Integer,
                                      'thread_name': String,
                                      'frames': [String]
                                  })
                          ]
                      })
              }
          })
    async def get_threads(self, request):
        watchdog = WatchDog()
        return RESTResponse({"threads": watchdog.get_threads_info()})

    @docs(tags=['Debug'],
          summary="Return information about CPU usage history.",
          responses={
              200: {
                  'schema':
                  schema(
                      CPUHistoryResponse={
                          'cpu_history':
                          [schema(CPUHistory={
                              'time': Integer,
                              'cpu': Float
                          })]
                      })
              }
          })
    async def get_cpu_history(self, request):
        history = self.resource_monitor.get_cpu_history_dict(
        ) if self.resource_monitor else {}
        return RESTResponse({"cpu_history": history})

    @docs(tags=['Debug'],
          summary="Return information about memory usage history.",
          responses={
              200: {
                  'schema':
                  schema(
                      MemoryHistoryResponse={
                          'memory_history': [
                              schema(MemoryHistory={
                                  'time': Integer,
                                  'mem': Integer
                              })
                          ]
                      })
              }
          })
    async def get_memory_history(self, request):
        history = self.resource_monitor.get_memory_history_dict(
        ) if self.resource_monitor else {}
        return RESTResponse({"memory_history": history})

    @docs(tags=['Debug'],
          summary="Return a Meliae-compatible dump of the memory contents.",
          responses={
              200: {
                  'description': 'The content of the memory dump file'
              }
          })
    async def get_memory_dump(self, request):
        if sys.platform == "win32":
            # On Windows, meliae (especially older versions) segfaults when writing to a file
            dump_buffer = MemoryDumpBuffer()
            try:
                scanner.dump_all_objects(dump_buffer)
            except OverflowError as e:
                # https://bugs.launchpad.net/meliae/+bug/569947
                logging.error(
                    "meliae dump failed (your version may be too old): %s",
                    str(e))
            content = dump_buffer.getvalue()
            dump_buffer.close()
        else:
            # On other platforms, simply writing to file is much faster
            dump_file_path = self.state_dir / 'memory_dump.json'
            scanner.dump_all_objects(dump_file_path)
            with open(dump_file_path) as dump_file:
                content = dump_file.read()
        date_str = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
        return RESTResponse(
            content,
            headers={
                'Content-Type': 'application/json',
                'Content-Disposition':
                'attachment; filename=tribler_memory_dump_%s.json' % date_str
            })

    @docs(
        tags=['Debug'],
        summary="Return the content of the core or GUI log file, limited to the requested max_lines.",
        parameters=[{
            'in': 'query',
            'name': 'process',
            'description': 'Specifies which log to return',
            'enum': ['core', 'gui'],
            'type': 'string',
            'required': False
        }, {
            'in': 'query',
            'name': 'max_lines',
            'description':
            'Maximum number of lines to return from the log file',
            'type': 'integer',
            'required': False
        }],
        responses={
            200: {
                'schema':
                schema(LogFileResponse={
                    'max_lines': Integer,
                    'content': String
                })
            }
        })
    async def get_log(self, request):
        # First, flush all the logs to make sure it is written to file
        for handler in logging.getLogger().handlers:
            handler.flush()

        # Default response
        response = {'content': '', 'max_lines': 0}

        # Get the location of log file
        param_process = request.query.get('process', 'core')
        log_name = f'tribler-{param_process}-info.log'
        log_file_name = self.log_dir / log_name

        # If the log file is not present in the versioned state directory, try root state directory location
        if not log_file_name.exists():
            log_file_name = get_root_state_directory() / log_name

        # If the log file is still not found, it may not have been created yet; return the default response
        if not log_file_name.exists():
            return RESTResponse(response)

        # If the log file exists, return the last requested 'max_lines' lines of the log
        try:
            max_lines = int(request.query['max_lines'])
            with log_file_name.open(mode='r') as log_file:
                response['content'] = self.tail(log_file, max_lines)
            response['max_lines'] = max_lines
        except ValueError:
            with log_file_name.open(mode='r') as log_file:
                response['content'] = self.tail(log_file,
                                                100)  # default 100 lines
            response['max_lines'] = 0

        return RESTResponse(response)

    def tail(self, file_handler, lines=1):
        """Tail a file and get X lines from the end"""
        # placeholder for the lines found
        lines_found = []
        byte_buffer = 1024

        # block counter will be multiplied by buffer
        # to get the block size from the end
        block_counter = -1

        # loop until we find X lines
        while len(lines_found) < lines:
            try:
                file_handler.seek(block_counter * byte_buffer, os.SEEK_END)
            except OSError:  # either file is too small, or too many lines requested
                file_handler.seek(0)
                lines_found = file_handler.readlines()
                break

            lines_found = file_handler.readlines()

            # we found enough lines, get out
            if len(lines_found) > lines:
                break

            # decrement the block counter to get the
            # next X bytes
            block_counter -= 1

        return ''.join(lines_found[-lines:])

    @docs(
        tags=['Debug'],
        summary="Return information about the state of the profiler.",
        responses={
            200: {
                'schema':
                schema(
                    ProfilerStateResponse={
                        'state': (String,
                                  'State of the profiler (STARTED or STOPPED)')
                    })
            }
        })
    async def get_profiler_state(self, _):
        if self.resource_monitor is None:
            return RESTResponse(status=404)
        state = "STARTED" if self.resource_monitor.profiler.is_running(
        ) else "STOPPED"
        return RESTResponse({"state": state})

    @docs(tags=['Debug'],
          summary="Start the profiler.",
          responses={
              200: {
                  'schema': schema(StartProfilerResponse={'success': Boolean})
              }
          })
    async def start_profiler(self, _):
        self.resource_monitor.profiler.start()
        return RESTResponse({"success": True})

    @docs(tags=['Debug'],
          summary="Stop the profiler.",
          responses={
              200: {
                  'schema': schema(StopProfilerResponse={'success': Boolean})
              }
          })
    async def stop_profiler(self, _):
        file_path = self.resource_monitor.profiler.stop()
        return RESTResponse({"success": True, "profiler_file": str(file_path)})
Beispiel #27
0
class MetadataEndpoint(MetadataEndpointBase, UpdateEntryMixin):
    """
    This is the top-level endpoint class that serves other endpoints.

    # /metadata
    #          /channels
    #          /torrents
    #          /<public_key>
    """

    def setup_routes(self):
        self.app.add_routes(
            [
                web.patch('', self.update_channel_entries),
                web.delete('', self.delete_channel_entries),
                web.get('/torrents/{infohash}/health', self.get_torrent_health),
                web.patch(r'/{public_key:\w*}/{id:\w*}', self.update_channel_entry),
                web.get(r'/{public_key:\w*}/{id:\w*}', self.get_channel_entries),
            ]
        )

    @docs(
        tags=['Metadata'],
        summary='Update channel entries.',
        parameters=[
            {
                'in': 'body',
                'name': 'entries',
                'description': 'List of entries to update',
                'example': [{'public_key': '1234567890', 'id': 123, 'property_to_update': 'new_value'}],
                'required': True,
            }
        ],
        responses={
            200: {'description': 'Returns a list of updated entries'},
            HTTP_NOT_FOUND: {'schema': HandledErrorSchema},
            HTTP_BAD_REQUEST: {'schema': HandledErrorSchema},
        },
    )
    async def update_channel_entries(self, request):
        try:
            request_parsed = await request.json()
        except (ContentTypeError, ValueError):
            return RESTResponse({"error": "Bad JSON"}, status=HTTP_BAD_REQUEST)
        results_list = []
        for entry in request_parsed:
            public_key = database_blob(unhexlify(entry.pop("public_key")))
            id_ = entry.pop("id")
            error, result = self.update_entry(public_key, id_, entry)
            # TODO: handle the results for a list that contains some errors in a smarter way
            if error:
                return RESTResponse(result, status=error)
            results_list.append(result)
        return RESTResponse(results_list)

    @docs(
        tags=['Metadata'],
        summary='Delete channel entries.',
        parameters=[
            {
                'in': 'body',
                'name': 'entries',
                'description': 'List of entries to delete',
                'example': [{'public_key': '1234567890', 'id': 123}],
                'required': True,
            }
        ],
        responses={
            200: {'description': 'Returns a list of deleted entries'},
            HTTP_BAD_REQUEST: {'schema': HandledErrorSchema},
        },
    )
    async def delete_channel_entries(self, request):
        with db_session:
            request_parsed = await request.json()
            results_list = []
            for entry in request_parsed:
                public_key = database_blob(unhexlify(entry.pop("public_key")))
                id_ = entry.pop("id")
                entry = self.session.mds.ChannelNode.get(public_key=public_key, id_=id_)
                if not entry:
                    return RESTResponse({"error": "Entry %i not found" % id_}, status=HTTP_BAD_REQUEST)
                entry.delete()
                result = {"public_key": hexlify(public_key), "id": id_, "state": "Deleted"}
                results_list.append(result)
            return RESTResponse(results_list)

    @docs(
        tags=['Metadata'],
        summary='Update a single channel entry.',
        responses={
            200: {'description': 'The updated entry'},
            HTTP_NOT_FOUND: {'schema': HandledErrorSchema},
            HTTP_BAD_REQUEST: {'schema': HandledErrorSchema},
        },
    )
    async def update_channel_entry(self, request):
        # TODO: unify checks for parts of the path, i.e. proper hex for public key, etc.
        try:
            parameters = await request.json()
        except (ContentTypeError, ValueError):
            return RESTResponse({"error": "Bad JSON input data"}, status=HTTP_BAD_REQUEST)

        public_key = unhexlify(request.match_info['public_key'])
        id_ = request.match_info['id']
        error, result = self.update_entry(public_key, id_, parameters)
        return RESTResponse(result, status=error or 200)

    @docs(
        tags=['Metadata'],
        summary='Get channel entries.',
        responses={200: {'description': 'Returns a list of entries'}, HTTP_NOT_FOUND: {'schema': HandledErrorSchema}},
    )
    async def get_channel_entries(self, request):
        public_key = unhexlify(request.match_info['public_key'])
        id_ = request.match_info['id']
        with db_session:
            entry = self.session.mds.ChannelNode.get(public_key=database_blob(public_key), id_=id_)

            if entry:
                # TODO: handle costly attributes in a more graceful and generic way for all types of metadata
                entry_dict = entry.to_simple_dict(include_trackers=isinstance(entry, self.session.mds.TorrentMetadata))
            else:
                return RESTResponse({"error": "entry not found in database"}, status=HTTP_NOT_FOUND)

        return RESTResponse(entry_dict)

    @docs(
        tags=["Metadata"],
        summary="Fetch the swarm health of a specific torrent.",
        parameters=[
            {
                'in': 'path',
                'name': 'infohash',
                'description': 'Infohash of the torrent whose health to check',
                'type': 'string',
                'required': True,
            },
            {
                'in': 'query',
                'name': 'timeout',
                'description': 'Timeout to be used in the connections to the trackers',
                'type': 'integer',
                'default': 20,
                'required': False,
            },
            {
                'in': 'query',
                'name': 'refresh',
                'description': 'Whether or not to force a health recheck. Setting this to 0 means that the '
                'health of a torrent will not be checked again if it was recently checked.',
                'type': 'integer',
                'enum': [0, 1],
                'required': False,
            },
            {
                'in': 'query',
                'name': 'nowait',
                'description': 'Whether or not to return immediately. If enabled, results '
                'will be passed through to the events endpoint.',
                'type': 'integer',
                'enum': [0, 1],
                'required': False,
            },
        ],
        responses={
            200: {
                'schema': schema(
                    HealthCheckResponse={
                        'tracker': schema(
                            HealthCheck={'seeders': Integer, 'leechers': Integer, 'infohash': String, 'error': String}
                        )
                    }
                ),
                'examples': [
                    {
                        "health": {
                            "http://mytracker.com:80/announce": {
                                "seeders": 43,
                                "leechers": 20,
                                "infohash": "97d2d8f5d37e56cfaeaae151d55f05b077074779",
                            },
                            "http://nonexistingtracker.com:80/announce": {"error": "timeout"},
                        }
                    },
                    {'checking': 1},
                ],
            }
        },
    )
    async def get_torrent_health(self, request):
        timeout = request.query.get('timeout')
        if not timeout:
            timeout = TORRENT_CHECK_TIMEOUT
        elif timeout.isdigit():
            timeout = int(timeout)
        else:
            return RESTResponse({"error": f"Error processing timeout parameter '{timeout}'"}, status=HTTP_BAD_REQUEST)
        refresh = request.query.get('refresh', '0') == '1'
        nowait = request.query.get('nowait', '0') == '1'

        infohash = unhexlify(request.match_info['infohash'])
        result_future = self.session.torrent_checker.check_torrent_health(infohash, timeout=timeout, scrape_now=refresh)
        # Return immediately. Used by GUI to schedule health updates through the EventsEndpoint
        if nowait:
            return RESTResponse({'checking': '1'})

        # Errors will be handled by error_middleware
        result = await result_future
        return RESTResponse({'health': result})
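
# --- Usage sketch (not part of the endpoint above) ---
# Illustrates the update_channel_entries handler: PATCH a JSON list of entries,
# each carrying 'public_key', 'id' and the properties to change. The base URL,
# the '/metadata' mount point, the 'X-Api-Key' header and the 'tags' property
# are assumptions made for this sketch.
import asyncio

from aiohttp import ClientSession


async def update_entries(base_url, api_key, entries):
    async with ClientSession(headers={'X-Api-Key': api_key}) as session:
        async with session.patch(f'{base_url}/metadata', json=entries) as response:
            # On success the handler returns the list of updated entries
            return await response.json()


if __name__ == '__main__':
    updates = [{'public_key': '1234567890', 'id': 123, 'tags': 'video'}]  # hypothetical values
    print(asyncio.run(update_entries('http://localhost:8085', 'my-api-key', updates)))
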
Beispiel #28
0
class ChannelsEndpoint(ChannelsEndpointBase):
    def setup_routes(self):
        self.app.add_routes(
            [
                web.get('', self.get_channels),
                web.get(r'/{channel_pk:\w*}/{channel_id:\w*}', self.get_channel_contents),
                web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/copy', self.copy_channel),
                web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/channels', self.create_channel),
                web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/collections', self.create_collection),
                web.put(r'/{channel_pk:\w*}/{channel_id:\w*}/torrents', self.add_torrent_to_channel),
                web.post(r'/{channel_pk:\w*}/{channel_id:\w*}/commit', self.post_commit),
                web.get(r'/{channel_pk:\w*}/{channel_id:\w*}/commit', self.is_channel_dirty),
            ]
        )

    def get_channel_from_request(self, request):
        channel_pk = (
            self.session.mds.my_key.pub().key_to_bin()[10:]
            if request.match_info['channel_pk'] == 'mychannel'
            else unhexlify(request.match_info['channel_pk'])
        )
        channel_id = int(request.match_info['channel_id'])
        return channel_pk, channel_id

    @docs(
        tags=['Metadata'],
        summary='Get a list of all channels known to the system.',
        responses={
            200: {
                'schema': schema(
                    GetChannelsResponse={
                        'results': [ChannelSchema],
                        'first': Integer(),
                        'last': Integer(),
                        'sort_by': String(),
                        'sort_desc': Integer(),
                        'total': Integer(),
                    }
                )
            }
        },
    )
    # TODO: DRY it with SpecificChannel endpoint?
    async def get_channels(self, request):
        sanitized = self.sanitize_parameters(request.query)
        sanitized['subscribed'] = None if 'subscribed' not in request.query else bool(int(request.query['subscribed']))
        include_total = request.query.get('include_total', '')
        sanitized.update({"origin_id": 0})

        with db_session:
            channels = self.session.mds.ChannelMetadata.get_entries(**sanitized)
            total = self.session.mds.ChannelMetadata.get_total_count(**sanitized) if include_total else None
            channels_list = [channel.to_simple_dict() for channel in channels]
        response_dict = {
            "results": channels_list,
            "first": sanitized["first"],
            "last": sanitized["last"],
            "sort_by": sanitized["sort_by"],
            "sort_desc": int(sanitized["sort_desc"]),
        }
        if total is not None:
            response_dict.update({"total": total})
        return RESTResponse(response_dict)

    @docs(
        tags=['Metadata'],
        summary='Get a list of the channel\'s contents (torrents/channels/etc.).',
        responses={
            200: {
                'schema': schema(
                    GetChannelContentsResponse={
                        'results': [Dict()],
                        'first': Integer(),
                        'last': Integer(),
                        'sort_by': String(),
                        'sort_desc': Integer(),
                        'total': Integer(),
                    }
                )
            }
        },
    )
    async def get_channel_contents(self, request):
        sanitized = self.sanitize_parameters(request.query)
        include_total = request.query.get('include_total', '')
        channel_pk, channel_id = self.get_channel_from_request(request)
        sanitized.update({"channel_pk": channel_pk, "origin_id": channel_id})
        with db_session:
            contents = self.session.mds.MetadataNode.get_entries(**sanitized)
            contents_list = [c.to_simple_dict() for c in contents]
            total = self.session.mds.MetadataNode.get_total_count(**sanitized) if include_total else None
        response_dict = {
            "results": contents_list,
            "first": sanitized['first'],
            "last": sanitized['last'],
            "sort_by": sanitized['sort_by'],
            "sort_desc": int(sanitized['sort_desc']),
        }
        if total is not None:
            response_dict.update({"total": total})

        return RESTResponse(response_dict)

    @docs(
        tags=['Metadata'],
        summary='Create a copy of an entry/entries from another channel.',
        parameters=[
            {
                'in': 'body',
                'name': 'entries',
                'description': 'List of entries to copy',
                'example': [{'public_key': '1234567890', 'id': 123}],
                'required': True,
            }
        ],
        responses={
            200: {'description': 'Returns a list of copied content'},
            HTTP_NOT_FOUND: {'schema': HandledErrorSchema, 'example': {"error": "Target channel not found"}},
            HTTP_BAD_REQUEST: {'schema': HandledErrorSchema, 'example': {"error": "Source entry not found"}},
        },
    )
    async def copy_channel(self, request):
        with db_session:
            channel_pk, channel_id = self.get_channel_from_request(request)
            personal_root = channel_id == 0 and channel_pk == self.session.mds.my_key.pub().key_to_bin()[10:]
            # TODO: better error handling
            target_collection = self.session.mds.CollectionNode.get(
                public_key=database_blob(channel_pk), id_=channel_id
            )
            try:
                request_parsed = await request.json()
            except (ContentTypeError, ValueError):
                return RESTResponse({"error": "Bad JSON"}, status=HTTP_BAD_REQUEST)

            if not target_collection and not personal_root:
                return RESTResponse({"error": "Target channel not found"}, status=HTTP_NOT_FOUND)
            results_list = []
            for entry in request_parsed:
                public_key, id_ = database_blob(unhexlify(entry["public_key"])), entry["id"]
                source = self.session.mds.ChannelNode.get(public_key=public_key, id_=id_)
                if not source:
                    return RESTResponse({"error": "Source entry not found"}, status=HTTP_BAD_REQUEST)
                # We must upgrade Collections to Channels when moving them to root channel, and, vice-versa,
                # downgrade Channels to Collections when moving them into existing channels
                if isinstance(source, self.session.mds.CollectionNode):
                    src_dict = source.to_dict()
                    if channel_id == 0:
                        rslt = self.session.mds.ChannelMetadata.create_channel(title=source.title)
                    else:
                        dst_dict = {'origin_id': channel_id, "status": NEW}
                        for k in self.session.mds.CollectionNode.nonpersonal_attributes:
                            dst_dict[k] = src_dict[k]
                        dst_dict.pop("metadata_type")
                        rslt = self.session.mds.CollectionNode(**dst_dict)
                    for child in source.actual_contents:
                        child.make_copy(rslt.id_)
                else:
                    rslt = source.make_copy(channel_id)
                results_list.append(rslt.to_simple_dict())
            return RESTResponse(results_list)

    @docs(
        tags=['Metadata'],
        summary='Create a new channel entry in the given channel.',
        responses={
            200: {
                'description': 'Returns the newly created channel',
                'schema': schema(CreateChannelResponse={'results': [Dict()]}),
            }
        },
    )
    async def create_channel(self, request):
        with db_session:
            _, channel_id = self.get_channel_from_request(request)
            request_parsed = await request.json()
            channel_name = request_parsed.get("name", "New channel")
            md = self.session.mds.ChannelMetadata.create_channel(channel_name, origin_id=channel_id)
            return RESTResponse({"results": [md.to_simple_dict()]})

    @docs(
        tags=['Metadata'],
        summary='Create a new collection entry in the given channel.',
        responses={
            200: {
                'description': 'Returns the newly created collection',
                'schema': schema(CreateCollectionResponse={'results': [Dict()]}),
            }
        },
    )
    async def create_collection(self, request):
        with db_session:
            _, channel_id = self.get_channel_from_request(request)
            request_parsed = await request.json()
            collection_name = request_parsed.get("name", "New collection")
            md = self.session.mds.CollectionNode(origin_id=channel_id, title=collection_name, status=NEW)
            return RESTResponse({"results": [md.to_simple_dict()]})

    @docs(
        tags=['Metadata'],
        summary='Add a torrent file to your own channel.',
        responses={
            200: {
                'schema': schema(
                    AddTorrentToChannelResponse={'added': (Integer, 'Number of torrents that were added to the channel')}
                )
            },
            HTTP_NOT_FOUND: {'schema': HandledErrorSchema, 'example': {"error": "Unknown channel"}},
            HTTP_BAD_REQUEST: {'schema': HandledErrorSchema, 'example': {"error": "unknown uri type"}},
        },
    )
    @json_schema(
        schema(
            AddTorrentToChannelRequest={
                'torrent': (String, 'Base64-encoded torrent file'),
                'uri': (String, 'Add a torrent from a magnet link or URL'),
                'torrents_dir': (String, 'Add all .torrent files from a chosen directory'),
                'recursive': (Boolean, 'Toggle recursive scanning of the chosen directory for .torrent files'),
                'description': (String, 'Description for the torrent'),
                'filesize': (Integer, "Filesize of the torrent file, this parameter is used for "
                                      "skipping metadata check when uri is a magnet link"),
            }
        )
    )
    async def add_torrent_to_channel(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        with db_session:
            channel = self.session.mds.CollectionNode.get(public_key=database_blob(channel_pk), id_=channel_id)
        if not channel:
            return RESTResponse({"error": "Unknown channel"}, status=HTTP_NOT_FOUND)

        parameters = await request.json()

        extra_info = {}
        if parameters.get('description', None):
            extra_info = {'description': parameters['description']}

        # First, check whether we did upload a magnet link or URL
        if parameters.get('uri', None):
            uri = parameters['uri']
            if uri.startswith("http:") or uri.startswith("https:"):
                async with ClientSession() as session:
                    response = await session.get(uri)
                    data = await response.read()
                tdef = TorrentDef.load_from_memory(data)
            elif uri.startswith("magnet:"):
                _, xt, _ = parse_magnetlink(uri)
                if (
                    xt
                    and is_infohash(codecs.encode(xt, 'hex'))
                    and (channel.torrent_exists(xt) or channel.copy_torrent_from_infohash(xt))
                ):
                    return RESTResponse({"added": 1})

                filesize = parameters.get("filesize")
                if filesize and not isinstance(filesize, int):
                    return RESTResponse({"error": "filesize must be an integer"},
                                        status=HTTP_BAD_REQUEST)
                if filesize:
                    dn, xt, _ = parse_magnetlink(uri)
                    tdef = TorrentDefNoMetainfo(xt, dn, uri, filesize)
                else:
                    meta_info = await self.session.dlmgr.get_metainfo(xt, timeout=30, url=uri)
                    if not meta_info:
                        raise RuntimeError("Metainfo timeout")
                    tdef = TorrentDef.load_from_dict(meta_info)
            else:
                return RESTResponse({"error": "unknown uri type"}, status=HTTP_BAD_REQUEST)

            added = 0
            if tdef:
                channel.add_torrent_to_channel(tdef, extra_info)
                added = 1
            return RESTResponse({"added": added})

        torrents_dir = None
        if parameters.get('torrents_dir', None):
            torrents_dir = parameters['torrents_dir']
            if not path_util.isabs(torrents_dir):
                return RESTResponse({"error": "the torrents_dir should point to a directory"}, status=HTTP_BAD_REQUEST)

        recursive = False
        if parameters.get('recursive'):
            recursive = parameters['recursive']
            if not torrents_dir:
                return RESTResponse(
                    {"error": "the torrents_dir parameter should be provided when the recursive parameter is set"},
                    status=HTTP_BAD_REQUEST,
                )

        if torrents_dir:
            torrents_list, errors_list = channel.add_torrents_from_dir(torrents_dir, recursive)
            return RESTResponse({"added": len(torrents_list), "errors": errors_list})

        if not parameters.get('torrent', None):
            return RESTResponse({"error": "torrent parameter missing"}, status=HTTP_BAD_REQUEST)

        # Try to parse the torrent data
        # Any errors will be handled by the error_middleware
        torrent = base64.b64decode(parameters['torrent'])
        torrent_def = TorrentDef.load_from_memory(torrent)
        channel.add_torrent_to_channel(torrent_def, extra_info)
        return RESTResponse({"added": 1})

    @docs(
        tags=['Metadata'],
        summary='Commit a channel.',
        responses={200: {'schema': schema(CommitResponse={'success': Boolean()})}},
    )
    async def post_commit(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        with db_session:
            if channel_id == 0:
                for t in self.session.mds.CollectionNode.commit_all_channels():
                    self.session.gigachannel_manager.updated_my_channel(TorrentDef.load_from_dict(t))
            else:
                coll = self.session.mds.CollectionNode.get(public_key=database_blob(channel_pk), id_=channel_id)
                if not coll:
                    return RESTResponse({"success": False}, status=HTTP_NOT_FOUND)
                torrent_dict = coll.commit_channel_torrent()
                if torrent_dict:
                    self.session.gigachannel_manager.updated_my_channel(TorrentDef.load_from_dict(torrent_dict))

        return RESTResponse({"success": True})

    @docs(
        tags=['Metadata'],
        summary='Check if a channel has uncommitted changes.',
        responses={200: {'schema': schema(IsChannelDirtyResponse={'dirty': Boolean()})}},
    )
    async def is_channel_dirty(self, request):
        channel_pk, _ = self.get_channel_from_request(request)
        with db_session:
            dirty = self.session.mds.MetadataNode.exists(
                lambda g: g.public_key == database_blob(channel_pk) and g.status in DIRTY_STATUSES
            )
            return RESTResponse({"dirty": dirty})
Beispiel #29
0
class DownloadsEndpoint(RESTEndpoint):
    """
    This endpoint is responsible for all requests regarding downloads. Examples include getting all downloads,
    starting, pausing and stopping downloads.
    """
    def __init__(self,
                 download_manager,
                 metadata_store=None,
                 tunnel_community=None):
        super().__init__()
        self.download_manager = download_manager
        self.mds = metadata_store
        self.tunnel_community = tunnel_community

        self.app.on_shutdown.append(self.on_shutdown)

    async def on_shutdown(self, _):
        pass

    def setup_routes(self):
        self.app.add_routes([
            web.get('', self.get_downloads),
            web.put('', self.add_download),
            web.delete('/{infohash}', self.delete_download),
            web.patch('/{infohash}', self.update_download),
            web.get('/{infohash}/torrent', self.get_torrent),
            web.get('/{infohash}/files', self.get_files),
            web.get('/{infohash}/stream/{fileindex}',
                    self.stream,
                    allow_head=False)
        ])

    @staticmethod
    def return_404(request, message="this download does not exist"):
        """
        Returns a 404 response with the given error message (by default: the download does not exist).
        """
        return RESTResponse({"error": message}, status=HTTP_NOT_FOUND)

    @staticmethod
    def create_dconfig_from_params(parameters):
        """
        Create a download configuration based on some given parameters. Possible parameters are:
        - anon_hops: the number of hops for the anonymous download. 0 hops is equivalent to a plain download
        - safe_seeding: whether the seeding of the download should be anonymous or not (0 = off, 1 = on)
        - destination: the destination path of the torrent (where it is saved on disk)
        """
        download_config = DownloadConfig()

        anon_hops = parameters.get('anon_hops', 0)
        safe_seeding = bool(parameters.get('safe_seeding', 0))

        if anon_hops > 0 and not safe_seeding:
            return None, "Cannot set anonymous download without safe seeding enabled"

        if anon_hops > 0:
            download_config.set_hops(anon_hops)

        if safe_seeding:
            download_config.set_safe_seeding(True)

        if 'destination' in parameters:
            download_config.set_dest_dir(parameters['destination'])

        if 'selected_files' in parameters:
            download_config.set_selected_files(parameters['selected_files'])

        return download_config, None

    @staticmethod
    def get_files_info_json(download):
        """
        Return file information as JSON from a specified download.
        """
        files_json = []
        files_completion = {
            name: progress
            for name, progress in download.get_state().get_files_completion()
        }
        selected_files = download.config.get_selected_files()
        file_index = 0
        for fn, size in download.get_def().get_files_with_length():
            files_json.append({
                "index": file_index,
                # We always return files in Posix format to make GUI independent of Core and simplify testing
                "name": str(PurePosixPath(fn)),
                "size": size,
                "included": (file_index in selected_files or not selected_files),
                "progress": files_completion.get(fn, 0.0)
            })
            file_index += 1
        return files_json

    @docs(
        tags=["Libtorrent"],
        summary="Return all downloads, both active and inactive",
        parameters=[{
            'in': 'query',
            'name': 'get_peers',
            'description': 'Flag indicating whether or not to include peers',
            'type': 'boolean',
            'required': False
        }, {
            'in': 'query',
            'name': 'get_pieces',
            'description': 'Flag indicating whether or not to include pieces',
            'type': 'boolean',
            'required': False
        }, {
            'in': 'query',
            'name': 'get_files',
            'description': 'Flag indicating whether or not to include files',
            'type': 'boolean',
            'required': False
        }],
        responses={
            200: {
                "schema":
                schema(
                    DownloadsResponse={
                        'downloads':
                        schema(
                            Download={
                                'name': String,
                                'progress': Float,
                                'infohash': String,
                                'speed_down': Float,
                                'speed_up': Float,
                                'status': String,
                                'size': Integer,
                                'eta': Integer,
                                'num_peers': Integer,
                                'num_seeds': Integer,
                                'total_up': Integer,
                                'total_down': Integer,
                                'ratio': Float,
                                'files': String,
                                'trackers': String,
                                'hops': Integer,
                                'anon_download': Boolean,
                                'safe_seeding': Boolean,
                                'max_upload_speed': Integer,
                                'max_download_speed': Integer,
                                'destination': String,
                                'availability': Float,
                                'peers': String,
                                'total_pieces': Integer,
                                'vod_mode': Boolean,
                                'vod_prebuffering_progress': Float,
                                'vod_prebuffering_progress_consec': Float,
                                'error': String,
                                'time_added': Integer
                            })
                    }),
            }
        },
        description=
        "This endpoint returns all downloads in Tribler, both active and inactive. The progress "
        "is a number ranging from 0 to 1, indicating the progress of the specific state (downloading, "
        "checking etc). The download speeds have the unit bytes/sec. The size of the torrent is given "
        "in bytes. The estimated time assumed is given in seconds.\n\n"
        "Detailed information about peers and pieces is only requested when the get_peers and/or "
        "get_pieces flag is set. Note that setting this flag has a negative impact on performance "
        "and should only be used in situations where this data is required. ")
    async def get_downloads(self, request):
        get_peers = request.query.get('get_peers', '0') == '1'
        get_pieces = request.query.get('get_pieces', '0') == '1'
        get_files = request.query.get('get_files', '0') == '1'

        downloads_json = []
        downloads = self.download_manager.get_downloads()
        for download in downloads:
            if download.hidden and not download.config.get_channel_download():
                # We still want to send channel downloads since they are displayed in the GUI
                continue
            state = download.get_state()
            tdef = download.get_def()

            # Create tracker information of the download
            tracker_info = []
            for url, url_info in download.get_tracker_status().items():
                tracker_info.append({
                    "url": url,
                    "peers": url_info[0],
                    "status": url_info[1]
                })

            num_seeds, num_peers = state.get_num_seeds_peers()
            num_connected_seeds, num_connected_peers = download.get_num_connected_seeds_peers()

            if download.config.get_channel_download():
                download_name = self.mds.ChannelMetadata.get_channel_name_cached(
                    tdef.get_name_utf8(), tdef.get_infohash())
            elif self.mds is None:
                download_name = tdef.get_name_utf8()
            else:
                download_name = self.mds.TorrentMetadata.get_torrent_title(tdef.get_infohash()) or \
                    tdef.get_name_utf8()

            download_status = get_extended_status(
                self.tunnel_community, download
            ) if self.tunnel_community else download.get_state().get_status()

            download_json = {
                "name": download_name,
                "progress": state.get_progress(),
                "infohash": hexlify(tdef.get_infohash()),
                "speed_down": state.get_current_payload_speed(DOWNLOAD),
                "speed_up": state.get_current_payload_speed(UPLOAD),
                "status": dlstatus_strings[download_status],
                "size": tdef.get_length(),
                "eta": state.get_eta(),
                "num_peers": num_peers,
                "num_seeds": num_seeds,
                "num_connected_peers": num_connected_peers,
                "num_connected_seeds": num_connected_seeds,
                "total_up": state.get_total_transferred(UPLOAD),
                "total_down": state.get_total_transferred(DOWNLOAD),
                "ratio": state.get_seeding_ratio(),
                "trackers": tracker_info,
                "hops": download.config.get_hops(),
                "anon_download": download.get_anon_mode(),
                "safe_seeding": download.config.get_safe_seeding(),
                # Maximum upload/download rates are set for entire sessions
                "max_upload_speed": DownloadManager.get_libtorrent_max_upload_rate(self.download_manager.config),
                "max_download_speed": DownloadManager.get_libtorrent_max_download_rate(self.download_manager.config),
                "destination": str(download.config.get_dest_dir()),
                "availability": state.get_availability(),
                "total_pieces": tdef.get_nr_pieces(),
                "vod_mode": download.stream and download.stream.enabled,
                "error": repr(state.get_error()) if state.get_error() else "",
                "time_added": download.config.get_time_added(),
                "channel_download": download.config.get_channel_download()
            }
            if download.stream:
                download_json.update({
                    "vod_prebuffering_progress": download.stream.prebuffprogress,
                    "vod_prebuffering_progress_consec": download.stream.prebuffprogress_consec,
                    "vod_header_progress": download.stream.headerprogress,
                    "vod_footer_progress": download.stream.footerprogress,
                })

            # Add peers information if requested
            if get_peers:
                peer_list = state.get_peerlist()
                for peer_info in peer_list:
                    # Remove the 'have' field since it is too large to transmit
                    del peer_info['have']
                    if 'extended_version' in peer_info:
                        peer_info['extended_version'] = _safe_extended_peer_info(peer_info['extended_version'])
                    # Does this peer represent a hidden services circuit?
                    if peer_info.get('port') == CIRCUIT_ID_PORT and self.tunnel_community:
                        tc = self.tunnel_community
                        circuit_id = tc.ip_to_circuit_id(peer_info['ip'])
                        circuit = tc.circuits.get(circuit_id, None)
                        if circuit:
                            peer_info['circuit'] = circuit_id

                download_json["peers"] = peer_list

            # Add piece information if requested
            if get_pieces:
                download_json["pieces"] = download.get_pieces_base64().decode(
                    'utf-8')

            # Add files if requested
            if get_files:
                download_json["files"] = self.get_files_info_json(download)

            downloads_json.append(download_json)
        return RESTResponse({"downloads": downloads_json})

    @docs(
        tags=["Libtorrent"],
        summary="Start a download from a provided URI.",
        parameters=[{
            'in': 'query',
            'name': 'get_peers',
            'description': 'Flag indicating whether or not to include peers',
            'type': 'boolean',
            'required': False
        }, {
            'in': 'query',
            'name': 'get_pieces',
            'description': 'Flag indicating whether or not to include pieces',
            'type': 'boolean',
            'required': False
        }, {
            'in': 'query',
            'name': 'get_files',
            'description': 'Flag indicating whether or not to include files',
            'type': 'boolean',
            'required': False
        }],
        responses={
            200: {
                "schema":
                schema(AddDownloadResponse={
                    "started": Boolean,
                    "infohash": String
                }),
                'examples': {
                    "started": True,
                    "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"
                }
            }
        },
    )
    @json_schema(
        schema(
            AddDownloadRequest={
                'anon_hops':
                (Integer,
                 'Number of hops for the anonymous download. No hops is equivalent to a plain download'
                 ),
                'safe_seeding':
                (Boolean,
                 'Whether the seeding of the download should be anonymous or not'
                 ),
                'destination': (
                    String, 'the download destination path of the torrent'),
                'uri*':
                (String,
                 'The URI of the torrent file that should be downloaded. This URI can either represent a file '
                 'location, a magnet link or a HTTP(S) url.'),
            }))
    async def add_download(self, request):
        parameters = await request.json()
        if not parameters.get('uri'):
            return RESTResponse({"error": "uri parameter missing"},
                                status=HTTP_BAD_REQUEST)

        download_config, error = DownloadsEndpoint.create_dconfig_from_params(
            parameters)
        if error:
            return RESTResponse({"error": error}, status=HTTP_BAD_REQUEST)

        try:
            download = await self.download_manager.start_download_from_uri(
                parameters['uri'], config=download_config)
        except Exception as e:
            return RESTResponse({"error": str(e)},
                                status=HTTP_INTERNAL_SERVER_ERROR)

        return RESTResponse({
            "started": True,
            "infohash": hexlify(download.get_def().get_infohash())
        })

    @docs(
        tags=["Libtorrent"],
        summary="Remove a specific download.",
        parameters=[{
            'in': 'path',
            'name': 'infohash',
            'description': 'Infohash of the download to remove',
            'type': 'string',
            'required': True
        }],
        responses={
            200: {
                "schema":
                schema(DeleteDownloadResponse={
                    "removed": Boolean,
                    "infohash": String
                }),
                'examples': {
                    "removed": True,
                    "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"
                }
            }
        },
    )
    @json_schema(
        schema(
            RemoveDownloadRequest={
                'remove_data': (
                    Boolean, 'Whether or not to remove the associated data'),
            }))
    async def delete_download(self, request):
        parameters = await request.json()
        if 'remove_data' not in parameters:
            return RESTResponse({"error": "remove_data parameter missing"},
                                status=HTTP_BAD_REQUEST)

        infohash = unhexlify(request.match_info['infohash'])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404(request)

        try:
            await self.download_manager.remove_download(
                download, remove_content=parameters['remove_data'])
        except Exception as e:
            self._logger.exception(e)
            return return_handled_exception(request, e)

        return RESTResponse({
            "removed": True,
            "infohash": hexlify(download.get_def().get_infohash())
        })

    async def vod_response(self, download, parameters, request, vod_mode):
        modified = False
        if vod_mode:
            file_index = parameters.get("fileindex")
            if file_index is None:
                return RESTResponse(
                    {"error": "fileindex is necessary to enable vod_mode"},
                    status=HTTP_BAD_REQUEST)
            if download.stream is None:
                download.add_stream()
            if not download.stream.enabled or download.stream.fileindex != file_index:
                await wait_for(
                    download.stream.enable(file_index, request.http_range.start
                                           or 0), 10)
                await download.stream.updateprios()
                modified = True

        elif not vod_mode and download.stream is not None and download.stream.enabled:
            download.stream.disable()
            modified = True
        return RESTResponse({
            "vod_prebuffering_progress": download.stream.prebuffprogress,
            "vod_prebuffering_progress_consec": download.stream.prebuffprogress_consec,
            "vod_header_progress": download.stream.headerprogress,
            "vod_footer_progress": download.stream.footerprogress,
            "vod_mode": download.stream.enabled,
            "infohash": hexlify(download.get_def().get_infohash()),
            "modified": modified,
        })

    @docs(
        tags=["Libtorrent"],
        summary="Update a specific download.",
        parameters=[{
            'in': 'path',
            'name': 'infohash',
            'description': 'Infohash of the download to update',
            'type': 'string',
            'required': True
        }],
        responses={
            200: {
                "schema":
                schema(UpdateDownloadResponse={
                    "modified": Boolean,
                    "infohash": String
                }),
                'examples': {
                    "modified": True,
                    "infohash": "4344503b7e797ebf31582327a5baae35b11bda01"
                }
            }
        },
    )
    @json_schema(
        schema(
            UpdateDownloadRequest={
                'state':
                (String,
                 'State parameter to be passed to modify the state of the download (resume/stop/recheck)'
                 ),
                'selected_files': (
                    List(Integer),
                    'File indexes to be included in the download'),
                'anon_hops':
                (Integer,
                 'The anonymity of a download can be changed at runtime by passing the anon_hops '
                 'parameter, however, this must be the only parameter in this request.'
                 )
            }))
    async def update_download(self, request):
        infohash = unhexlify(request.match_info['infohash'])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404(request)

        parameters = await request.json()
        vod_mode = parameters.get("vod_mode")
        if vod_mode is not None:
            if not isinstance(vod_mode, bool):
                return RESTResponse({"error": "vod_mode must be bool flag"},
                                    status=HTTP_BAD_REQUEST)
            return await self.vod_response(download, parameters, request,
                                           vod_mode)

        if len(parameters) > 1 and 'anon_hops' in parameters:
            return RESTResponse(
                {"error": "anon_hops must be the only parameter in this request"},
                status=HTTP_BAD_REQUEST)
        elif 'anon_hops' in parameters:
            anon_hops = int(parameters['anon_hops'])
            try:
                await self.download_manager.update_hops(download, anon_hops)
            except Exception as e:
                self._logger.exception(e)
                return return_handled_exception(request, e)
            return RESTResponse({
                "modified": True,
                "infohash": hexlify(download.get_def().get_infohash())
            })

        if 'selected_files' in parameters:
            selected_files_list = parameters['selected_files']
            num_files = len(download.tdef.get_files())
            if not all(0 <= index < num_files for index in selected_files_list):
                return RESTResponse({"error": "index out of range"},
                                    status=HTTP_BAD_REQUEST)
            download.set_selected_files(selected_files_list)

        if parameters.get('state'):
            state = parameters['state']
            if state == "resume":
                download.resume()
            elif state == "stop":
                await download.stop(user_stopped=True)
            elif state == "recheck":
                download.force_recheck()
            elif state == "move_storage":
                dest_dir = Path(parameters['dest_dir'])
                if not dest_dir.exists():
                    return RESTResponse({"error": f"Target directory ({dest_dir}) does not exist"})
                download.move_storage(dest_dir)
                download.checkpoint()
            else:
                return RESTResponse({"error": "unknown state parameter"},
                                    status=HTTP_BAD_REQUEST)

        return RESTResponse({
            "modified": True,
            "infohash": hexlify(download.get_def().get_infohash())
        })

    @docs(tags=["Libtorrent"],
          summary=
          "Return the .torrent file associated with the specified download.",
          parameters=[{
              'in': 'path',
              'name': 'infohash',
              'description':
              'Infohash of the download from which to get the .torrent file',
              'type': 'string',
              'required': True
          }],
          responses={200: {
              'description': 'The torrent'
          }})
    async def get_torrent(self, request):
        infohash = unhexlify(request.match_info['infohash'])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404(request)

        torrent = download.get_torrent_data()
        if not torrent:
            return DownloadsEndpoint.return_404(request)

        return RESTResponse(lt.bencode(torrent),
                            headers={
                                'content-type': 'application/x-bittorrent',
                                'Content-Disposition': 'attachment; filename=%s.torrent' % hexlify(infohash)
                            })

    @docs(tags=["Libtorrent"],
          summary="Return file information of a specific download.",
          parameters=[{
              'in': 'path',
              'name': 'infohash',
              'description':
              'Infohash of the download from which to get file information',
              'type': 'string',
              'required': True
          }],
          responses={
              200: {
                  "schema":
                  schema(
                      GetFilesResponse={
                          "files": [
                              schema(
                                  File={
                                      'index': Integer,
                                      'name': String,
                                      'size': Integer,
                                      'included': Boolean,
                                      'progress': Float
                                  })
                          ]
                      })
              }
          })
    async def get_files(self, request):
        infohash = unhexlify(request.match_info['infohash'])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404(request)
        return RESTResponse({"files": self.get_files_info_json(download)})

    @docs(tags=["Libtorrent"],
          summary="Stream the contents of a file that is being downloaded.",
          parameters=[{
              'in': 'path',
              'name': 'infohash',
              'description': 'Infohash of the download to stream',
              'type': 'string',
              'required': True
          }, {
              'in': 'path',
              'name': 'fileindex',
              'description': 'The fileindex to stream',
              'type': 'string',
              'required': True
          }],
          responses={206: {
              'description': 'Contents of the stream'
          }})
    async def stream(self, request):
        infohash = unhexlify(request.match_info['infohash'])
        download = self.download_manager.get_download(infohash)
        if not download:
            return DownloadsEndpoint.return_404(request)

        file_index = int(request.match_info['fileindex'])

        http_range = request.http_range
        start = http_range.start or 0

        if download.stream is None:
            download.add_stream()
        await wait_for(download.stream.enable(file_index, None if start > 0 else 0), 10)

        stop = download.stream.filesize if http_range.stop is None else min(http_range.stop, download.stream.filesize)

        if not start < stop or not 0 <= start < download.stream.filesize or not 0 < stop <= download.stream.filesize:
            return RESTResponse('Requested Range Not Satisfiable', status=416)

        response = RESTStreamResponse(
            status=206,
            reason='OK',
            headers={
                'Accept-Ranges': 'bytes',
                'Content-Type': 'application/octet-stream',
                'Content-Length': f'{stop - start}',
                'Content-Range': f'{start}-{stop}/{download.stream.filesize}'
            })
        response.force_close()
        with suppress(CancelledError, ConnectionResetError):
            async with StreamChunk(download.stream, start) as chunk:
                await response.prepare(request)
                bytes_todo = stop - start
                bytes_done = 0
                self._logger.info('Got range request for %s-%s (%s bytes)',
                                  start, stop, bytes_todo)
                while not request.transport.is_closing():
                    if chunk.seekpos >= download.stream.filesize:
                        break
                    data = await chunk.read()
                    try:
                        if len(data) == 0:
                            break
                        if bytes_done + len(data) > bytes_todo:
                            # if we have more data than we need
                            endlen = bytes_todo - bytes_done
                            if endlen != 0:
                                await wait_for(response.write(data[:endlen]),
                                               STREAM_PAUSE_TIME)

                                bytes_done += endlen
                            break
                        await wait_for(response.write(data), STREAM_PAUSE_TIME)
                        bytes_done += len(data)

                        if chunk.resume():
                            self._logger.debug(
                                "Stream %s-%s is resumed, starting sequential buffer",
                                start, stop)
                    except AsyncTimeoutError:
                        # This means that the stream writer has a full buffer; in practice the client
                        # keeps the connection open but sets the window size to 0. In that case there is
                        # no need to keep the sequential buffer if other chunks are waiting for priorities
                        if chunk.pause():
                            self._logger.debug(
                                "Stream %s-%s is paused, stopping sequential buffer",
                                start, stop)
                return response
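
# --- Usage sketch (not part of the endpoint above) ---
# Illustrates add_download and get_downloads: start a download from a URI, then
# poll the downloads list for its progress. The base URL, the '/downloads' mount
# point, the 'X-Api-Key' header and the magnet link are assumptions made for
# this sketch.
import asyncio

from aiohttp import ClientSession


async def start_and_watch(base_url, api_key, uri):
    async with ClientSession(headers={'X-Api-Key': api_key}) as session:
        async with session.put(f'{base_url}/downloads',
                               json={'uri': uri, 'safe_seeding': 1, 'anon_hops': 0}) as response:
            started = await response.json()  # {'started': True, 'infohash': <hex str>}
        infohash = started['infohash']
        for _ in range(10):
            async with session.get(f'{base_url}/downloads') as response:
                downloads = (await response.json())['downloads']
            matching = [d for d in downloads if d['infohash'] == infohash]
            if matching:
                print(f"progress: {matching[0]['progress']:.2%}")
            await asyncio.sleep(1)


if __name__ == '__main__':
    magnet_uri = 'magnet:?xt=urn:btih:4344503b7e797ebf31582327a5baae35b11bda01'
    asyncio.run(start_and_watch('http://localhost:8085', 'my-api-key', magnet_uri))
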
Beispiel #30
0
class UnhandledErrorSchema(Schema):
    error = schema(DetailedErrorSchema={'handled': Boolean, 'code': Integer, 'message': String})