Example No. 1
    async def add_download(self, request):
        params = await request.json()
        uri = params.get('uri')
        if not uri:
            return RESTResponse({"error": "uri parameter missing"},
                                status=HTTP_BAD_REQUEST)

        download_config, error = DownloadsEndpoint.create_dconfig_from_params(
            params)
        if error:
            return RESTResponse({"error": error}, status=HTTP_BAD_REQUEST)

        try:
            download = await self.download_manager.start_download_from_uri(
                uri, config=download_config)
        except Exception as e:
            return RESTResponse({"error": str(e)},
                                status=HTTP_INTERNAL_SERVER_ERROR)

        return RESTResponse({
            "started": True,
            "infohash": hexlify(download.get_def().get_infohash())
        })
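The handler expects a JSON body with a 'uri' field and replies with a started flag and the hex infohash. A minimal client-side sketch, assuming the handler is mounted under a /downloads route on a local REST server (the URL, port, and any authentication headers are placeholders not shown in the snippet):

import asyncio
from aiohttp import ClientSession

async def start_download(uri):
    # Hypothetical base URL and route; adjust to wherever add_download is actually registered.
    async with ClientSession() as session:
        async with session.put("http://localhost:8085/downloads", json={"uri": uri}) as resp:
            body = await resp.json()
            if resp.status != 200:
                raise RuntimeError(body["error"])
            return body["infohash"]

# asyncio.run(start_download("magnet:?xt=urn:btih:..."))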
Example No. 2
async def error_middleware(request, handler):
    try:
        response = await handler(request)
    except HTTPNotFound:
        return RESTResponse(
            {
                'error': {
                    'handled': True,
                    'message': f'Could not find {request.path}'
                }
            },
            status=HTTP_NOT_FOUND)
    except Exception as e:
        logger.exception(e)
        full_exception = traceback.format_exc()

        default_core_exception_handler.unhandled_error_observer(
            None, {
                'exception': e,
                'should_stop': False
            })

        return RESTResponse(
            {
                "error": {
                    "handled": False,
                    "code": e.__class__.__name__,
                    "message": str(full_exception)
                }
            },
            status=HTTP_INTERNAL_SERVER_ERROR)
    return response
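The (request, handler) signature above is aiohttp's new-style middleware shape; such a middleware is decorated with web.middleware and passed to the application at construction time. A minimal wiring sketch, with plain web.json_response standing in for RESTResponse and a hypothetical ping route:

from aiohttp import web

@web.middleware
async def error_middleware(request, handler):
    # Simplified variant of the middleware above: translate 404s into a JSON error body.
    try:
        return await handler(request)
    except web.HTTPNotFound:
        return web.json_response({'error': {'handled': True, 'message': f'Could not find {request.path}'}},
                                 status=404)

async def ping(request):  # hypothetical handler used only for illustration
    return web.json_response({'pong': True})

app = web.Application(middlewares=[error_middleware])
app.router.add_get('/ping', ping)
# web.run_app(app)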
Example No. 3
    async def get_log(self, request):
        # First, flush all the logs to make sure they are written to file
        for handler in logging.getLogger().handlers:
            handler.flush()

        # Default response
        response = {'content': '', 'max_lines': 0}

        # Get the location of the log file
        param_process = request.query.get('process', 'core')
        log_name = f'tribler-{param_process}-info.log'
        log_file_name = self.log_dir / log_name

        # If the log file is not present in the versioned state directory, try the root state directory
        if not log_file_name.exists():
            log_file_name = get_root_state_directory() / log_name

        # If the log file is still not found, it may not have been created yet; return the default response
        if not log_file_name.exists():
            return RESTResponse(response)

        # The log file exists; return the last requested 'max_lines' of the log
        try:
            max_lines = int(request.query['max_lines'])
            with log_file_name.open(mode='r') as log_file:
                response['content'] = self.tail(log_file, max_lines)
            response['max_lines'] = max_lines
        except ValueError:
            with log_file_name.open(mode='r') as log_file:
                response['content'] = self.tail(log_file, 100)  # default 100 lines
            response['max_lines'] = 0

        return RESTResponse(response)
Example No. 4
    def validate_infohash(infohash: str) -> Tuple[bool, Optional[RESTResponse]]:
        try:
            infohash = unhexlify(infohash)
            if len(infohash) != 20:
                return False, RESTResponse({"error": "Invalid infohash"}, status=HTTP_BAD_REQUEST)
        except binascii.Error:
            return False, RESTResponse({"error": "Invalid infohash"}, status=HTTP_BAD_REQUEST)

        return True, None
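The helper returns a (valid, error_response) pair instead of raising, so callers can return the prepared 400 response directly, as the tag endpoints further down do. A few illustrative calls, assuming the static TagsEndpoint.validate_infohash seen in Examples 23 and 29:

ok, err = TagsEndpoint.validate_infohash("00" * 20)   # 40 hex characters -> (True, None)
ok, err = TagsEndpoint.validate_infohash("abcd")      # decodes to 2 bytes -> (False, 400 RESTResponse)
ok, err = TagsEndpoint.validate_infohash("not-hex!")  # binascii.Error     -> (False, 400 RESTResponse)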
Example No. 5
    async def completions(self, request):
        args = request.query
        if 'q' not in args:
            return RESTResponse({"error": "query parameter missing"},
                                status=HTTP_BAD_REQUEST)

        keywords = args['q'].strip().lower()
        # TODO: add XXX filtering for completion terms
        results = self.mds.get_auto_complete_terms(keywords, max_terms=5)
        return RESTResponse({"completions": results})
Example No. 6
    async def update_channel_entry(self, request):
        # TODO: unify checks for parts of the path, i.e. proper hex for public key, etc.
        try:
            parameters = await request.json()
        except (ContentTypeError, ValueError):
            return RESTResponse({"error": "Bad JSON input data"}, status=HTTP_BAD_REQUEST)

        public_key = unhexlify(request.match_info['public_key'])
        id_ = request.match_info['id']
        error, result = self.update_entry(public_key, id_, parameters)
        return RESTResponse(result, status=error or 200)
Example No. 7
    async def create_remote_search_request(self, request):
        # Query remote results from the GigaChannel Community.
        # Results are returned over the Events endpoint.
        try:
            sanitized = self.sanitize_parameters(request.query)
        except (ValueError, KeyError) as e:
            return RESTResponse({"error": f"Error processing request parameters: {e}"}, status=HTTP_BAD_REQUEST)

        request_uuid, peers_list = self.gigachannel_community.send_search_request(**sanitized)
        peers_mid_list = [hexlify(p.mid) for p in peers_list]

        return RESTResponse({"request_uuid": str(request_uuid), "peers": peers_mid_list})
Example No. 8
    async def search(self, request):
        try:
            sanitized = self.sanitize_parameters(request.query)
            tags = sanitized.pop('tags', None)
        except (ValueError, KeyError):
            return RESTResponse(
                {"error": "Error processing request parameters"},
                status=HTTP_BAD_REQUEST)

        include_total = request.query.get('include_total', '')

        mds: MetadataStore = self.mds

        def search_db():
            with db_session:
                pony_query = mds.get_entries(**sanitized)
                search_results = [r.to_simple_dict() for r in pony_query]
                if include_total:
                    total = mds.get_total_count(**sanitized)
                    max_rowid = mds.get_max_rowid()
                else:
                    total = max_rowid = None
            return search_results, total, max_rowid

        try:
            with db_session:
                if tags:
                    lower_tags = {tag.lower() for tag in tags}
                    infohash_set = self.tags_db.get_infohashes(lower_tags)
                    sanitized['infohash_set'] = infohash_set

            search_results, total, max_rowid = await mds.run_threaded(search_db)
        except Exception as e:  # pylint: disable=broad-except  # pragma: no cover
            self._logger.exception("Error while performing DB search: %s: %s",
                                   type(e).__name__, e)
            return RESTResponse(status=HTTP_BAD_REQUEST)

        self.add_tags_to_metadata_list(search_results,
                                       hide_xxx=sanitized["hide_xxx"])

        response_dict = {
            "results": search_results,
            "first": sanitized["first"],
            "last": sanitized["last"],
            "sort_by": sanitized["sort_by"],
            "sort_desc": sanitized["sort_desc"],
        }
        if include_total:
            response_dict.update(total=total, max_rowid=max_rowid)

        return RESTResponse(response_dict)
Example No. 9
    async def get_channel_entries(self, request):
        public_key = unhexlify(request.match_info['public_key'])
        id_ = request.match_info['id']
        with db_session:
            entry = self.mds.ChannelNode.get(public_key=public_key, id_=id_)

            if entry:
                # TODO: handle costly attributes in a more graceful and generic way for all types of metadata
                entry_dict = entry.to_simple_dict()
            else:
                return RESTResponse({"error": "entry not found in database"}, status=HTTP_NOT_FOUND)

        return RESTResponse(entry_dict)
Example No. 10
    async def delete_channel_entries(self, request):
        with db_session:
            request_parsed = await request.json()
            results_list = []
            for entry in request_parsed:
                public_key = unhexlify(entry.pop("public_key"))
                id_ = entry.pop("id")
                entry = self.mds.ChannelNode.get(public_key=public_key, id_=id_)
                if not entry:
                    return RESTResponse({"error": "Entry %i not found" % id_}, status=HTTP_BAD_REQUEST)
                entry.delete()
                result = {"public_key": hexlify(public_key), "id": id_, "state": "Deleted"}
                results_list.append(result)
            return RESTResponse(results_list)
Example No. 11
    async def post_commit(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        with db_session:
            if channel_id == 0:
                for t in self.mds.CollectionNode.commit_all_channels():
                    self.gigachannel_manager.updated_my_channel(TorrentDef.load_from_dict(t))
            else:
                coll = self.mds.CollectionNode.get(public_key=channel_pk, id_=channel_id)
                if not coll:
                    return RESTResponse({"success": False}, status=HTTP_NOT_FOUND)
                torrent_dict = coll.commit_channel_torrent()
                if torrent_dict:
                    self.gigachannel_manager.updated_my_channel(TorrentDef.load_from_dict(torrent_dict))

        return RESTResponse({"success": True})
Example No. 12
    async def update_channel_entries(self, request):
        try:
            request_parsed = await request.json()
        except (ContentTypeError, ValueError):
            return RESTResponse({"error": "Bad JSON"}, status=HTTP_BAD_REQUEST)
        results_list = []
        for entry in request_parsed:
            public_key = unhexlify(entry.pop("public_key"))
            id_ = entry.pop("id")
            error, result = self.update_entry(public_key, id_, entry)
            # TODO: handle the results for a list that contains some errors in a smarter way
            if error:
                return RESTResponse(result, status=error)
            results_list.append(result)
        return RESTResponse(results_list)
Example No. 13
    async def create_collection(self, request):
        with db_session:
            _, channel_id = self.get_channel_from_request(request)
            request_parsed = await request.json()
            collection_name = request_parsed.get("name", "New collection")
            md = self.mds.CollectionNode(origin_id=channel_id, title=collection_name, status=NEW)
            return RESTResponse({"results": [md.to_simple_dict()]})
Example No. 14
    async def create_channel(self, request):
        with db_session:
            _, channel_id = self.get_channel_from_request(request)
            request_parsed = await request.json()
            channel_name = request_parsed.get("name", "New channel")
            md = self.mds.ChannelMetadata.create_channel(channel_name, origin_id=channel_id)
            return RESTResponse({"results": [md.to_simple_dict()]})
Example No. 15
async def handle_version_request(request):
    global response, response_code, response_lag, last_request_user_agent  # pylint: disable=global-statement
    if response_lag > 0:
        await sleep(response_lag)
    user_agent = request.headers.get('User-Agent')
    last_request_user_agent = user_agent
    return RESTResponse(response, status=response_code)
Example No. 16
    async def is_channel_dirty(self, request):
        channel_pk, _ = self.get_channel_from_request(request)
        with db_session:
            dirty = self.mds.MetadataNode.exists(
                lambda g: g.public_key == channel_pk and g.status in DIRTY_STATUSES)
            return RESTResponse({"dirty": dirty})
Example No. 17
    async def get_channel_contents(self, request):
        sanitized = self.sanitize_parameters(request.query)
        include_total = request.query.get('include_total', '')
        channel_pk, channel_id = self.get_channel_from_request(request)
        sanitized.update({"channel_pk": channel_pk, "origin_id": channel_id})
        remote = sanitized.pop("remote", None)

        total = None

        remote_failed = False
        if remote:
            try:
                contents_list = await self.gigachannel_community.remote_select_channel_contents(**sanitized)
            except (RequestTimeoutException, NoChannelSourcesException, CancelledError):
                remote_failed = True

        if not remote or remote_failed:
            with db_session:
                contents = self.mds.get_entries(**sanitized)
                contents_list = [c.to_simple_dict() for c in contents]
                total = self.mds.get_total_count(**sanitized) if include_total else None
        self.add_download_progress_to_metadata_list(contents_list)
        self.add_tags_to_metadata_list(contents_list, hide_xxx=sanitized["hide_xxx"])
        response_dict = {
            "results": contents_list,
            "first": sanitized['first'],
            "last": sanitized['last'],
            "sort_by": sanitized['sort_by'],
            "sort_desc": int(sanitized['sort_desc']),
        }
        if total is not None:
            response_dict.update({"total": total})

        return RESTResponse(response_dict)
Example No. 18
    async def get_channels_peers(self, _):
        # Get debug stats for peers serving channels
        current_time = time.time()
        result = []
        mapping = self.gigachannel_community.channels_peers
        with db_session:
            for id_tuple, peers in mapping._channels_dict.items():  # pylint:disable=W0212
                channel_pk, channel_id = id_tuple
                chan = self.mds.ChannelMetadata.get(public_key=channel_pk,
                                                    id_=channel_id)

                peers_list = []
                for p in peers:
                    peers_list.append((hexlify(p.mid), int(current_time - p.last_response)))

                chan_dict = {
                    "channel_name": chan.title if chan else None,
                    "channel_pk": hexlify(channel_pk),
                    "channel_id": channel_id,
                    "peers": peers_list,
                }
                result.append(chan_dict)

        return RESTResponse({"channels_list": result})
Example No. 19
    async def get_circuit_slots(self, request):
        return RESTResponse({
            "slots": {
                "random": self.tunnel_community.random_slots,
                "competing": self.tunnel_community.competing_slots
            }
        })
Example No. 20
    async def get_memory_dump(self, request):
        if sys.platform == "win32":
            # On Windows, meliae (especially older versions) segfaults when writing to a file,
            # so dump to an in-memory buffer instead.
            dump_buffer = MemoryDumpBuffer()
            try:
                scanner.dump_all_objects(dump_buffer)
            except OverflowError as e:
                # https://bugs.launchpad.net/meliae/+bug/569947
                logging.error("meliae dump failed (your version may be too old): %s", str(e))
            content = dump_buffer.getvalue()
            dump_buffer.close()
        else:
            # On other platforms, simply writing to a file is much faster
            dump_file_path = self.state_dir / 'memory_dump.json'
            scanner.dump_all_objects(dump_file_path)
            with open(dump_file_path) as dump_file:
                content = dump_file.read()
        date_str = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
        return RESTResponse(
            content,
            headers={
                'Content-Type': 'application/json',
                'Content-Disposition': 'attachment; filename=tribler_memory_dump_%s.json' % date_str
            })
Example No. 21
    async def get_libtorrent_settings(self, request):
        args = request.query
        hop = 0
        if 'hop' in args and args['hop']:
            hop = int(args['hop'])

        if hop not in self.download_manager.ltsessions:
            return RESTResponse({'hop': hop, "settings": {}})

        lt_session = self.download_manager.ltsessions[hop]
        if hop == 0:
            lt_settings = self.download_manager.get_session_settings(lt_session)
            lt_settings['peer_fingerprint'] = hexlify(lt_settings['peer_fingerprint'])
        else:
            lt_settings = lt_session.get_settings()

        return RESTResponse({'hop': hop, "settings": lt_settings})
Example No. 22
    async def get_open_files(self, request):
        # Return the path and file descriptor of every file currently opened by this process.
        my_process = psutil.Process()
        return RESTResponse({
            "open_files": [{
                "path": open_file.path,
                "fd": open_file.fd
            } for open_file in my_process.open_files()]
        })
Example No. 23
    async def update_tags_entries(self, request):
        params = await request.json()
        infohash = request.match_info["infohash"]
        ih_valid, error_response = TagsEndpoint.validate_infohash(infohash)
        if not ih_valid:
            return error_response

        tags = {tag.lower() for tag in params["tags"]}

        # Validate whether the size of the tag is within the allowed range
        for tag in tags:
            if len(tag) < MIN_TAG_LENGTH or len(tag) > MAX_TAG_LENGTH:
                return RESTResponse({"error": "Invalid tag length"}, status=HTTP_BAD_REQUEST)

        self.modify_tags(unhexlify(infohash), tags)

        return RESTResponse({"success": True})
Example No. 24
    async def get_channel_description(self, request):
        channel_pk, channel_id = self.get_channel_from_request(request)
        with db_session:
            channel_description = self.mds.ChannelDescription.select(
                lambda g: g.public_key == channel_pk and g.origin_id == channel_id
            ).first()

        response_dict = json.loads(channel_description.json_text) if (channel_description is not None) else {}
        return RESTResponse(response_dict)
Example No. 25
    async def copy_channel(self, request):
        with db_session:
            channel_pk, channel_id = self.get_channel_from_request(request)
            personal_root = channel_id == 0 and channel_pk == self.mds.my_key.pub().key_to_bin()[10:]
            # TODO: better error handling
            target_collection = self.mds.CollectionNode.get(
                public_key=channel_pk, id_=channel_id)
            try:
                request_parsed = await request.json()
            except (ContentTypeError, ValueError):
                return RESTResponse({"error": "Bad JSON"},
                                    status=HTTP_BAD_REQUEST)

            if not target_collection and not personal_root:
                return RESTResponse({"error": "Target channel not found"},
                                    status=HTTP_NOT_FOUND)
            results_list = []
            for entry in request_parsed:
                public_key, id_ = unhexlify(entry["public_key"]), entry["id"]
                source = self.mds.ChannelNode.get(public_key=public_key,
                                                  id_=id_)
                if not source:
                    return RESTResponse({"error": "Source entry not found"},
                                        status=HTTP_BAD_REQUEST)
                # We must upgrade Collections to Channels when moving them to root channel, and, vice-versa,
                # downgrade Channels to Collections when moving them into existing channels
                if isinstance(source, self.mds.CollectionNode):
                    src_dict = source.to_dict()
                    if channel_id == 0:
                        rslt = self.mds.ChannelMetadata.create_channel(
                            title=source.title)
                    else:
                        dst_dict = {'origin_id': channel_id, "status": NEW}
                        for k in self.mds.CollectionNode.nonpersonal_attributes:
                            dst_dict[k] = src_dict[k]
                        dst_dict.pop("metadata_type")
                        rslt = self.mds.CollectionNode(**dst_dict)
                    for child in source.actual_contents:
                        child.make_copy(rslt.id_)
                else:
                    rslt = source.make_copy(channel_id)
                results_list.append(rslt.to_simple_dict())
            return RESTResponse(results_list)
Example No. 26
    async def get_torrent_health(self, request):
        timeout = request.query.get('timeout')
        if not timeout:
            timeout = TORRENT_CHECK_TIMEOUT
        elif timeout.isdigit():
            timeout = int(timeout)
        else:
            return RESTResponse({"error": f"Error processing timeout parameter '{timeout}'"}, status=HTTP_BAD_REQUEST)
        refresh = request.query.get('refresh', '0') == '1'
        nowait = request.query.get('nowait', '0') == '1'

        infohash = unhexlify(request.match_info['infohash'])
        result_future = self.torrent_checker.check_torrent_health(infohash, timeout=timeout, scrape_now=refresh)
        # Return immediately. Used by GUI to schedule health updates through the EventsEndpoint
        if nowait:
            return RESTResponse({'checking': '1'})

        # Errors will be handled by error_middleware
        result = await result_future
        return RESTResponse({'health': result})
Example No. 27
    async def get_tribler_stats(self, request):
        stats_dict = {}
        if self.mds:
            db_size = self.mds.get_db_file_size()
            stats_dict = {
                "db_size": db_size,
                "num_channels": self.mds.get_num_channels(),
                "num_torrents": self.mds.get_num_torrents()
            }

        return RESTResponse({'tribler_statistics': stats_dict})
Example No. 28
    async def get_libtorrent_session_info(self, request):
        session_stats = Future()

        def on_session_stats_alert_received(alert):
            if not session_stats.done():
                session_stats.set_result(alert.values)

        args = request.query
        hop = 0
        if 'hop' in args and args['hop']:
            hop = int(args['hop'])

        if hop not in self.download_manager.ltsessions or \
                not hasattr(self.download_manager.ltsessions[hop], "post_session_stats"):
            return RESTResponse({'hop': hop, 'session': {}})

        self.download_manager.session_stats_callback = on_session_stats_alert_received
        self.download_manager.ltsessions[hop].post_session_stats()
        stats = await session_stats
        return RESTResponse({'hop': hop, 'session': stats})
Example No. 29
    async def get_suggestions(self, request):
        """
        Get suggestions for a particular tag.
        """
        infohash = request.match_info["infohash"]
        ih_valid, error_response = TagsEndpoint.validate_infohash(infohash)
        if not ih_valid:
            return error_response

        with db_session:
            suggestions = self.db.get_suggestions(unhexlify(infohash))
            return RESTResponse({"suggestions": suggestions})
Example No. 30
    async def get_channels(self, request):
        sanitized = self.sanitize_parameters(request.query)
        sanitized['subscribed'] = (
            None if 'subscribed' not in request.query else bool(int(request.query['subscribed'])))
        include_total = request.query.get('include_total', '')
        sanitized.update({"origin_id": 0})
        sanitized['metadata_type'] = CHANNEL_TORRENT

        with db_session:
            channels = self.mds.get_entries(**sanitized)
            total = self.mds.get_total_count(**sanitized) if include_total else None
            channels_list = []
            for channel in channels:
                channel_dict = channel.to_simple_dict()
                # Add progress info for those channels that are still being processed
                if channel.subscribed:
                    if channel_dict["state"] == CHANNEL_STATE.UPDATING.value:
                        try:
                            progress = self.mds.compute_channel_update_progress(channel)
                            channel_dict["progress"] = progress
                        except (ZeroDivisionError, FileNotFoundError) as e:
                            self._logger.error(
                                "Error %s when calculating channel update progress. Channel data: %s-%i %i/%i",
                                e,
                                hexlify(channel.public_key),
                                channel.id_,
                                channel.start_timestamp,
                                channel.local_version,
                            )
                    elif channel_dict["state"] == CHANNEL_STATE.METAINFO_LOOKUP.value:
                        if (not self.download_manager.metainfo_requests.get(bytes(channel.infohash))
                                and self.download_manager.download_exists(bytes(channel.infohash))):
                            channel_dict["state"] = CHANNEL_STATE.DOWNLOADING.value

                channels_list.append(channel_dict)
        response_dict = {
            "results": channels_list,
            "first": sanitized["first"],
            "last": sanitized["last"],
            "sort_by": sanitized["sort_by"],
            "sort_desc": int(sanitized["sort_desc"]),
        }
        if total is not None:
            response_dict.update({"total": total})
        return RESTResponse(response_dict)