Example 1
    def _logout_btn_clicked(self, event=None):
        # TODO: Move this to some request handler
        # Send the logout request while the token is still set,
        # then clear it locally.
        make_request("logout", dict(token=self.controller.token))

        self.controller.token = None
        self.controller.switch_frame("LoginPage")
Example 2
def test_delete_collection_bad():
    """
    Confirm that bad deletion attempts are handled correctly.

    Checks:
    * Random string as base user -> 403
    * Random string as data user -> 404
    * Random uuid as data user -> 404
    """
    session = requests.Session()

    as_user(session, USERS["base"])
    for _ in range(2):
        response = make_request(session,
                                f"/api/v1/collection/{random_string()}",
                                method="DELETE")
        assert response.code == 403
        assert not response.data

    as_user(session, USERS["data"])
    for _ in range(2):
        response = make_request(session,
                                f"/api/v1/collection/{random_string()}",
                                method="DELETE")
        assert response.code == 404
        assert not response.data

    for _ in range(2):
        response = make_request(session,
                                f"/api/v1/collection/{uuid.uuid4()}",
                                method="DELETE")
        assert response.code == 404
        assert not response.data
Example 3
def get_movie(server_name, server_id, movie_code):
    movie_info = []

    video_url = None
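    # these server ids return a ready-made embed URL from the load_embed endpoint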
    if server_id in (0, 12, 13, 14, 15):
        video_url = json.loads(
            make_request('{0}ajax/load_embed/{1}'.format(
                SITE_URL, movie_code)))['embed_url']
        movie_info.append({'title': server_name, 'url': video_url})
    else:
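        # derive the request key: md5 of the movie code, a random 6-char
        # nonce and a fixed salt; the same nonce is echoed back in the cookie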
        uniq = ''.join(random.SystemRandom().choice(string.ascii_lowercase +
                                                    string.digits)
                       for _ in range(6))
        key = hashlib.md5('{0}{1}7bcq9826avrbi6m4'.format(movie_code,
                                                          uniq)).hexdigest()
        cookie = 'i6m49vd7shxkn985mhodk{0}twz87wwxtp3dqiicks2dfyud213k6yg={1}'.format(
            movie_code, uniq)
        info = json.loads(
            make_request(
                '{0}ajax/get_sources/{1}/{2}/2'.format(SITE_URL, movie_code,
                                                       key), cookie))
        for source in info['playlist'][0]['sources']:
            movie_info.append({
                'title':
                '{0} ({1})'.format(server_name, source['label']),
                'url':
                make_request_no_redirect(source['file']).url
            })

    return movie_info
Example 4
def test_key_login():
    """Test API key login for all users"""
    session = requests.Session()
    helpers.as_user(session, None)
    for i, userid in enumerate(helpers.USERS):
        response = helpers.make_request(
            session,
            "/api/v1/login/apikey",
            data={
                "api-user": helpers.USERS[userid],
                "api-key": str(i - 1)
            },
            method="POST",
        )
        if userid == "no-login":
            assert response.code == 401
            assert not response.data
        else:
            assert response.code == 200
            assert not response.data

            response = helpers.make_request(session,
                                            "/api/v1/developer/loginhello")
            assert response.code == 200
            assert response.data == {"test": "success"}
Example 5
def test_get_dataset(mdb):
    """
    Confirm that datasets are returned correctly.

    Tests:
      * Confirm that the correct dataset is returned
      * Confirm that the dataset is not listed in ``related``
    """
    session = requests.Session()

    order_id = helpers.add_order()
    ds_id = helpers.add_dataset(order_id)
    ds_id2 = helpers.add_dataset(order_id)
    coll_id = helpers.add_collection([ds_id])
    coll_id2 = helpers.add_collection([ds_id])

    helpers.as_user(session, helpers.USERS["edit"])
    order_data = mdb["orders"].find_one({"_id": order_id})

    response = helpers.make_request(session, f"/api/v1/dataset/{ds_id}")
    assert response.code == 200
    result = response.data["dataset"]
    assert result["order"]["id"] == str(order_id)
    assert set(entry["id"] for entry in result["related"]) == {str(ds_id2)}
    assert set(entry["id"] for entry in result["collections"]) == {
        str(coll_id), str(coll_id2)
    }
    assert set(entry["id"] for entry in result["authors"]) == set(
        str(entry) for entry in order_data["authors"])
    assert set(entry["id"] for entry in result["generators"]) == set(
        str(entry) for entry in order_data["generators"])
    assert result["organisation"]["id"] == str(order_data["organisation"])
    assert set(entry["id"] for entry in result["editors"]) == set(
        str(entry) for entry in order_data["editors"])

    helpers.as_user(session, helpers.USERS["base"])
    order_data = mdb["orders"].find_one({"_id": order_id})

    response = helpers.make_request(session, f"/api/v1/dataset/{ds_id}")
    assert response.code == 200
    result = response.data["dataset"]
    assert "order" not in result
    assert set(entry["id"] for entry in result["related"]) == {str(ds_id2)}
    assert set(entry["id"] for entry in result["collections"]) == {
        str(coll_id), str(coll_id2)
    }
    assert set(entry["id"] for entry in result["authors"]) == set(
        str(entry) for entry in order_data["authors"])
    assert set(entry["id"] for entry in result["generators"]) == set(
        str(entry) for entry in order_data["generators"])
    assert result["organisation"]["id"] == str(order_data["organisation"])
    assert "editors" not in result

    mdb["orders"].delete_one({"_id": order_id})
    mdb["datasets"].delete_one({"_id": ds_id})
    mdb["datasets"].delete_one({"_id": ds_id2})
    mdb["collections"].delete_one({"_id": coll_id})
    mdb["collections"].delete_one({"_id": coll_id2})
Example 6
def test_dataset_update_bad(dataset_for_tests):
    """Confirm that bad requests will be rejected."""
    indata = {"dataset": {"title": "Updated title"}}
    ds_uuid = helpers.random_string()
    responses = helpers.make_request_all_roles(f"/api/v1/dataset/{ds_uuid}",
                                               method="PATCH",
                                               data=indata)
    for response in responses:
        if response.role in ("edit", "data", "root"):
            assert response.code == 404
        elif response.role == "no-login":
            assert response.code == 401
        else:
            assert response.code == 403
            assert not response.data

    ds_uuid = uuid.uuid4().hex
    responses = helpers.make_request_all_roles(f"/api/v1/dataset/{ds_uuid}",
                                               method="PATCH",
                                               data=indata)
    for response in responses:
        if response.role in ("edit", "data", "root"):
            assert response.code == 404
        elif response.role == "no-login":
            assert response.code == 401
        else:
            assert response.code == 403
            assert not response.data

    ds_uuid = dataset_for_tests
    session = requests.Session()
    helpers.as_user(session, helpers.USERS["data"])
    indata = {"title": ""}
    response = helpers.make_request(session,
                                    f"/api/v1/dataset/{ds_uuid}",
                                    method="PATCH",
                                    data=indata)
    assert response.code == 400
    assert not response.data

    indata = {"dataset": {"extra": "asd"}}
    response = helpers.make_request(session,
                                    f"/api/v1/dataset/{ds_uuid}",
                                    method="PATCH",
                                    data=indata)
    assert response.code == 400
    assert not response.data

    indata = {"dataset": {"timestamp": "asd"}}
    response = helpers.make_request(session,
                                    f"/api/v1/dataset/{ds_uuid}",
                                    method="PATCH",
                                    data=indata)
    assert response.code == 400
    assert not response.data
Example 7
def test_logout():
    """Assure that session is cleared after logging out."""
    session = requests.Session()
    helpers.as_user(session, helpers.USERS["root"])
    response = helpers.make_request(session, "/api/v1/user/me")
    for field in response.data["user"]:
        assert response.data["user"][field]
    response = helpers.make_request(session, "/api/v1/logout", ret_json=False)
    response = helpers.make_request(session, "/api/v1/user/me")
    for field in response.data["user"]:
        assert not response.data["user"][field]
Example 8
def test_delete_dataset_permissions(mdb):
    """
    Confirm that permissions for deleting datasets are correct.

    Checks:
      * DATA_MANAGER can delete any dataset
      * DATA_EDIT can delete dataset where they are editors (in the order)
      * Other users cannot delete any dataset, even if they are editors
    """
    order_id = helpers.add_order()
    ds_id = helpers.add_dataset(order_id)
    session = requests.Session()

    for role in helpers.USERS:
        helpers.as_user(session, helpers.USERS[role])
        response = helpers.make_request(session,
                                        f"/api/v1/dataset/{ds_id}",
                                        method="DELETE",
                                        ret_json=True)
        if role in ("edit", "data", "root"):
            assert response.code == 200
            ds_id = helpers.add_dataset(order_id)
        elif role == "no-login":
            assert response.code == 401
        else:
            assert response.code == 403
        assert not response.data

    edit_user = mdb["users"].find_one({"auth_ids": helpers.USERS["edit"]})
    mdb["orders"].update_one({"_id": order_id},
                             {"$pull": {
                                 "editors": edit_user["_id"]
                             }})
    helpers.as_user(session, helpers.USERS["edit"])
    response = helpers.make_request(session,
                                    f"/api/v1/dataset/{ds_id}",
                                    method="DELETE",
                                    ret_json=True)
    assert response.code == 403
    assert not response.data

    base_user = mdb["users"].find_one({"auth_ids": helpers.USERS["base"]})
    mdb["orders"].update_one({"_id": order_id},
                             {"$push": {
                                 "editors": base_user["_id"]
                             }})
    helpers.as_user(session, helpers.USERS["base"])
    response = helpers.make_request(session,
                                    f"/api/v1/dataset/{ds_id}",
                                    method="DELETE",
                                    ret_json=True)
    assert response.code == 403
    assert not response.data
Example 9
def test_add_dataset_data(mdb):
    """
    Confirm that values are set correctly and logs are created.

    Checks:
      * All values can be set correctly
      * Dataset is added correctly to the order
      * Description is escaped
    """
    order_id = helpers.add_order()
    indata = {
        "dataset": {
            "title": "New add dataset data title",
            "description": "<br />",
            "tags": ["testing", "add_dataset"],
        }
    }
    indata["dataset"].update(TEST_LABEL)
    session = requests.Session()
    helpers.as_user(session, USERS["data"])

    response = helpers.make_request(
        session,
        f"/api/v1/order/{order_id}/dataset",
        method="POST",
        data=indata,
        ret_json=True,
    )
    assert response.code == 200
    assert "id" in response.data
    assert len(response.data["id"]) == 38
    order_info = mdb["orders"].find_one({"_id": order_id})
    assert len(order_info["datasets"]) == 1

    added_ds = mdb["datasets"].find_one({"_id": response.data["id"]})
    for key in indata["dataset"]:
        if key == "description":
            assert added_ds[key] == "&lt;br /&gt;"
        else:
            assert added_ds[key] == indata["dataset"][key]

    response = helpers.make_request(
        session,
        f"/api/v1/order/{order_id}/dataset",
        method="POST",
        data=indata,
        ret_json=True,
    )
    assert response.code == 200
    assert "id" in response.data
    assert len(response.data["id"]) == 38
    order_info = mdb["orders"].find_one({"_id": order_id})
    assert len(order_info["datasets"]) == 2
Example 10
def test_dataset_update_permissions(mdb):
    """
    Confirm that permissions for updating datasets are correct.

    Checks:
      * DATA_MANAGER can update any dataset
      * DATA_EDIT can update datasets where they are editors (in the order)
      * Other users cannot update any dataset, even if they are editors
    """
    session = requests.Session()
    order_id = helpers.add_order()
    ds_id = helpers.add_dataset(order_id)

    indata = {"dataset": {"title": "Updated dataset permissions title"}}
    responses = helpers.make_request_all_roles(f"/api/v1/dataset/{ds_id}",
                                               method="PATCH",
                                               data=indata)
    for response in responses:
        if response.role in ("edit", "data", "root"):
            assert response.code == 200
        elif response.role == "no-login":
            assert response.code == 401
        else:
            assert response.code == 403
        assert not response.data

    indata = {"dataset": {"title": "Updated dataset permissions title 2"}}
    edit_user = mdb["users"].find_one({"auth_ids": helpers.USERS["edit"]})
    mdb["orders"].update_one({"_id": order_id},
                             {"$pull": {
                                 "editors": edit_user["_id"]
                             }})
    helpers.as_user(session, helpers.USERS["edit"])
    response = helpers.make_request(session,
                                    f"/api/v1/dataset/{ds_id}",
                                    method="PATCH",
                                    data=indata)
    assert response.code == 403
    assert not response.data

    base_user = mdb["users"].find_one({"auth_ids": helpers.USERS["base"]})
    mdb["orders"].update_one({"_id": order_id},
                             {"$push": {
                                 "editors": base_user["_id"]
                             }})
    helpers.as_user(session, helpers.USERS["base"])
    response = helpers.make_request(session,
                                    f"/api/v1/dataset/{ds_id}",
                                    method="PATCH",
                                    data=indata)
    assert response.code == 403
    assert not response.data
Example 11
def test_add_user(mdb):
    """Add a user."""
    indata = {"user": {"email": "*****@*****.**"}}
    session = requests.Session()
    for role in USERS:
        as_user(session, USERS[role])
        response = make_request(session,
                                "/api/v1/user",
                                ret_json=True,
                                method="POST",
                                data=indata)
        if role in ("users", "root", "edit"):
            assert response.code == 200
            assert "id" in response.data
            new_user_info = mdb["users"].find_one({"_id": response.data["id"]})
            assert indata["user"]["email"] == new_user_info["email"]
            indata["user"]["email"] = "new_" + indata["user"]["email"]
        elif role == "no-login":
            assert response.code == 401
            assert not response.data
        else:
            assert response.code == 403
            assert not response.data

    indata = {
        "user": {
            "affiliation": "Added University",
            "name": "Added name",
            "email": "*****@*****.**",
            "permissions": ["DATA_EDIT"],
        }
    }
    session = requests.Session()
    as_user(session, USERS["edit"])
    response = make_request(session,
                            "/api/v1/user",
                            ret_json=True,
                            method="POST",
                            data=indata)
    assert response.code == 403

    as_user(session, USERS["root"])
    response = make_request(session,
                            "/api/v1/user",
                            ret_json=True,
                            method="POST",
                            data=indata)
    assert response.code == 200
    assert "id" in response.data
    new_user_info = mdb["users"].find_one({"_id": response.data["id"]})
    for key in indata["user"]:
        assert new_user_info[key] == indata["user"][key]
Example 12
def test_update_current_user(mdb):
    """Update the info about the current user."""
    session = requests.Session()

    indata = {"user": {}}
    for user in USERS:
        as_user(session, USERS[user])
        user_info = mdb["users"].find_one({"auth_ids": USERS[user]})
        response = make_request(session,
                                "/api/v1/user/me",
                                ret_json=True,
                                method="PATCH",
                                data=indata)
        if user != "no-login":
            assert response.code == 400
        else:
            assert response.code == 401
        assert not response.data
        new_user_info = mdb["users"].find_one({"auth_ids": USERS[user]})
        assert user_info == new_user_info

    indata = {
        "user": {
            "affiliation": "Updated University",
            "name": "Updated name"
        }
    }
    session = requests.Session()
    for user in USERS:
        as_user(session, USERS[user])
        user_info = mdb["users"].find_one({"auth_ids": USERS[user]})
        response = make_request(session,
                                "/api/v1/user/me",
                                ret_json=True,
                                method="PATCH",
                                data=indata)
        if user != "no-login":
            assert response.code == 200
            assert not response.data
            new_user_info = mdb["users"].find_one({"auth_ids": USERS[user]})
            for key in new_user_info:
                if key in indata["user"].keys():
                    assert new_user_info[key] == indata["user"][key]
                else:
                    mdb["users"].update_one(new_user_info, {"$set": user_info})
        else:
            assert response.code == 401
            assert not response.data
Example 13
def handle_listing(page, platform, url, sql_handler):
    if platform == 'indeed':
        title = extractors.extract_element_text(
            page, 'h1', {'class': 'jobsearch-JobInfoHeader-title'})
        company = extractors.extract_element_text(
            page, 'div', {'class': 'jobsearch-CompanyReview--heading'})
        if not company:
            company = extractors.extract_element_text(
                page, 'div', {'class': 'icl-u-lg-mr--sm icl-u-xs-mr--xs'})
        job_meta_header = extractors.extract_element_text(
            page, 'span', {'class': 'jobsearch-JobMetadataHeader-item'})
        desc = extractors.extract_element_text(page, 'div',
                                               {'id': 'jobDescriptionText'})
        url = extractors.extract_element_attr_value(page, 'meta',
                                                    {'id': 'indeed-share-url'},
                                                    'content')
        job_id = helpers.get_url_param_value(url, 'jk')
        date = extractors.extract_indeed_job_footer_text(page)
        sql_handler.save_indeed_job(job_id=job_id,
                                    date=date,
                                    company=company,
                                    title=title,
                                    job_meta=job_meta_header,
                                    text=desc,
                                    url=url,
                                    platform=platform)
    if platform == 'twitter':
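        # page through the API: keep requesting while a pagination token is returned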
        next_token = handle_twitter_response(page)
        while next_token:
            token_url = helpers.format_url(
                url, platform, add_param={'pagination_token': next_token})
            page = helpers.make_request(token_url, platform)
            next_token = handle_twitter_response(page)

    if platform == 'Volkswagen_press':
        id = platform + '_' + helpers.get_url_path_element(url, -1)
        title = extractors.extract_element_text(page, 'h1',
                                                {'class': 'page--title'})
        company = "Volkswagen"
        date = extractors.extract_element_text(page, 'div',
                                               {'class': 'meta--item'}, 0)
        date_string = extractors.extract_date_string_from_text(date, platform)
        meta_topics = extractors.extract_child_element_text(
            page, 'div', {'class': 'meta--item'}, 'a', {'content-link': ''}, 2,
            0)
        short_summary = extractors.extract_list_text_by_parent(
            page, 'div', {'class': 'topic-list'})
        summary = extractors.extract_child_element_text(
            page, 'div', {'class': 'page-item--intro'}, 'p', None, 0, 0)
        text = extractors.extract_concatinated_text_by_element(
            page, 'div', {'class': 'page-item--text'}, 'p')
        sql_handler.save_press_release(release_id=id,
                                       company=company,
                                       release_date=date_string,
                                       topics=meta_topics,
                                       url=url,
                                       title=title,
                                       short_summary=short_summary,
                                       summary=summary,
                                       text=text)
Example 14
def test_get_collection_logs_bad():
    """Confirm that bad identifiers return 404."""
    session = requests.Session()
    helpers.as_user(session, helpers.USERS["data"])
    for _ in range(3):
        response = make_request(session,
                                f"/api/v1/collection/{uuid.uuid4()}/log",
                                ret_json=True)
        assert response.code == 404
        assert not response.data
        response = make_request(
            session,
            f"/api/v1/collection/{helpers.random_string()}/log",
            ret_json=True)
        assert response.code == 404
        assert not response.data
Example 15
def begin_crawl():
    # explode out all of our category start_urls into subcategories
    with open(settings.start_file, "r") as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):
                continue  # skip blank and commented out lines

            page, html = make_request(line)
            count = 0

            # Look for sub links
            subcategories = page.findAll("div", "bxc-grid__image")  # downward arrow graphics
            subcategories.extend(page.findAll("li", "sub-categories__list__item"))  # carousel hover menu
            sidebar = page.find("div", "browseBox")
            if sidebar:
                subcategories.extend(sidebar.findAll("li"))

            for subcategory in subcategories:
                link = subcategory.find("a")
                if not link:
                    continue
                link = link["href"]
                count += 1
                enqueue_url(link)

            log("Found {} subcategories on {}".format(count, line))
Example 16
def GetMoviesBySearch(conn, getBy, searchTerm, start=0):
    if searchBy.has_key(getBy.lower()):
        method = 'VideoLibrary.GetMovies'
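        # JSON-RPC 2.0 payload: fetch a small window of titles starting at
        # `start`, sorted alphabetically, filtered on the requested field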
        json_params = {
            'jsonrpc': '2.0',
            'method': method,
            'id': 15,
            'params': {
                'properties': [],
                'limits': {
                    'start': start,
                    'end': start + 3
                },
                'sort': {
                    'order': 'ascending',
                    'method': 'title',
                    'ignorearticle': True
                },
                'filter': {
                    'field': searchBy[getBy.lower()],
                    'operator': 'contains',
                    'value': searchTerm
                }
            }
        }
    else:
        return {'error': 'Unable to search by ' + getBy + '.'}
    res = helpers.make_request(conn, method, json_params)
    if (res.has_key('result') and res['result'].has_key('movies')
            and len(res['result']['movies']) > 0):
        return res['result']['movies']
    else:
        print 'No movies found matching your search for ' + searchTerm + '.'
Example 17
def begin_crawl():

    # explode out all of our category `start_urls` into subcategories
    with open(settings.start_file, "r") as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):
                continue  # skip blank and commented out lines

            page, html = make_request(line)
            count = 0

            # look for subcategory links on this page
            subcategories = page.findAll("div", "bxc-grid__image")  # downward arrow graphics
            subcategories.extend(page.findAll("li", "sub-categories__list__item"))  # carousel hover menu
            sidebar = page.find("div", "browseBox")
            if sidebar:
                subcategories.extend(sidebar.findAll("li"))  # left sidebar

            for subcategory in subcategories:
                link = subcategory.find("a")
                if not link:
                    continue
                link = link["href"]
                count += 1
                enqueue_url(link)

            log("Found {} subcategories on {}".format(count, line))
Example 18
def user_post_feed(user_id, max_cursor=0):
    request_url = Constants.BASE_URL + Constants.USER_POST_FEED_ENDP.format(
        user_id, max_cursor) + helpers.query(Constants.DEVICE_VARS)
    # as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    # request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example 19
def test_get_dataset_logs_permissions(mdb):
    """
    Get dataset logs.

    Assert that DATA_MANAGEMENT or user in editors is required.
    """
    dataset_data = mdb["datasets"].aggregate([{"$sample": {"size": 1}}]).next()
    order_data = mdb["orders"].find_one({"datasets": dataset_data["_id"]})
    user_data = mdb["users"].find_one(
        {"$or": [{
            "_id": {
                "$in": order_data["editors"]
            }
        }]})
    responses = helpers.make_request_all_roles(
        f'/api/v1/dataset/{dataset_data["_id"]}/log', ret_json=True)
    for response in responses:
        if response.role in ("data", "root"):
            assert response.code == 200
            assert "logs" in response.data
        elif response.role == "no-login":
            assert response.code == 401
            assert not response.data
        else:
            assert response.code == 403
            assert not response.data

    session = requests.Session()

    helpers.as_user(session, user_data["auth_ids"][0])
    response = helpers.make_request(
        session, f'/api/v1/dataset/{dataset_data["_id"]}/log', ret_json=True)

    assert response.code == 200
    assert "logs" in response.data
Example 20
def hashtag_search(text):
    request_url = Constants.BASE_URL + Constants.HASHTAG_SEARCH_ENDP.format(
        text) + helpers.query(Constants.DEVICE_VARS)
    # as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    # request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example 21
def get_following(target_user_id):
    request_url = Constants.BASE_URL + Constants.USER_FOLLOWING_FNDP.format(
        target_user_id) + helpers.query(Constants.DEVICE_VARS)
    # as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    # request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example 22
def get_live_feed(live_room_id):
    request_url = Constants.BASE_URL + Constants.LIVE_ROOM_ENDP.format(
        live_room_id) + helpers.query(Constants.DEVICE_VARS)
    # as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    # request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example 23
def test_add_order_log(mdb):
    """
    Confirm that logs are created when orders are added.

    Checks:
    * Confirm that a log entry is created after order is added.
    """
    indata = {
        "order": {
            "description": "Test add order log description",
            "title": "Test add order log title",
        }
    }
    indata["order"].update(TEST_LABEL)

    session = requests.Session()
    helpers.as_user(session, USERS["edit"])
    response = helpers.make_request(session,
                                    "/api/v1/order",
                                    method="POST",
                                    data=indata,
                                    ret_json=True)
    assert response.code == 200
    assert "id" in response.data
    assert len(response.data["id"]) == 38
    order = mdb["orders"].find_one({"_id": response.data["id"]})
    logs = list(mdb["logs"].find({
        "data_type": "order",
        "data._id": response.data["id"]
    }))
    assert len(logs) == 1
    assert logs[0]["data"] == order
    assert logs[0]["action"] == "add"
Example 24
def _get_menu():
    resp = make_request(SITE_URL)

    soup = BeautifulSoup(resp)

    div_menu = soup.find('div', {'id': 'menu'})

    li_level1 = div_menu.find('ul').findAll('li', recursive=False)

    menu = []

    for li in li_level1:
        a = li.find('a')
        attrs = dict(a.attrs)
        if attrs['title'] in ('Genre', 'Country'):
            children = []
            for a in li.find('ul').findAll('a'):
                children.append({
                    'title': a.text,
                    'url': dict(a.attrs)['href']
                })
            menu.append({
                'title': attrs['title'],
                'url': '',
                'children': children
            })
        elif attrs['title'] == 'TV - Series':
            menu.append({'title': attrs['title'], 'url': attrs['href']})

    return menu
Example 25
def GetPlayerItem(conn):
    playerid = helpers.get_player_id(conn)

    if playerid > 0:
        method = 'Player.GetItem'
        json_params = {
            'jsonrpc': '2.0',
            'method': method,
            'id': 1,
            'params': {
                'playerid': playerid
            }
        }
        res = helpers.make_request(conn, method, json_params)
        if (res.has_key('result') and res['result'].has_key('item')
                and res['result']['item'].has_key('label')):
            print(res['result']['item']['label'])
        else:
            print 'An error occurred'

    elif playerid == 0:
        print 'There is no player'

    else:
        print 'An error occurred'
Example 26
def test_delete_user(mdb):
    """Test deleting users (added when testing to add users)."""
    re_users = re.compile("@added.example.com")
    users = list(mdb["users"].find({"email": re_users}, {"_id": 1}))
    assert users
    session = requests.Session()
    i = 0
    while i < len(users):
        for role in USERS:
            as_user(session, USERS[role])
            response = make_request(session,
                                    f'/api/v1/user/{users[i]["_id"]}',
                                    method="DELETE")
            if role in ("users", "root"):
                assert response.code == 200
                assert not response.data
                assert not mdb["users"].find_one({"_id": users[i]["_id"]})
                assert mdb["logs"].find_one({
                    "data._id": users[i]["_id"],
                    "action": "delete",
                    "data_type": "user",
                })
                i += 1
                if i >= len(users):
                    break
            elif role == "no-login":
                assert response.code == 401
                assert not response.data
            else:
                assert response.code == 403
                assert not response.data
Example 27
def begin_crawl(crawl_more):

    visited = {}
    product_dict = {}
    if crawl_more:
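        # resuming an earlier crawl: seed `visited` with URLs already collected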

        with open(settings.a_URL_file, 'r') as w:
            urls = (w.readlines())
        for url in urls:
            url = url.strip()
            visited[url] = True

    w = open(settings.a_URL_file, 'a')
    with open(settings.start_file, "r") as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):
                continue  # skip blank and commented out lines
            page, html = make_request(line)
            url = line
            count = 0
            while page is not None and count <= 50:
                items = page.findAll("li", "s-result-item")
                for item in items[:settings.max_details_per_listing]:
                    product_image = get_primary_img(item)
                    if not product_image:
                        continue
                    product_title = get_title(item)
                    product_url = get_url(item)
                    product_price = get_price(item)
                    if product_url not in visited:
                        count += 1
                        print product_url, product_price, product_title
                        visited[product_url] = True  # mark that we've seen it
                        # need to add host to url
                        product_url = format_url(product_url)
                        w.write('%s\n' % product_url)
                        product_dict[product_url] = (product_title,
                                                     product_price)
                        print count, product_url, product_dict[product_url]

                next_link = page.find("a", id="pagnNextLink")
                if next_link:
                    page, html = make_request(next_link["href"])
                    url = next_link["href"]
                else:
                    page = None  # no next page; stop paging this category
    w.close()
    pickle.dump(product_dict, open("amazon-products.p", "wb"))
Example 28
def test_get_order_logs_bad():
    """
    Request the logs for multiple orders.

    Confirm that bad identifiers give response 404.
    """
    session = requests.Session()
    for _ in range(2):
        as_user(session, USERS["data"])
        response = make_request(session,
                                f"/api/v1/order/{uuid.uuid4()}/log",
                                ret_json=True)
        assert response.code == 404
        response = make_request(session,
                                f"/api/v1/order/{random_string()}/log",
                                ret_json=True)
        assert response.code == 404
Example 29
def get_user_info(user_id):
    request_url = Constants.BASE_URL + Constants.USER_INFO_ENDP.format(
        user_id) + helpers.query(Constants.DEVICE_VARS)
    #as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    #request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    #request_url = request_url + "&as=a1qwert123&cp=cbfhckdckkde1&mas=01937dea4a12a8c410eb526555c121d44decec4c0ccc0c8666c61c"
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example 30
def test_get_collection_bad():
    """
    Request collections using bad identifiers.

    All are expected to return 404.
    """
    session = requests.Session()
    for _ in range(2):
        response = make_request(session, f"/api/v1/collection/{uuid.uuid4()}")
        assert response.code == 404
        assert not response.data

    for _ in range(2):
        response = make_request(session,
                                f"/api/v1/collection/{random_string()}")
        assert response.code == 404
        assert not response.data
Example 31
def test_delete_order_bad():
    """Confirm that bad uuids get an appropriate response."""
    session = requests.Session()

    as_user(session, USERS["data"])
    for _ in range(2):
        response = make_request(session,
                                f"/api/v1/order/{random_string()}",
                                method="DELETE")
        assert response.code == 404
        assert not response.data

    for _ in range(2):
        response = make_request(session,
                                f"/api/v1/order/{uuid.uuid4()}",
                                method="DELETE")
        assert response.code == 404
        assert not response.data
Example 32
def archive_shows():
    url = h.extract_var(args, 'url')

    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

    ul = h.bs_find_with_class(soup, 'ul', 'archive-shows')

    for li in ul.findAll('li'):
        a = li.find('a')
        a_attrs = dict(a.attrs)
        h.add_dir(addon_handle, base_url, a_attrs['title'], a_attrs['href'], 'show')
Example 33
def episode():
    url = h.extract_var(args, 'url')

    name = h.extract_var(args, 'name')

    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

    div = h.bs_find_with_class(soup, 'div', 'video-player')

    scripts = div.findAll('script')
    script = None
    for _script in scripts:
        if 'CryptoJS' in _script.text:
            script = _script
            break

    url = ''
    if script:
        script_text = script.text
        _dailytoday = ''
        _subject = ''

        parser = Parser()
        tree = parser.parse(script.text)
        for node in tree.children():
            ecma = node.to_ecma()
            if ecma.startswith('var dailytoday ='):
                _dailytoday = node.children()[0].children()[1].to_ecma()[1:-1]
            elif ecma.startswith('var subject ='):
                _subject = node.children()[0].children()[1].to_ecma()[1:-1]
            # elif "var bigmumbai = " not in ecma and "bigmumbai = " in ecma:
            #     print ecma

        if _dailytoday and _subject:
            url = decrypt.decrypt_url(_dailytoday, _subject)
        else:
            url = script.text.split('bigmumbai = ', 2)[2].split(';')[0][1:-1]

        print url
        plot = h.bs_find_with_class(soup, 'div', 'vp-info').find('span', {'itemprop': 'description'}).text
        thumbnail = soup.find('div', {'itemprop': 'video'}).find('meta', {'itemprop': 'thumbnailUrl'})['content']
        h.add_dir_video(addon_handle, name, url, thumbnail, plot)
    else:
        iframe = div.find('iframe')
        if iframe:
            attrs = dict(iframe.attrs)
            youtube_url = attrs['src']
            print youtube_url
            video_id = urlparse.urlparse(youtube_url).path.replace('/embed/', '')
            url = 'plugin://plugin.video.youtube/play/?video_id=%s' % video_id
            h.add_dir_video(addon_handle, name, url, '', '')
Example 34
def fetch_listing():

    global crawl_time
    url = dequeue_url()
    if not url:
        log("WARNING: No URLs found in the queue. Retrying...")
        pile.spawn(fetch_listing)
        return

    page, html = make_request(url)
    if not page:
        return

    items = page.findAll("li", "s-result-item")
    log("Found {} items on {}".format(len(items), url))

    for item in items[:settings.max_details_per_listing]:

        product_image = get_primary_img(item)
        if not product_image:
            log("No product image detected, skipping")
            continue

        product_title = get_title(item)
        product_url = get_url(item)
        product_price = get_price(item)

        product = ProductRecord(
            title=product_title,
            product_url=format_url(product_url),
            listing_url=format_url(url),
            price=product_price,
            primary_img=product_image,
            crawl_time=crawl_time,
        )
        product_id = product.save()
        # download_image(product_image, product_id)

    # add next page to queue
    next_link = page.find("a", id="pagnNextLink")
    if next_link:
        log(" Found 'Next' link on {}: {}".format(url, next_link["href"]))
        enqueue_url(next_link["href"])
        pile.spawn(fetch_listing)
Example 35
def show():
    url = h.extract_var(args, 'url')

    url = '%svideo/' % (url)

    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

    info_div = h.bs_find_with_class(soup, 'div', 'video-n-info-wrap')

    pagination = h.bs_find_with_class(info_div, 'ul', 'pagination')
    pages = {
        'prev': [],
        'next': []
    }
    if pagination:
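        # list items before the 'active' entry are previous pages,
        # the ones after it are next pages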
        page_type = 'prev'
        pages_li = pagination.findAll('li')[1:-1]
        for li in pages_li:
            attrs = dict(li.attrs)
            if 'class' in attrs and attrs['class'] == 'active':
                page_type = 'next'
            else:
                a = li.find('a')
                a_attrs = dict(a.attrs)
                pages[page_type].append({
                    'href': a_attrs['href'],
                    'page': a.text
                })

    for page in pages['prev']:
        h.add_dir(addon_handle, base_url, '<< Page %s' % page['page'], page['href'], 'show')

    related_div = h.bs_find_with_class(info_div, 'div', 'related-videos')
    ul = related_div.find('ul')
    for li in ul.findAll('li'):
        a = li.find('a')
        a_attrs = dict(a.attrs)
        href = a_attrs['href']
        # if href.endswith('-full-episode.html'):
        h.add_dir(addon_handle, base_url, a_attrs['title'], href, 'episode', dict(a.find('img').attrs)['src'])

    for page in pages['next']:
        h.add_dir(addon_handle, base_url, '>> Page %s' % page['page'], page['href'], 'show')
Example 36
def current_shows():
    url = h.extract_var(args, 'url')

    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

    h2s = h.bs_find_all_with_class(soup, 'h2', 'bubble-title')

    # XXX: If want sorted
    # import operator
    # shows = {}
    # shows[a_attrs['href']] = a_attrs['title']
    # shows = sorted(shows.items(), key=operator.itemgetter(1))

    # XXX: View mode thumbnail supported in xbmcswift2

    h2 = None
    for h2 in h2s:
        if h2.text == 'Current Shows':
            for li in h2.findNext('ul').findAll('li'):
                a = li.find('a')
                a_attrs = dict(a.attrs)
                img_src = dict(a.find('img').attrs)['src']
                h.add_dir(addon_handle, base_url, a_attrs['title'], a_attrs['href'], 'show', img_src, img_src)
            break
Example 37
    for config in changed:
        log = " - " + config + ' (' + ', '.join(changed_attributes[config]) + ')'
        if len(changed_attributes[config]) != 1 or 'nb_hits' not in changed_attributes[config]:
            log = '\033[0;35m' + log + '\033[0m'
        print(log)

print("")

if len(added) > 0 or len(removed) > 0 or len(changed) > 0:
    if helpers.confirm() is True:
        if len(added) > 0:
            print("")
            for config in added:
                key = algolia_helper.add_docsearch_key(config)
                print(config + ' (' + key + ')')
                helpers.make_request('/', 'POST', ref_configs[config])

        if len(changed) > 0:
            print("")

            for config in changed:
                config_id = str(inverted_actual_configs[config])
                key = algolia_helper.get_docsearch_key(config)

                print(config + ' (' + key + ')')
                helpers.make_request('/' + config_id, 'PUT', ref_configs[config])
                helpers.make_request('/' + config_id + '/reindex', 'POST')

        for config in removed:
            config_id = str(inverted_actual_configs[config])
Example 38
def main_index():
    soup = BeautifulSoup(h.make_request(SITE_ROOT, cookie_file, cookie_jar))

    scripts = soup.find('body').findAll('script', {'src': None})

    script = None
    for s in scripts:
        if 'var sn = ' in s.text:
            script = s

    script_text = script.text
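    # carve values out of the inline JS by raw string offsets,
    # starting with 'var sn = ...;'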

    idx1 = script_text.find('var sn = ')
    idx2 = script_text[idx1:].find(';')
    sn = script_text[idx1 + 10:idx1 + idx2 - 1]

    script_text = script_text[idx1 + idx2 - 1:]

    idx1 = script_text.find('$.ajax({url:')
    idx2 = script_text[idx1:].find(',')
    lb_url = script_text[idx1 + 14:idx1 + idx2 - 1]

    lb_info = h.make_request(lb_url, cookie_file, cookie_jar)
    lb = lb_info.split('=')[1]

    script_text = script_text[idx1 + idx2 - 1:]

    idx1 = script_text.find('function showChannels (){')
    idx2 = script_text[idx1:].find('data = ')
    idx1 = idx1 + idx2
    idx2 = script_text[idx1:].find("'")
    idx1 = idx1 + idx2 + 1
    idx2 = script_text[idx1:].find("'")

    u = script_text[idx1: idx1 + idx2] + sn

    idx1 = script_text.find('function createVideo(')
    idx2 = script_text[idx1:].find('url = ')
    idx1 = idx1 + idx2 + 7
    idx2 = script_text[idx1:].find(";")
    _s_u = script_text[idx1: idx1 + idx2 - 1].split('"')
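    # splitting the createVideo() url expression on '"' yields a template;
    # the odd slots (the JS variables) are filled in per stream below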

    channels_info = demjson.decode(h.make_request(u, cookie_file, cookie_jar))
    for channel_info in channels_info['channelsList']:
        caption = channel_info['caption']
        channel_name = channel_info['channelName'].strip('\n')

        name = '%s (%s)' % (caption, channel_name)

        if 'streamsList' in channel_info and len(channel_info['streamsList']) > 0:
            streams_info = []
            for stream in channel_info['streamsList']:
                s_u = list(_s_u)
                s_u[1] = lb
                s_u[3] = stream['streamName']
                s_u[5] = str(stream['streamId'])

                stream_info = {
                    'stream_caption': stream['caption'],
                    'stream_url': ''.join(s_u),
                }
                streams_info.append(json.dumps(stream_info))
            h.add_dir(addon_handle, base_url, name, urllib.urlencode({'streams_info': streams_info}), 'streams')
        else:
            # offline
            h.add_dir_video(addon_handle, name, '', '', '')