Example 1
    def setUp(self):
        self.config = None #placeholder
        self.TEST_PROVIDER_CONFIG = [
            ("wikipedia", {})
        ]
        # hacky way to delete the "ti" db, then make it fresh again for each test.
        temp_dao = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        temp_dao.delete_db(os.getenv("CLOUDANT_DB"))
        self.d = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        provider_queues = {}
        providers = ProviderFactory.get_providers(self.TEST_PROVIDER_CONFIG)
        for provider in providers:
            provider_queues[provider.provider_name] = backend.PythonQueue(provider.provider_name+"_queue")

        self.b = backend.Backend(
            backend.RedisQueue("alias-unittest", self.r), 
            provider_queues, 
            [backend.PythonQueue("couch_queue")], 
            self.r)

        self.fake_item = {
            "_id": "1",
            "type": "item",
            "num_providers_still_updating":1,
            "aliases":{"pmid":["111"]},
            "biblio": {},
            "metrics": {}
        }
        self.fake_aliases_dict = {"pmid":["222"]}
        self.tiid = "abcd"
Example 2
    def setUp(self):
        self.d = None
        
        self.db = setup_postgres_for_unittests(db, app)

        # setup a clean new redis test database.  We're putting unittest redis at DB Number 8.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        self.test_alias_registered = ("doi", "10.1371/journal.pcbi.2")
        self.test_email = '*****@example.com'
        self.test_meta = {    
                    'max_registered_items': 3, 
                    'planned_use': 'individual CV', 
                    'email': self.test_email, 
                    'notes': '', 
                    'api_key_owner': 'Julia Smith', 
                    "example_url":"", 
                    "organization":"NASA"
                }

        test_meta2 = copy.deepcopy(self.test_meta)
        test_meta2["email"] = '*****@*****.**'
        test_meta2["prefix"] = "SFU"
        self.existing_api_user = ApiUser(**test_meta2)

        self.existing_registered_item = RegisteredItem(self.test_alias_registered, self.existing_api_user)

        self.db.session.add(self.existing_api_user)
        self.db.session.add(self.existing_registered_item)
        self.db.session.commit()
Example 3
    def setUp(self):
        self.config = None  #placeholder
        self.TEST_PROVIDER_CONFIG = [("wikipedia", {})]
        self.d = None

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379",
                                  db=REDIS_UNITTEST_DATABASE_NUMBER)
        self.r.flushdb()

        provider_queues = {}
        providers = ProviderFactory.get_providers(self.TEST_PROVIDER_CONFIG)
        for provider in providers:
            provider_queues[provider.provider_name] = backend.PythonQueue(
                provider.provider_name + "_queue")

        self.b = backend.Backend(backend.RedisQueue("alias-unittest",
                                                    self.r), provider_queues,
                                 [backend.PythonQueue("couch_queue")], self.r)

        self.fake_item = {
            "_id": "1",
            "type": "item",
            "num_providers_still_updating": 1,
            "aliases": {
                "pmid": ["111"]
            },
            "biblio": {},
            "metrics": {},
            "last_modified": datetime.datetime(2013, 1, 1)
        }
        self.fake_aliases_dict = {"pmid": ["222"]}
        self.tiid = "abcd"

        self.db = setup_postgres_for_unittests(db, app)
Example 4
def fix_github_year():
    from totalimpact import item, tiredis

    myredis = tiredis.from_url(os.getenv("REDISTOGO_URL"), db=0)

    view_name = "queues/by_alias"
    view_rows = db.view(view_name, include_docs=True)
    row_count = 0
    page_size = 500
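    # the start/end keys below limit the view scan to rows whose "url" alias starts with "https://github."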
    start_key = ["url", "https://github.0000000"]
    end_key = ["url", "https://github.zzzzzzzz"]

    from couch_paginator import CouchPaginator

    page = CouchPaginator(db, view_name, page_size, include_docs=True, start_key=start_key, end_key=end_key)

    while page:
        for row in page:
            doc = row.doc
            print row.id
            try:
                doc["biblio"]["year"] = doc["biblio"]["create_date"][0:4]
                db.save(doc)
            except KeyError:
                pass
            row_count += 1
            print "."
        logger.info("%i. getting new page, last id was %s" % (row_count, row.id))
        if page.has_next:
            page = CouchPaginator(db, view_name, page_size, start_key=page.next, end_key=end_key, include_docs=True)
        else:
            page = None

    print "number items = ", row_count
Example 5
def update_github():
    from totalimpact import item, tiredis

    myredis = tiredis.from_url(os.getenv("REDISTOGO_URL"), db=0)

    view_name = "queues/by_alias"
    view_rows = db.view(view_name, include_docs=False)
    row_count = 0
    page_size = 500
    start_key = ["url", "https://github.0000000"]
    end_key = ["url", "https://github.zzzzzzzz"]

    from couch_paginator import CouchPaginator

    page = CouchPaginator(db, view_name, page_size, include_docs=False, start_key=start_key, end_key=end_key)

    while page:
        for row in page:
            tiid = row.id
            item.start_item_update([tiid], myredis, db, sleep_in_seconds=0.05)
            row_count += 1
            print "."
        logger.info("%i. getting new page, last id was %s" % (row_count, row.id))
        if page.has_next:
            page = CouchPaginator(db, view_name, page_size, start_key=page.next, end_key=end_key, include_docs=False)
        else:
            page = None

    print "number items = ", row_count
Example 6
    def setUp(self):
        self.config = None #placeholder
        self.TEST_PROVIDER_CONFIG = [
            ("wikipedia", {})
        ]
        self.d = None

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        provider_queues = {}
        providers = ProviderFactory.get_providers(self.TEST_PROVIDER_CONFIG)
        for provider in providers:
            provider_queues[provider.provider_name] = backend.PythonQueue(provider.provider_name+"_queue")

        self.b = backend.Backend(
            backend.RedisQueue("alias-unittest", self.r), 
            provider_queues, 
            [backend.PythonQueue("couch_queue")], 
            self.r)

        self.fake_item = {
            "_id": "1",
            "type": "item",
            "num_providers_still_updating":1,
            "aliases":{"pmid":["111"]},
            "biblio": {},
            "metrics": {},
            "last_modified": datetime.datetime(2013, 1, 1)
        }
        self.fake_aliases_dict = {"pmid":["222"]}
        self.tiid = "abcd"

        self.db = setup_postgres_for_unittests(db, app)
Example 7
def fix_github_year():
    from totalimpact import item, tiredis
    myredis = tiredis.from_url(os.getenv("REDISTOGO_URL"), db=0)

    view_name = "queues/by_alias"
    view_rows = db.view(view_name, include_docs=True)
    row_count = 0
    page_size = 500
    start_key = ["url", "https://github.0000000"]
    end_key = ["url", "https://github.zzzzzzzz"]

    from couch_paginator import CouchPaginator
    page = CouchPaginator(db, view_name, page_size, include_docs=True, start_key=start_key, end_key=end_key)

    while page:
        for row in page:
            doc = row.doc
            print row.id
            try:
                doc["biblio"]["year"] = doc["biblio"]["create_date"][0:4]
                db.save(doc)
            except KeyError:
                pass
            row_count += 1
            print "."
        logger.info(u"%i. getting new page, last id was %s" %(row_count, row.id))
        if page.has_next:
            page = CouchPaginator(db, view_name, page_size, start_key=page.next, end_key=end_key, include_docs=True)
        else:
            page = None

    print "number items = ", row_count
Example 8
def update_github():
    from totalimpact import item, tiredis
    myredis = tiredis.from_url(os.getenv("REDISTOGO_URL"), db=0)

    view_name = "queues/by_alias"
    view_rows = db.view(view_name, include_docs=False)
    row_count = 0
    page_size = 500
    start_key = ["url", "https://github.0000000"]
    end_key = ["url", "https://github.zzzzzzzz"]

    from couch_paginator import CouchPaginator
    page = CouchPaginator(db, view_name, page_size, include_docs=False, start_key=start_key, end_key=end_key)

    while page:
        for row in page:
            tiid = row.id
            item.start_item_update([tiid], myredis, db, sleep_in_seconds=0.05)                        
            row_count += 1
            print "."
        logger.info(u"%i. getting new page, last id was %s" %(row_count, row.id))
        if page.has_next:
            page = CouchPaginator(db, view_name, page_size, start_key=page.next, end_key=end_key, include_docs=False)
        else:
            page = None

    print "number items = ", row_count
Example 9
    def setUp(self):
        # hacky way to delete the "ti" db, then make it fresh again for each test.
        temp_dao = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        temp_dao.delete_db(os.getenv("CLOUDANT_DB"))
        self.d = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        self.d.update_design_doc()

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()
Example 10
    def setUp(self):
        self.d = None

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379",
                                  db=REDIS_UNITTEST_DATABASE_NUMBER)
        self.r.flushdb()

        self.db = setup_postgres_for_unittests(db, app)

        self.aliases = [["doi", "10.123"], ["doi", "10.124"],
                        ["doi", "10.125"]]
Example 11
def start_product_update(profile_id, tiids_to_update, priority):
    myredis = tiredis.from_url(os.getenv("REDIS_URL"), db=tiredis.REDIS_MAIN_DATABASE_NUMBER)  # main app is on DB 0

    # do all of this first and quickly
    for tiid in tiids_to_update:
        myredis.clear_provider_task_ids(tiid)
        myredis.set_provider_task_ids(tiid, ["STARTED"])  # set this right away
    
    # this import is here to avoid circular dependencies
    from core_tasks import put_on_celery_queue
    put_on_celery_queue(profile_id, tiids_to_update, priority)
    return
Example 12
    def setUp(self):
        # hacky way to delete the "ti" db, then make it fresh again for each test.
        temp_dao = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        temp_dao.delete_db(os.getenv("CLOUDANT_DB"))
        self.d = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        self.d.update_design_doc()
        
        self.myrefsets = {"nih": {"2011": {
                        "facebook:comments": {0: [1, 99], 1: [91, 99]}, "mendeley:groups": {0: [1, 99], 3: [91, 99]}
                    }}}

        # setup a clean new redis test database.  We're putting unittest redis at DB Number 8.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()
Example 13
    def setUp(self):
        self.d = None

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=REDIS_UNITTEST_DATABASE_NUMBER)
        self.r.flushdb()

        self.db = setup_postgres_for_unittests(db, app)

        self.aliases = [
            ["doi", "10.123"],
            ["doi", "10.124"],
            ["doi", "10.125"]
        ]
Example 14
def main(action_type, number_to_update=35, specific_publisher=None):
    # 35 every 10 minutes is 35 * 6 per hour * 24 hours = 5040 per day

    redis_url = os.getenv("REDIS_URL")

    myredis = tiredis.from_url(redis_url)

    try:
        if action_type == "gold_update":
            print "running " + action_type
            tiids = gold_update(number_to_update, myredis)
    except (KeyboardInterrupt, SystemExit): 
        # this approach is per http://stackoverflow.com/questions/2564137/python-how-to-terminate-a-thread-when-main-program-ends
        sys.exit()
Example 15
    def setUp(self):
        # setup a clean new redis database at our unittest redis DB location: Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

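        # stub out Bibtex pagination and member_items with canned values so no real bibtex parsing happens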
        bibtex.Bibtex.paginate = lambda self, x: {"pages": [1,2,3,4], "number_entries":10}
        bibtex.Bibtex.member_items = lambda self, x: ("doi", str(x))
        self.memberitems_resp = [
            ["doi", "1"],
            ["doi", "2"],
            ["doi", "3"],
            ["doi", "4"],
        ]

        self.mi = models.MemberItems(bibtex.Bibtex(), self.r)
Example 16
    def setUp(self):
        # hacky way to delete the "ti" db, then make it fresh again for each test.
        temp_dao = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        temp_dao.delete_db(os.getenv("CLOUDANT_DB"))
        self.d = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        self.d.update_design_doc()

        # do the same thing for the redis db.  We're using DB 8 for unittests.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        #setup api test client
        self.app = app
        self.app.testing = True
        self.client = self.app.test_client()
Example 17
    def setUp(self):

        self.db = setup_postgres_for_unittests(db, app)

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()
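        # reference times used below to give the test items different ages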
        now = datetime.datetime.utcnow()
        self.before = now - datetime.timedelta(days=2)
        self.last_week = now - datetime.timedelta(days=7)
        self.last_year = now - datetime.timedelta(days=370)

        # save basic item beforehand, and some additional items
        self.fake_item_doc = {
            "_id": "tiid1",
            "type": "item",
            "last_modified": now.isoformat(),
            "last_update_run": now.isoformat(),
            "aliases":{"doi":["10.7554/elife.1"]},
            "biblio": {"year":"2012"},
            "metrics": {}
        }
        self.fake_item_obj = item_module.create_objects_from_item_doc(self.fake_item_doc)        
        self.db.session.add(self.fake_item_obj)

        another_elife = copy.copy(self.fake_item_doc)
        another_elife["_id"] = "tiid2"
        another_elife["aliases"] = {"doi":["10.7554/ELIFE.2"]}
        another_elife["last_modified"] = self.before.isoformat()
        another_elife["last_update_run"] = self.before.isoformat()
        another_elife_obj = item_module.create_objects_from_item_doc(another_elife)        
        self.db.session.add(another_elife_obj)

        different_journal = copy.copy(self.fake_item_doc)
        different_journal["_id"] = "tiid3"
        different_journal["aliases"] = {"doi":["10.3897/zookeys.3"], "biblio":[{"year":1999}]}
        different_journal["last_modified"] = now.isoformat()
        different_journal["last_update_run"] = self.last_week.isoformat()
        different_journal_obj = item_module.create_objects_from_item_doc(different_journal)        
        self.db.session.add(different_journal_obj)

        different_journal2 = copy.copy(different_journal)
        different_journal2["_id"] = "tiid4"
        different_journal2["last_update_run"] = self.last_year.isoformat()
        different_journal_obj2 = item_module.create_objects_from_item_doc(different_journal2)        
        self.db.session.add(different_journal_obj2)

        self.db.session.commit()
Example 18
    def setUp(self):

        self.db = setup_postgres_for_unittests(db, app)

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=REDIS_UNITTEST_DATABASE_NUMBER)
        self.r.flushdb()
        now = datetime.datetime.utcnow()
        self.before = now - datetime.timedelta(days=2)
        self.last_week = now - datetime.timedelta(days=7)
        self.last_year = now - datetime.timedelta(days=370)

        # save basic item beforehand, and some additional items
        self.fake_item_doc = {
            "_id": "tiid1",
            "type": "item",
            "last_modified": now.isoformat(),
            "last_update_run": now.isoformat(),
            "aliases":{"doi":["10.7554/elife.1"]},
            "biblio": {"year":"2012"},
            "metrics": {}
        }
        self.fake_item_obj = item_module.create_objects_from_item_doc(self.fake_item_doc)        
        self.db.session.add(self.fake_item_obj)

        another_elife = copy.copy(self.fake_item_doc)
        another_elife["_id"] = "tiid2"
        another_elife["aliases"] = {"doi":["10.7554/ELIFE.2"]}
        another_elife["last_modified"] = self.before.isoformat()
        another_elife["last_update_run"] = self.before.isoformat()
        another_elife_obj = item_module.create_objects_from_item_doc(another_elife)        
        self.db.session.add(another_elife_obj)

        different_journal = copy.copy(self.fake_item_doc)
        different_journal["_id"] = "tiid3"
        different_journal["aliases"] = {"doi":["10.3897/zookeys.3"], "biblio":[{"year":1999}]}
        different_journal["last_modified"] = now.isoformat()
        different_journal["last_update_run"] = self.last_week.isoformat()
        different_journal_obj = item_module.create_objects_from_item_doc(different_journal)        
        self.db.session.add(different_journal_obj)

        different_journal2 = copy.copy(different_journal)
        different_journal2["_id"] = "tiid4"
        different_journal2["last_update_run"] = self.last_year.isoformat()
        different_journal_obj2 = item_module.create_objects_from_item_doc(different_journal2)        
        self.db.session.add(different_journal_obj2)

        self.db.session.commit()
Example 19
    def setUp(self):
        # setup a clean new redis database at our unittest redis DB location: Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        bibtex.Bibtex.paginate = lambda self, x: {
            "pages": [1, 2, 3, 4],
            "number_entries": 10
        }
        bibtex.Bibtex.member_items = lambda self, x: ("doi", str(x))
        self.memberitems_resp = [
            ["doi", "1"],
            ["doi", "2"],
            ["doi", "3"],
            ["doi", "4"],
        ]

        self.mi = models.MemberItems(bibtex.Bibtex(), self.r)
Example 20
def main():

    mydao = None

    myredis = tiredis.from_url(os.getenv("REDISTOGO_URL"))
    alias_queue = RedisQueue("aliasqueue", myredis)
    # to clear alias_queue:
    #import redis, os
    #myredis = redis.from_url(os.getenv("REDISTOGO_URL"))
    #myredis.delete(["aliasqueue"])


    # these need to match the tiid alphabet defined in models:
    couch_queues = {}
    for i in "abcdefghijklmnopqrstuvwxyz1234567890":
        couch_queues[i] = PythonQueue(i+"_couch_queue")
        couch_worker = CouchWorker(couch_queues[i], myredis, mydao)
        couch_worker.spawn_and_loop() 
        logger.info(u"launched backend couch worker with {i}_couch_queue".format(
            i=i))


    polling_interval = 0.1   # how many seconds between polling to talk to provider
    provider_queues = {}
    providers = ProviderFactory.get_providers(default_settings.PROVIDERS)
    for provider in providers:
        provider_queues[provider.provider_name] = PythonQueue(provider.provider_name+"_queue")
        provider_worker = ProviderWorker(
            provider, 
            polling_interval, 
            alias_queue,
            provider_queues[provider.provider_name], 
            couch_queues,
            ProviderWorker.wrapper,
            myredis)
        provider_worker.spawn_and_loop()

    backend = Backend(alias_queue, provider_queues, couch_queues, myredis)
    try:
        backend.run_in_loop() # don't need to spawn this one
    except (KeyboardInterrupt, SystemExit): 
        # this approach is per http://stackoverflow.com/questions/2564137/python-how-to-terminate-a-thread-when-main-program-ends
        sys.exit()
Example 21
def main(action_type, number_to_update=35):
    # 35 every 10 minutes is 35 * 6 per hour * 24 hours = 5040 per day

    cloudant_db = os.getenv("CLOUDANT_DB")
    cloudant_url = os.getenv("CLOUDANT_URL")
    redis_url = os.getenv("REDIS_URL")

    mydao = dao.Dao(cloudant_url, cloudant_db)
    myredis = tiredis.from_url(redis_url)

    try:
        if action_type == "active_publishers":
            print "running " + action_type
            tiids = update_active_publisher_items(number_to_update, myredis, mydao)
        elif action_type == "least_recently_updated":
            print "running " + action_type
            tiids = update_least_recently_updated(number_to_update, myredis, mydao)
    except (KeyboardInterrupt, SystemExit): 
        # this approach is per http://stackoverflow.com/questions/2564137/python-how-to-terminate-a-thread-when-main-program-ends
        sys.exit()
Example 22
    def setUp(self):
        from totalimpact import dao

        # hacky way to delete the "ti" db, then make it fresh again for each test.
        temp_dao = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        temp_dao.delete_db(os.getenv("CLOUDANT_DB"))
        self.d = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        self.d.update_design_doc()

        # setup a clean new redis test database.  We're putting unittest redis at DB Number 8.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        self.test_alias = ("doi", "10.1371/journal.pcbi.1")
        self.test_alias_registered = ("doi", "10.1371/journal.pcbi.2")
        self.test_alias_registered_string = ":".join(self.test_alias_registered)

        self.sample_user_api_doc = {
            'key_history': {'2012-12-27T12:09:20.072080': 'SFUlqzam8'},
            'created': '2012-12-27T12:09:20.072080',
            'current_key': 'SFUlqzam8',
            'registered_items': {self.test_alias_registered_string: {"tiid": "tiid2", "registered_date": "2012etc"}},
            'max_registered_items': 3,
            'meta': {'usage': 'individual CV', 'api_limit': '', 'notes': '', 'api_key_owner': '', 'email': ''},
            '_id': 'XeZhf8BWNgM5r9B9Xu3whT',
            'type': 'api_user'
        }
        self.d.db.save(self.sample_user_api_doc)       
Example 23
def main():

    mydao = None

    myredis = tiredis.from_url(os.getenv("REDISTOGO_URL"))
    alias_queue = RedisQueue("aliasqueue", myredis)
    # to clear alias_queue:
    #import redis, os
    #myredis = redis.from_url(os.getenv("REDISTOGO_URL"))
    #myredis.delete(["aliasqueue"])

    # these need to match the tiid alphabet defined in models:
    couch_queues = {}
    for i in "abcdefghijklmnopqrstuvwxyz1234567890":
        couch_queues[i] = PythonQueue(i + "_couch_queue")
        couch_worker = CouchWorker(couch_queues[i], myredis, mydao)
        couch_worker.spawn_and_loop()
        logger.info(
            u"launched backend couch worker with {i}_couch_queue".format(i=i))

    polling_interval = 0.1  # how many seconds between polling to talk to provider
    provider_queues = {}
    providers = ProviderFactory.get_providers(default_settings.PROVIDERS)
    for provider in providers:
        provider_queues[provider.provider_name] = PythonQueue(
            provider.provider_name + "_queue")
        provider_worker = ProviderWorker(
            provider, polling_interval, alias_queue,
            provider_queues[provider.provider_name], couch_queues,
            ProviderWorker.wrapper, myredis)
        provider_worker.spawn_and_loop()

    backend = Backend(alias_queue, provider_queues, couch_queues, myredis)
    try:
        backend.run_in_loop()  # don't need to spawn this one
    except (KeyboardInterrupt, SystemExit):
        # this approach is per http://stackoverflow.com/questions/2564137/python-how-to-terminate-a-thread-when-main-program-ends
        sys.exit()
Example 24
    def setUp(self):
        # hacky way to delete the "ti" db, then make it fresh again for each test.
        temp_dao = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        temp_dao.delete_db(os.getenv("CLOUDANT_DB"))
        self.d = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        # we need the views, so make sure to create them
        self.d.update_design_doc()

        # do the same thing for the redis db, set up the test redis database.  We're using DB Number 8
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()
        now = datetime.datetime.now()
        yesterday = now - datetime.timedelta(days=1)
        last_week = now - datetime.timedelta(days=7)

        # save basic item beforehand, and some additional items
        self.fake_item = {
            "_id": "tiid1",
            "type": "item",
            "last_modified": now.isoformat(),
            "aliases":{"doi":["10.7554/elife.1"]},
            "biblio": {"year":2012},
            "metrics": {}
        }
        self.d.save(self.fake_item)

        another_elife = copy.copy(self.fake_item)
        another_elife["_id"] = "tiid2"
        another_elife["aliases"] = {"doi":["10.7554/ELIFE.2"]}
        another_elife["last_modified"] = yesterday.isoformat()
        self.d.save(another_elife)

        different_journal = copy.copy(self.fake_item)
        different_journal["_id"] = "tiid3"
        different_journal["aliases"] = {"doi":["10.3897/zookeys.3"]}
        different_journal["last_modified"] = now.isoformat()
        different_journal["last_update_run"] = last_week.isoformat()
        self.d.save(different_journal)
Example 25
    def setUp(self):
        self.d = None

        self.db = setup_postgres_for_unittests(db, app)

        # setup a clean new redis test database.  We're putting unittest redis at DB Number 8.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        self.test_alias_registered = ("doi", "10.1371/journal.pcbi.2")
        self.test_email = '*****@example.com'
        self.test_meta = {
            'max_registered_items': 3,
            'planned_use': 'individual CV',
            'email': self.test_email,
            'notes': '',
            'api_key_owner': 'Julia Smith',
            "example_url": "",
            "organization": "NASA"
        }

        test_meta2 = copy.deepcopy(self.test_meta)
        test_meta2["email"] = '*****@*****.**'
        test_meta2["prefix"] = "SFU"
        self.existing_api_user = ApiUser(**test_meta2)

        self.existing_registered_item = RegisteredItem(
            self.test_alias_registered, self.existing_api_user)

        self.db.session.add(self.existing_api_user)
        self.db.session.add(self.existing_registered_item)
        self.db.session.commit()
Example 26
def set_redis(url, db):
    """useful for unit testing, where you want to use a local database
    """
    global myredis 
    myredis = tiredis.from_url(url, db)
    return myredis
Example 27
import os
import couchdb
import redis
import uuid

from totalimpact import dao, app, tiredis, collection, api_user, mixpanel
from totalimpact import item as item_module
from totalimpact.models import MemberItems, UserFactory, NotAuthenticatedError
from totalimpact.providers.provider import ProviderFactory, ProviderItemNotFoundError, ProviderError, ProviderServerError, ProviderTimeout
from totalimpact import default_settings
import logging


logger = logging.getLogger("ti.views")
logger.setLevel(logging.DEBUG)

mydao = dao.Dao(os.environ["CLOUDANT_URL"], os.getenv("CLOUDANT_DB"))
myredis = tiredis.from_url(os.getenv("REDISTOGO_URL"), db=0) #main app is on DB 0

logger.debug("Building reference sets")
myrefsets = None
myrefsets_histograms = None
try:
    (myrefsets, myrefsets_histograms) = collection.build_all_reference_lookups(myredis, mydao)
    logger.debug("Reference sets dict has %i keys" %len(myrefsets.keys()))
except (couchdb.ResourceNotFound, LookupError, AttributeError), e:
    logger.error("Exception %s: Unable to load reference sets" % (e.__repr__()))

def set_db(url, db):
    """useful for unit testing, where you want to use a local database
    """
    global mydao 
    mydao = dao.Dao(url, db)
Example 28
def set_redis(url, db):
    """useful for unit testing, where you want to use a local database
    """
    global myredis
    myredis = tiredis.from_url(url, db)
    return myredis
Example 29
import os

import analytics
import requests

from totalimpact import app, tiredis, collection, api_user, incoming_email
from totalimpact import item as item_module
from totalimpact.models import MemberItems, NotAuthenticatedError
from totalimpact.providers.provider import ProviderFactory, ProviderItemNotFoundError, ProviderError, ProviderServerError, ProviderTimeout
from totalimpact import unicode_helpers
from totalimpact import default_settings
import logging

logger = logging.getLogger("ti.views")
logger.setLevel(logging.DEBUG)

mydao = None
myredis = tiredis.from_url(os.getenv("REDISTOGO_URL"),
                           db=0)  # main app is on DB 0

logger.debug(u"Building reference sets")
myrefsets = None
myrefsets_histograms = None
try:
    (myrefsets, myrefsets_histograms) = collection.build_all_reference_lookups(
        myredis, mydao)
    logger.debug(u"Reference sets dict has %i keys" % len(myrefsets.keys()))
except (LookupError, AttributeError), e:
    logger.error(u"Exception %s: Unable to load reference sets" %
                 (e.__repr__()))


def set_db(url, db):
    """useful for unit testing, where you want to use a local database
    """
    global mydao
    mydao = None
Example 30
    def setUp(self):
        ALIAS_DATA = {
            "title":["Why Most Published Research Findings Are False"],
            "url":["http://www.plosmedicine.org/article/info:doi/10.1371/journal.pmed.0020124"],
            "doi": ["10.1371/journal.pmed.0020124"]
        }


        STATIC_META = {
            "display_name": "readers",
            "provider": "Mendeley",
            "provider_url": "http://www.mendeley.com/",
            "description": "Mendeley readers: the number of readers of the article",
            "icon": "http://www.mendeley.com/favicon.ico",
            "category": "bookmark",
            "can_use_commercially": "0",
            "can_embed": "1",
            "can_aggregate": "1",
            "other_terms_of_use": "Must show logo and say 'Powered by Santa'",
            }

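        # dummy keys and values used to build the raw_history entries below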
        self.KEY1 = "8888888888.8"
        self.KEY2 = "9999999999.9"
        self.VAL1 = 1
        self.VAL2 = 2

        METRICS_DATA = {
            "ignore": False,
            "static_meta": STATIC_META,
            "provenance_url": ["http://api.mendeley.com/research/public-chemical-compound-databases/"],
            "values":{
                "raw": self.VAL1,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }

        METRICS_DATA2 = {
            "ignore": False,
            "latest_value": 21,
            "static_meta": STATIC_META,
            "provenance_url": ["http://api.mendeley.com/research/public-chemical-compound-databases/"],
            "values":{
                "raw": self.VAL1,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }

        METRICS_DATA3 = {
            "ignore": False,
            "latest_value": 31,
            "static_meta": STATIC_META,
            "provenance_url": ["http://api.mendeley.com/research/public-chemical-compound-databases/"],
            "values":{
                "raw": self.VAL1,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }

        BIBLIO_DATA = {
            "title": "An extension of de Finetti's theorem",
            "journal": "Advances in Applied Probability",
            "author": [
                "Pitman, J"
            ],
            "collection": "pitnoid",
            "volume": "10",
            "id": "p78",
            "year": "1978",
            "pages": "268 to 270"
        }


        self.ITEM_DATA = {
            "_id": "test",
            "created": 1330260456.916,
            "last_modified": 12414214.234,
            "aliases": ALIAS_DATA,
            "metrics": {
                "wikipedia:mentions": METRICS_DATA,
                "bar:views": METRICS_DATA2
            },
            "biblio": BIBLIO_DATA,
            "type": "item"
        }

        self.TEST_PROVIDER_CONFIG = [
            ("wikipedia", {})
        ]


        # hacky way to delete the "ti" db, then make it fresh again for each test.
        temp_dao = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        temp_dao.delete_db(os.getenv("CLOUDANT_DB"))
        self.d = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        self.d.update_design_doc()
        
        self.myrefsets = {"nih": {"2011": {
                        "facebook:comments": {0: [1, 99], 1: [91, 99]}, "mendeley:groups": {0: [1, 99], 3: [91, 99]}
                    }}}

        # setup a clean new redis test database.  We're putting unittest redis at DB Number 8.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()
Example 31
    def setUp(self):
        self.BIBLIO_DATA = {
            "title": "An extension of de Finetti's theorem",
            "journal": "Advances in Applied Probability",
            "author": [
                "Pitman, J"
            ],
            "authors": "Pitman",
            "collection": "pitnoid",
            "volume": "10",
            "id": "p78",
            "year": "1978",
            "pages": "268 to 270"
        }

        self.ALIAS_DATA = {
            "title":["Why Most Published Research Findings Are False"],
            "url":["http://www.plosmedicine.org/article/info:doi/10.1371/journal.pmed.0020124"],
            "doi": ["10.1371/journal.pmed.0020124"],
            "biblio": [self.BIBLIO_DATA]
        }


        self.KEY1 = '2012-08-23T14:40:16.888888'
        self.KEY2 = '2012-08-23T14:40:16.999999'
        self.VAL1 = 1
        self.VAL2 = 2

        METRICS_DATA = {
            "provenance_url": "http://api.mendeley.com/research/public-chemical-compound-databases/",
            "values":{
                "raw": self.VAL2,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }

        METRICS_DATA2 = {
            "provenance_url": "http://api.mendeley.com/research/public-chemical-compound-databases/",
            "values":{
                "raw": self.VAL2,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }

        METRICS_DATA3 = {
            "provenance_url": "http://api.mendeley.com/research/public-chemical-compound-databases/",
            "values":{
                "raw": self.VAL2,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }


        self.ITEM_DATA = {
            "_id": "test",
            "created": '2012-08-23T14:40:16.399932',
            "last_modified": '2012-08-23T14:40:16.399932',
            "last_update_run": '2012-08-23T14:40:16.399932',
            "aliases": self.ALIAS_DATA,
            "metrics": {
                "wikipedia:mentions": METRICS_DATA,
                "altmetric_com:tweets": METRICS_DATA2
            },
            "biblio": self.BIBLIO_DATA,
            "type": "item"
        }

        self.TEST_PROVIDER_CONFIG = [
            ("wikipedia", {})
        ]

        self.d = None
        
        self.myrefsets = {"nih": {"2011": {
                        "facebook:comments": {0: [1, 99], 1: [91, 99]}, "mendeley:groups": {0: [1, 99], 3: [91, 99]}
                    }}}

        # setup a clean new redis test database.  We're putting unittest redis at DB Number 8.
        self.r = tiredis.from_url("redis://localhost:6379", db=REDIS_UNITTEST_DATABASE_NUMBER)
        self.r.flushdb()

        self.db = setup_postgres_for_unittests(db, app)
Example 32
import logging
import os

from celery.signals import task_postrun, task_prerun, task_failure, worker_process_init
from celery import group, chain, chord
from celery import current_app as celery_app
from celery.signals import task_sent
from celery.utils import uuid
from eventlet import timeout

from totalimpact import item as item_module
from totalimpact import db
from totalimpact import REDIS_MAIN_DATABASE_NUMBER
from totalimpact import tiredis, default_settings
from totalimpact.providers.provider import ProviderFactory, ProviderError, ProviderTimeout
import rate_limit

logger = logging.getLogger("core.tasks")
myredis = tiredis.from_url(os.getenv("REDIS_URL"), db=REDIS_MAIN_DATABASE_NUMBER)

rate = rate_limit.RateLimiter(redis_url=os.getenv("REDIS_URL"), redis_db=REDIS_MAIN_DATABASE_NUMBER)
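# allow at most 25 requests per second through the shared rate limiter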
rate.add_condition({'requests':25, 'seconds':1})


# from https://github.com/celery/celery/issues/1671#issuecomment-47247074
# pending this being fixed in useful celery version
"""
Monkey patch for celery.chord.type property
"""
def _type(self):
    if self._type:
        return self._type
    if self._app:
        app = self._app
Example 33
    def setUp(self):
        # we're putting unittests for redis in their own db (number 8) so they can be deleted with abandon
        self.r = tiredis.from_url("redis://localhost:6379",
                                  db=REDIS_UNITTEST_DATABASE_NUMBER)
        self.r.flushdb()
Example 34
    def setUp(self):
        """
        This test item is a lightly-modified version of a real doc from our
        demo collection; it's available at http://total-impact-core.herokuapp.com/collection/kn5auf
        """
        test_item = '''
            {
            "_id": "1aff9dfebea711e1bdf912313d1a5e63",
            "_rev": "968-c7891982fca2ea41346a20b80c2b888d",
            "aliases": {
                "doi": [
                    "10.5061/dryad.j1fd7"
                ],
                "title": [
                    "Data from: Data archiving is a good use of research funds",
                    "data from: data archiving is a good  investment"
                ],
                "url": [
                    "http://datadryad.org/handle/10255/dryad.33537",
                    "http://hdl.handle.net/10255/dryad.33537"
                ]
            },
            "biblio": {
                "authors": "Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock",
                "genre": "dataset",
                "h1": "Data from: Data archiving is a good  investment",
                "repository": "Dryad Digital Repository",
                "title": "Data from: Data archiving is a good  investment",
                "year": "2011"
            },
            "created": "2012-06-25T09:21:11.960271",
            "currently_updating": false,
            "last_modified": "2012-11-18T04:57:40.539053",
            "metrics": {
                "delicious:bookmarks": {
                    "provenance_url": "http://www.delicious.com/url/4794ddb7a3e934ba23165af65fcfa9cd",
                    "static_meta": {
                        "description": "The number of bookmarks to this artifact (maximum=100).",
                        "display_name": "bookmarks",
                        "icon": "http://www.delicious.com/favicon.ico",
                        "provider": "Delicious",
                        "provider_url": "http://www.delicious.com/"
                    },
                    "values": {
                        "raw": 1
                    }
                },
                "dryad:total_downloads": {
                    "provenance_url": "http://dx.doi.org/10.5061/dryad.j1fd7",
                    "static_meta": {
                        "description": "Dryad total downloads: combined number of downloads of the data package and data files",
                        "display_name": "total downloads",
                        "icon": "http:\\/\\/datadryad.org\\/favicon.ico",
                        "provider": "Dryad",
                        "provider_url": "http:\\/\\/www.datadryad.org\\/"
                    },
                    "values": {
                        "dryad": {
                            "CI95_lower": 91,
                            "CI95_upper": 98,
                            "estimate_lower": 96,
                            "estimate_upper": 96
                        },
                        "raw": 207,
                        "raw_history": {
                            "2012-06-25T09:21:16.027149": 132,
                            "2012-06-26T18:05:19.598432": 132,
                            "2012-06-26T20:10:16.858294": 132
                        }
                    }
                }
            },
            "type": "item"
        }
        '''

        test_api_user = """
                {
           "_id": "yDnhDa3fdFxxEsQnzYnA96",
           "created": "2012-11-19T16:11:17.713812",
           "current_key": "validkey",
           "registered_items": {
               "doi:10.1371/journal.pcbi.1000355": {
                   "tiid": "b229e24abec811e1887612313d1a5e63",
                   "registered_date": "2012-12-29T18:11:20.870026"
               }
           },
           "max_registered_items": 1000,
           "key_history": {
               "2012-11-19T16:11:17.713812": "validkey"
           },
           "meta": {
               "planned_use": "individual CV",
               "example_url": "",
               "api_key_owner": "Superman",
               "organization": "individual",
               "email": "*****@*****.**"
           },
           "type": "api_user"
        }
        """

        # hacky way to delete the "ti" db, then make it fresh again for each test.
        temp_dao = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        temp_dao.delete_db(os.getenv("CLOUDANT_DB"))
        self.d = dao.Dao("http://localhost:5984", os.getenv("CLOUDANT_DB"))
        self.d.update_design_doc()

        self.d.save(json.loads(test_item))
        self.d.save(json.loads(test_api_user))

        # do the same thing for the redis db.  We're using DB 8 for unittests.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        #setup api test client
        self.app = app
        self.app.testing = True
        self.client = self.app.test_client()
Example 35
    def setUp(self):
        self.BIBLIO_DATA = {
            "title": "An extension of de Finetti's theorem",
            "journal": "Advances in Applied Probability",
            "author": [
                "Pitman, J"
            ],
            "authors": "Pitman",
            "collection": "pitnoid",
            "volume": "10",
            "id": "p78",
            "year": "1978",
            "pages": "268 to 270"
        }

        self.ALIAS_DATA = {
            "title":["Why Most Published Research Findings Are False"],
            "url":["http://www.plosmedicine.org/article/info:doi/10.1371/journal.pmed.0020124"],
            "doi": ["10.1371/journal.pmed.0020124"],
            "biblio": [self.BIBLIO_DATA]
        }


        self.KEY1 = '2012-08-23T14:40:16.888888'
        self.KEY2 = '2012-08-23T14:40:16.999999'
        self.VAL1 = 1
        self.VAL2 = 2

        METRICS_DATA = {
            "provenance_url": "http://api.mendeley.com/research/public-chemical-compound-databases/",
            "values":{
                "raw": self.VAL2,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }

        METRICS_DATA2 = {
            "provenance_url": "http://api.mendeley.com/research/public-chemical-compound-databases/",
            "values":{
                "raw": self.VAL2,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }

        METRICS_DATA3 = {
            "provenance_url": "http://api.mendeley.com/research/public-chemical-compound-databases/",
            "values":{
                "raw": self.VAL2,
                "raw_history": {
                    self.KEY1: self.VAL1,
                    self.KEY2: self.VAL2
                }
            }
        }


        self.ITEM_DATA = {
            "_id": "test",
            "created": '2012-08-23T14:40:16.399932',
            "last_modified": '2012-08-23T14:40:16.399932',
            "aliases": self.ALIAS_DATA,
            "metrics": {
                "wikipedia:mentions": METRICS_DATA,
                "topsy:tweets": METRICS_DATA2
            },
            "biblio": self.BIBLIO_DATA,
            "type": "item"
        }

        self.TEST_PROVIDER_CONFIG = [
            ("wikipedia", {})
        ]

        self.d = None
        
        self.myrefsets = {"nih": {"2011": {
                        "facebook:comments": {0: [1, 99], 1: [91, 99]}, "mendeley:groups": {0: [1, 99], 3: [91, 99]}
                    }}}

        # setup a clean new redis test database.  We're putting unittest redis at DB Number 8.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        self.db = setup_postgres_for_unittests(db, app)
Example 36
import os

from totalimpact import app, tiredis, collection, incoming_email, db
from totalimpact import item as item_module
from totalimpact.models import MemberItems, NotAuthenticatedError
from totalimpact.providers import provider as provider_module
from totalimpact.providers.provider import ProviderFactory, ProviderItemNotFoundError, ProviderError, ProviderServerError, ProviderTimeout
from totalimpact import unicode_helpers
from totalimpact import default_settings
from totalimpact import REDIS_MAIN_DATABASE_NUMBER
import logging


logger = logging.getLogger("ti.views")
logger.setLevel(logging.DEBUG)

mydao = None
myredis = tiredis.from_url(os.getenv("REDIS_URL"), db=REDIS_MAIN_DATABASE_NUMBER)  # main app is on DB 0

# logger.debug(u"Building reference sets")
myrefsets = None
myrefsets_histograms = None
# try:
#     (myrefsets, myrefsets_histograms) = collection.build_all_reference_lookups(myredis, mydao)
#     logger.debug(u"Reference sets dict has %i keys" %len(myrefsets.keys()))
# except (LookupError, AttributeError), e:
#     logger.error(u"Exception %s: Unable to load reference sets" % (e.__repr__()))

def set_db(url, db):
    """useful for unit testing, where you want to use a local database
    """
    global mydao 
    mydao = None
Example 37
    def setUp(self):
        # we're putting unittests for redis in their own db (number 8) so they can be deleted with abandon
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()
Example 38
    def setUp(self):
        """
        This test item is a lightly-modified version of a real doc from our
        demo collection; it's available at http://total-impact-core.herokuapp.com/collection/kn5auf
        """
        test_item = '''
            {
            "_id": "1aff9dfebea711e1bdf912313d1a5e63",
            "_rev": "968-c7891982fca2ea41346a20b80c2b888d",
            "aliases": {
                "doi": [
                    "10.5061/dryad.j1fd7"
                ],
                "title": [
                    "Data from: Data archiving is a good use of research funds",
                    "data from: data archiving is a good  investment"
                ],
                "url": [
                    "http://datadryad.org/handle/10255/dryad.33537",
                    "http://hdl.handle.net/10255/dryad.33537"
                ]
            },
            "biblio": {
                "authors": "Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock",
                "genre": "dataset",
                "h1": "Data from: Data archiving is a good  investment",
                "repository": "Dryad Digital Repository",
                "title": "Data from: Data archiving is a good  investment",
                "year": "2011"
            },
            "created": "2012-06-25T09:21:11.960271",
            "currently_updating": false,
            "last_modified": "2012-11-18T04:57:40.539053",
            "metrics": {
                "delicious:bookmarks": {
                    "provenance_url": "http://www.delicious.com/url/4794ddb7a3e934ba23165af65fcfa9cd",
                    "static_meta": {
                        "description": "The number of bookmarks to this artifact (maximum=100).",
                        "display_name": "bookmarks",
                        "icon": "http://www.delicious.com/favicon.ico",
                        "provider": "Delicious",
                        "provider_url": "http://www.delicious.com/"
                    },
                    "values": {
                        "raw": 1,
                        "raw_history": {
                            "2012-06-23T09:21:16.027149": 1
                        }
                    }
                },
                "dryad:total_downloads": {
                    "provenance_url": "http://dx.doi.org/10.5061/dryad.j1fd7",
                    "static_meta": {
                        "description": "Dryad total downloads: combined number of downloads of the data package and data files",
                        "display_name": "total downloads",
                        "icon": "http:\\/\\/datadryad.org\\/favicon.ico",
                        "provider": "Dryad",
                        "provider_url": "http:\\/\\/www.datadryad.org\\/"
                    },
                    "values": {
                        "dryad": {
                            "CI95_lower": 91,
                            "CI95_upper": 98,
                            "estimate_lower": 96,
                            "estimate_upper": 96
                        },
                        "raw": 207,
                        "raw_history": {
                            "2012-06-25T09:21:16.027149": 132,
                            "2012-06-26T18:05:19.598432": 132,
                            "2012-06-26T20:10:16.858294": 132
                        }
                    }
                }
            },
            "type": "item"
        }
        '''

        self.test_api_user_meta = {    
                    'max_registered_items': 3, 
                    'planned_use': 'individual CV', 
                    'email': "*****@*****.**", 
                    'notes': '', 
                    'api_key_owner': 'Julia Smith', 
                    "example_url": "", 
                    "organization": "NASA",
                    "prefix": "NASA",
                }

        self.db = setup_postgres_for_unittests(db, app)

        item = item_module.create_objects_from_item_doc(json.loads(test_item))
        self.db.session.add(item)

        self.existing_api_user = api_user.ApiUser(**self.test_api_user_meta)
        self.existing_api_user.api_key = "validkey"  #override randomly assigned key
        self.db.session.add(self.existing_api_user)
        self.db.session.commit()


        # do the same thing for the redis db.  We're using DB 8 for unittests.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        #setup api test client
        self.app = app
        self.app.testing = True
        self.client = self.app.test_client()

        # Mock out relevant methods of the Dryad provider
        self.orig_Dryad_member_items = Dryad.member_items
        Dryad.member_items = MOCK_member_items

        self.aliases = [
            ["doi", "10.123"],
            ["doi", "10.124"],
            ["doi", "10.125"]
        ]
Example 39
    def setUp(self):
        """
        This test item is a lightly-modified version of a real doc from our
        demo collection; it's available at http://total-impact-core.herokuapp.com/collection/kn5auf
        """
        test_item = '''
            {
            "_id": "1aff9dfebea711e1bdf912313d1a5e63",
            "_rev": "968-c7891982fca2ea41346a20b80c2b888d",
            "aliases": {
                "doi": [
                    "10.5061/dryad.j1fd7"
                ],
                "title": [
                    "Data from: Data archiving is a good use of research funds",
                    "data from: data archiving is a good  investment"
                ],
                "url": [
                    "http://datadryad.org/handle/10255/dryad.33537",
                    "http://hdl.handle.net/10255/dryad.33537"
                ]
            },
            "biblio": {
                "authors": "Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock",
                "genre": "dataset",
                "h1": "Data from: Data archiving is a good  investment",
                "repository": "Dryad Digital Repository",
                "title": "Data from: Data archiving is a good  investment",
                "year": "2011"
            },
            "created": "2012-06-25T09:21:11.960271",
            "currently_updating": false,
            "last_modified": "2012-11-18T04:57:40.539053",
            "metrics": {
                "delicious:bookmarks": {
                    "provenance_url": "http://www.delicious.com/url/4794ddb7a3e934ba23165af65fcfa9cd",
                    "static_meta": {
                        "description": "The number of bookmarks to this artifact (maximum=100).",
                        "display_name": "bookmarks",
                        "icon": "http://www.delicious.com/favicon.ico",
                        "provider": "Delicious",
                        "provider_url": "http://www.delicious.com/"
                    },
                    "values": {
                        "raw": 1,
                        "raw_history": {
                            "2012-06-23T09:21:16.027149": 1
                        }
                    }
                },
                "dryad:total_downloads": {
                    "provenance_url": "http://dx.doi.org/10.5061/dryad.j1fd7",
                    "static_meta": {
                        "description": "Dryad total downloads: combined number of downloads of the data package and data files",
                        "display_name": "total downloads",
                        "icon": "http:\\/\\/datadryad.org\\/favicon.ico",
                        "provider": "Dryad",
                        "provider_url": "http:\\/\\/www.datadryad.org\\/"
                    },
                    "values": {
                        "dryad": {
                            "CI95_lower": 91,
                            "CI95_upper": 98,
                            "estimate_lower": 96,
                            "estimate_upper": 96
                        },
                        "raw": 207,
                        "raw_history": {
                            "2012-06-25T09:21:16.027149": 132,
                            "2012-06-26T18:05:19.598432": 132,
                            "2012-06-26T20:10:16.858294": 132
                        }
                    }
                }
            },
            "type": "item"
        }
        '''

        self.test_api_user_meta = {
            'max_registered_items': 3,
            'planned_use': 'individual CV',
            'email': "*****@*****.**",
            'notes': '',
            'api_key_owner': 'Julia Smith',
            "example_url": "",
            "organization": "NASA",
            "prefix": "NASA",
        }

        self.db = setup_postgres_for_unittests(db, app)

        item = item_module.create_objects_from_item_doc(json.loads(test_item))
        self.db.session.add(item)

        self.existing_api_user = api_user.ApiUser(**self.test_api_user_meta)
        self.existing_api_user.api_key = "validkey"  #override randomly assigned key
        self.db.session.add(self.existing_api_user)
        self.db.session.commit()

        # do the same thing for the redis db.  We're using DB 8 for unittests.
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        #setup api test client
        self.app = app
        self.app.testing = True
        self.client = self.app.test_client()

        # Mock out relevant methods of the Dryad provider
        self.orig_Dryad_member_items = Dryad.member_items
        Dryad.member_items = MOCK_member_items

        self.aliases = [["doi", "10.123"], ["doi", "10.124"],
                        ["doi", "10.125"]]