    def setUp(self):

        self.db = setup_postgres_for_unittests(db, app)

        # do the same for redis: set up the test redis database (DB number 8 is reserved for unit tests)
        self.r = tiredis.from_url("redis://localhost:6379", db=REDIS_UNITTEST_DATABASE_NUMBER)
        self.r.flushdb()
        now = datetime.datetime.utcnow()
        self.before = now - datetime.timedelta(days=2)
        self.last_week = now - datetime.timedelta(days=7)
        self.last_year = now - datetime.timedelta(days=370)
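        # reference times of varying age (2 days, 1 week, just over a year ago);
        # they get assigned to the items' last_modified / last_update_run fields below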

        # save a basic item beforehand, along with some additional items
        self.fake_item_doc = {
            "_id": "tiid1",
            "type": "item",
            "last_modified": now.isoformat(),
            "last_update_run": now.isoformat(),
            "aliases":{"doi":["10.7554/elife.1"]},
            "biblio": {"year":"2012"},
            "metrics": {}
        }
        self.fake_item_obj = item_module.create_objects_from_item_doc(self.fake_item_doc)        
        self.db.session.add(self.fake_item_obj)

        another_elife = copy.copy(self.fake_item_doc)
        another_elife["_id"] = "tiid2"
        another_elife["aliases"] = {"doi":["10.7554/ELIFE.2"]}
        another_elife["last_modified"] = self.before.isoformat()
        another_elife["last_update_run"] = self.before.isoformat()
        another_elife_obj = item_module.create_objects_from_item_doc(another_elife)        
        self.db.session.add(another_elife_obj)

        different_journal = copy.copy(self.fake_item_doc)
        different_journal["_id"] = "tiid3"
        different_journal["aliases"] = {"doi":["10.3897/zookeys.3"], "biblio":[{"year":1999}]}
        different_journal["last_modified"] = now.isoformat()
        different_journal["last_update_run"] = self.last_week.isoformat()
        different_journal_obj = item_module.create_objects_from_item_doc(different_journal)        
        self.db.session.add(different_journal_obj)

        different_journal2 = copy.copy(different_journal)
        different_journal2["_id"] = "tiid4"
        different_journal2["last_update_run"] = self.last_year.isoformat()
        different_journal_obj2 = item_module.create_objects_from_item_doc(different_journal2)        
        self.db.session.add(different_journal_obj2)

        self.db.session.commit()
Example #2
    def setUp(self):

        self.db = setup_postgres_for_unittests(db, app)

        # do the same for redis: set up the test redis database (DB number 8 is reserved for unit tests)
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()
        now = datetime.datetime.utcnow()
        self.before = now - datetime.timedelta(days=2)
        self.last_week = now - datetime.timedelta(days=7)
        self.last_year = now - datetime.timedelta(days=370)

        # save a basic item beforehand, along with some additional items
        self.fake_item_doc = {
            "_id": "tiid1",
            "type": "item",
            "last_modified": now.isoformat(),
            "last_update_run": now.isoformat(),
            "aliases":{"doi":["10.7554/elife.1"]},
            "biblio": {"year":"2012"},
            "metrics": {}
        }
        self.fake_item_obj = item_module.create_objects_from_item_doc(self.fake_item_doc)        
        self.db.session.add(self.fake_item_obj)

        another_elife = copy.copy(self.fake_item_doc)
        another_elife["_id"] = "tiid2"
        another_elife["aliases"] = {"doi":["10.7554/ELIFE.2"]}
        another_elife["last_modified"] = self.before.isoformat()
        another_elife["last_update_run"] = self.before.isoformat()
        another_elife_obj = item_module.create_objects_from_item_doc(another_elife)        
        self.db.session.add(another_elife_obj)

        different_journal = copy.copy(self.fake_item_doc)
        different_journal["_id"] = "tiid3"
        different_journal["aliases"] = {"doi":["10.3897/zookeys.3"], "biblio":[{"year":1999}]}
        different_journal["last_modified"] = now.isoformat()
        different_journal["last_update_run"] = self.last_week.isoformat()
        different_journal_obj = item_module.create_objects_from_item_doc(different_journal)        
        self.db.session.add(different_journal_obj)

        different_journal2 = copy.copy(different_journal)
        different_journal2["_id"] = "tiid4"
        different_journal2["last_update_run"] = self.last_year.isoformat()
        different_journal_obj2 = item_module.create_objects_from_item_doc(different_journal2)        
        self.db.session.add(different_journal_obj2)

        self.db.session.commit()
    def test_duplicates_list(self):
        item_docs = [
            {"_id": "a1", "last_modified": "now",
                "aliases": {"doi":["doi.org/aaa"], "url":["111", "def.com"]}}
            ,{"_id": "b2", "last_modified": "now",
                "aliases": {"doi":["doi.org/222"]}}
            ,{"_id": "c2", "last_modified": "now",
                "aliases": {"doi":["doi.org/222"]}}
            ,{"_id": "d2", "last_modified": "now",
                "aliases": {"doi":["doi.org/222"], "url":["foo"]}}
            ,{"_id": "e1",  "last_modified": "now",
                "aliases": {"url":["111"]}}
            ,{"_id": "f3",  "last_modified": "now",
                "aliases": {"doi":["333"], "url":["333"]}}
            ,{"_id": "g4",  "last_modified": "now",
                "aliases": {
                    "biblio": [{"title": "my paper", "authors": "smith"}]
                }}
            ,{"_id": "h4",  "last_modified": "now",
                "aliases": {
                    "biblio": [{"title": "My paper", "authors": "Smith"}]
                }}
            ]
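        # build_duplicates_list should group tiids whose docs share any alias value
        # (a doi, a url, or a case-insensitively matching biblio title/authors) and
        # flag which members carry user-provided biblio; see the expected value below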

        item_objs = [item_module.create_objects_from_item_doc(item_doc) for item_doc in item_docs]
        item_objs[-1].biblios[0].provider = "user_provided"
        tiids = [item.tiid for item in item_objs]

        response = item_module.build_duplicates_list(tiids)
        print response
        expected = [[{'tiid': u'a1', 'has_user_provided_biblio': False}, {'tiid': u'e1', 'has_user_provided_biblio': False}], [{'tiid': u'b2', 'has_user_provided_biblio': False}, {'tiid': u'c2', 'has_user_provided_biblio': False}, {'tiid': u'd2', 'has_user_provided_biblio': False}], [{'tiid': u'f3', 'has_user_provided_biblio': False}], [{'tiid': u'g4', 'has_user_provided_biblio': False}, {'tiid': u'h4', 'has_user_provided_biblio': True}]]
        assert_equals(response, expected)
Example #4
    def test_run_aliases_in_queue(self):
        test_couch_queue = backend.PythonQueue("test_couch_queue")
        test_couch_queue_dict = {self.fake_item["_id"][0]:test_couch_queue}
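        # the couch queue dict is keyed by the first character of the tiid, so this
        # worker routes updates for the fake item to test_couch_queue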
        provider_worker = backend.ProviderWorker(mocks.ProviderMock("myfakeprovider"), 
                                        None, None, None, test_couch_queue_dict, None, self.r)  
        response = provider_worker.add_to_couch_queue_if_nonzero(self.fake_item["_id"], 
                {"doi":["10.5061/dryad.3td2f"]}, 
                "aliases", 
                "dummy")

        # save basic item beforehand
        item_obj = item_module.create_objects_from_item_doc(self.fake_item)
        self.db.session.add(item_obj)
        self.db.session.commit()

        # run
        couch_worker = backend.CouchWorker(test_couch_queue, self.r, self.d)
        response = couch_worker.run()
        expected = None
        assert_equals(response, expected)

        # check that the queued aliases were written to the item
        response = item_module.get_item(self.fake_item["_id"])
        print response
        expected = {'pmid': ['111'], 'doi': ['10.5061/dryad.3td2f']}
        assert_equals(response["aliases"], expected)

        # check that last_modified was updated (compare the date portion only)
        now = datetime.datetime.utcnow().isoformat()
        assert_equals(response["last_modified"][0:10], now[0:10])
Example #5
    def test_adds_genre(self):
        self.TEST_OBJECT = item_module.create_objects_from_item_doc(self.ITEM_DATA)        
        self.db.session.add(self.TEST_OBJECT)
        self.db.session.commit()

        item = item_module.get_item("test", self.myrefsets, self.d)
        assert_equals(item["biblio"]['genre'], "article")
Example #6
    def test_run_metrics_in_queue(self):
        test_couch_queue = backend.PythonQueue("test_couch_queue")
        test_couch_queue_dict = {self.fake_item["_id"][0]:test_couch_queue}
        provider_worker = backend.ProviderWorker(mocks.ProviderMock("myfakeprovider"), 
                                        None, None, None, test_couch_queue_dict, None, self.r) 
        metrics_method_response = {'dryad:package_views': (361, 'http://dx.doi.org/10.5061/dryad.7898'), 
                            'dryad:total_downloads': (176, 'http://dx.doi.org/10.5061/dryad.7898'), 
                            'dryad:most_downloaded_file': (65, 'http://dx.doi.org/10.5061/dryad.7898')}                                         
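        # each entry maps a provider:metric name to a (raw value, provenance url)
        # tuple, the shape add_to_couch_queue_if_nonzero takes for metrics updates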
        response = provider_worker.add_to_couch_queue_if_nonzero(self.fake_item["_id"], 
                metrics_method_response,
                "metrics", 
                "dummy")

        # save basic item beforehand
        item_obj = item_module.create_objects_from_item_doc(self.fake_item)
        self.db.session.add(item_obj)
        self.db.session.commit()

        # run
        couch_worker = backend.CouchWorker(test_couch_queue, self.r, self.d)    
        couch_worker.run()
            
        # check that the queued metrics were written to the item
        response = item_module.get_item(self.fake_item["_id"])
        print response
        expected = 361
        assert_equals(response["metrics"]['dryad:package_views']['values']["raw"], expected)
    def test_run_metrics_in_queue(self):
        test_couch_queue = backend.PythonQueue("test_couch_queue")
        test_couch_queue_dict = {self.fake_item["_id"][0]: test_couch_queue}
        provider_worker = backend.ProviderWorker(
            mocks.ProviderMock("myfakeprovider"), None, None, None,
            test_couch_queue_dict, None, self.r)
        metrics_method_response = {
            'dryad:package_views':
            (361, 'http://dx.doi.org/10.5061/dryad.7898'),
            'dryad:total_downloads':
            (176, 'http://dx.doi.org/10.5061/dryad.7898'),
            'dryad:most_downloaded_file':
            (65, 'http://dx.doi.org/10.5061/dryad.7898')
        }
        response = provider_worker.add_to_couch_queue_if_nonzero(
            self.fake_item["_id"], metrics_method_response, "metrics", "dummy")

        # save basic item beforehand
        item_obj = item_module.create_objects_from_item_doc(self.fake_item)
        self.db.session.add(item_obj)
        self.db.session.commit()

        # run
        couch_worker = backend.CouchWorker(test_couch_queue, self.r, self.d)
        couch_worker.run()

        # check that the queued metrics were written to the item
        response = item_module.get_item(self.fake_item["_id"], {}, self.r)
        print response
        expected = 361
        assert_equals(
            response["metrics"]['dryad:package_views']['values']["raw"],
            expected)
    def test_run_aliases_in_queue(self):
        test_couch_queue = backend.PythonQueue("test_couch_queue")
        test_couch_queue_dict = {self.fake_item["_id"][0]: test_couch_queue}
        provider_worker = backend.ProviderWorker(
            mocks.ProviderMock("myfakeprovider"), None, None, None,
            test_couch_queue_dict, None, self.r)
        response = provider_worker.add_to_couch_queue_if_nonzero(
            self.fake_item["_id"], {"doi": ["10.5061/dryad.3td2f"]}, "aliases",
            "dummy")

        # save basic item beforehand
        item_obj = item_module.create_objects_from_item_doc(self.fake_item)
        self.db.session.add(item_obj)
        self.db.session.commit()

        # run
        couch_worker = backend.CouchWorker(test_couch_queue, self.r, self.d)
        response = couch_worker.run()
        expected = None
        assert_equals(response, expected)

        # check that the queued aliases were written to the item
        response = item_module.get_item(self.fake_item["_id"], {}, self.r)
        print response
        expected = {'pmid': ['111'], 'doi': ['10.5061/dryad.3td2f']}
        assert_equals(response["aliases"], expected)

        # check that last_modified was updated (compare the date portion only)
        now = datetime.datetime.utcnow().isoformat()
        assert_equals(response["last_modified"][0:10], now[0:10])
    def test_adds_genre(self):
        self.TEST_OBJECT = item_module.create_objects_from_item_doc(self.ITEM_DATA)        
        self.db.session.add(self.TEST_OBJECT)
        self.db.session.commit()

        item = item_module.get_item("test", self.myrefsets, self.r)
        assert_equals(item["biblio"]['genre'], "article")
def item_action_on_a_page(page, skip_till_key="0000"):
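    # walk one page of couch rows and (re)create item objects for each doc,
    # skipping docs whose _id sorts at or below skip_till_key (presumably so a
    # partial run can be resumed from where it left off)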
    items = [row.doc for row in page]

    for item_doc in items:
        if item_doc["_id"] > skip_till_key:
            new_item_object = item_module.create_objects_from_item_doc(item_doc, skip_if_exists=True)
    print "just finished", item_doc["_id"]
    return
Example #11
def item_action_on_a_page(page, skip_till_key="0000"):
    items = [row.doc for row in page]

    for item_doc in items:
        if item_doc["_id"] > skip_till_key:
            new_item_object = item_module.create_objects_from_item_doc(
                item_doc, skip_if_exists=True)
    print "just finished", item_doc["_id"]
    return
    def create_test_collection(self):
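        # a collection doc whose alias_tiids map namespaced aliases (pmid:...) to
        # the tiids of the two test item docs created further down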
        test_collection = {"_id": "testcollectionid", 
                            "title": "mycollection", 
                            "type":"collection", 
                            "created":  "2012-08-23T14:40:16.888800", 
                            "last_modified":  "2012-08-23T14:40:16.888800", 
                            "alias_tiids": {
                                       "pmid:16023720": "iaw9rzldigp4xc7p20bycnkg",
                                       "pmid:16413797": "itsq6fgx8ogi9ixysbipmtxx"}}
        test_object = collection.create_objects_from_collection_doc(test_collection) 
        db.session.add(test_object) 

        biblio1 = {
               "journal": "The Astrophysical Journal",
               "authors": "Kwok, Purton, Fitzgerald",
               "year": "1978",
               "title": "On the origin of planetary nebulae"
           }
        biblio2 = {
               "journal": "The Astrophysical Journal 2",
               "authors": "Kwok, Purton, Fitzgerald",
               "year": "1900",
               "title": "On the origin of planetary nebulae The Sequel"
           }
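        # metrics values carry a raw_history keyed by ISO 8601 collection
        # timestamps, usually alongside a current "raw" snapshot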
        metrics1 = {
           "mendeley:readers": {
               "provenance_url": "http://www.mendeley.com/research/origin-planetary-nebulae/",
               "values": {
                   "raw_history": {
                       "2013-06-22T20:41:03.178277": 4,
                       "2013-01-15T22:21:55.253826": 3,
                       "2013-07-24T17:59:26.817504": 4,
                       "2013-07-24T18:04:41.035841": 9,
                   },
                   "raw": 9
               }
           },
           "mendeley:discipline": {
               "provenance_url": "http://www.mendeley.com/research/origin-planetary-nebulae/",
               "values": {
                   "raw_history": {
                       "2013-06-22T23:03:15.852461": [
                           {
                               "name": "Astronomy / Astrophysics / Space Science",
                               "value": 100,
                               "id": 2
                           }
                       ]
                    }
                }
            }
        }
        metrics2 = {
           "topsy:tweets": {
               "provenance_url": "http://topsydrilldown",
               "values": {
                   "raw_history": {
                       "2013-11-22T20:41:03.178277": 22
                   },
                   "raw": 22
                }
            }
        }

        test_item_docs = [
            {"_id": "iaw9rzldigp4xc7p20bycnkg", "type":"item", "created": "2012-08-23T14:40:16.888800", "last_modified": "2012-08-23T14:40:16.888800", "biblio":biblio1, "metrics":metrics1, "aliases":{"pmid": ["16023720"]}},
            {"_id": "itsq6fgx8ogi9ixysbipmtxx", "type":"item", "created": "2012-08-23T14:40:16.888800", "last_modified": "2012-08-23T14:40:16.888800", "biblio":biblio2, "metrics":metrics2, "aliases":{"pmid": ["16413797"]}}
        ]

        for item_doc in test_item_docs:
            test_object = item_module.create_objects_from_item_doc(item_doc) 
            db.session.add(test_object) 

        db.session.commit() 
Example #13
    def create_test_collection(self):
        test_collection = {
            "_id": "testcollectionid",
            "title": "mycollection",
            "type": "collection",
            "created": "2012-08-23T14:40:16.888800",
            "last_modified": "2012-08-23T14:40:16.888800",
            "alias_tiids": {
                "pmid:16023720": "iaw9rzldigp4xc7p20bycnkg",
                "pmid:16413797": "itsq6fgx8ogi9ixysbipmtxx"
            }
        }
        test_object = collection.create_objects_from_collection_doc(
            test_collection)
        db.session.add(test_object)

        biblio1 = {
            "journal": "The Astrophysical Journal",
            "authors": "Kwok, Purton, Fitzgerald",
            "year": "1978",
            "title": "On the origin of planetary nebulae"
        }
        biblio2 = {
            "journal": "The Astrophysical Journal 2",
            "authors": "Kwok, Purton, Fitzgerald",
            "year": "1900",
            "title": "On the origin of planetary nebulae The Sequel"
        }
        metrics1 = {
            "mendeley:readers": {
                "provenance_url":
                "http://www.mendeley.com/research/origin-planetary-nebulae/",
                "values": {
                    "raw_history": {
                        "2013-06-22T20:41:03.178277": 4,
                        "2013-01-15T22:21:55.253826": 3,
                        "2013-07-24T17:59:26.817504": 4,
                        "2013-07-24T18:04:41.035841": 9,
                    },
                    "raw": 9
                }
            },
            "mendeley:discipline": {
                "provenance_url":
                "http://www.mendeley.com/research/origin-planetary-nebulae/",
                "values": {
                    "raw_history": {
                        "2013-06-22T23:03:15.852461": [{
                            "name": "Astronomy / Astrophysics / Space Science",
                            "value": 100,
                            "id": 2
                        }]
                    }
                }
            }
        }
        metrics2 = {
            "topsy:tweets": {
                "provenance_url": "http://topsydrilldown",
                "values": {
                    "raw_history": {
                        "2013-11-22T20:41:03.178277": 22
                    },
                    "raw": 22
                }
            }
        }

        test_item_docs = [{
            "_id": "iaw9rzldigp4xc7p20bycnkg",
            "type": "item",
            "created": "2012-08-23T14:40:16.888800",
            "last_modified": "2012-08-23T14:40:16.888800",
            "biblio": biblio1,
            "metrics": metrics1,
            "aliases": {
                "pmid": ["16023720"]
            }
        }, {
            "_id": "itsq6fgx8ogi9ixysbipmtxx",
            "type": "item",
            "created": "2012-08-23T14:40:16.888800",
            "last_modified": "2012-08-23T14:40:16.888800",
            "biblio": biblio2,
            "metrics": metrics2,
            "aliases": {
                "pmid": ["16413797"]
            }
        }]

        for item_doc in test_item_docs:
            test_object = item_module.create_objects_from_item_doc(item_doc)
            db.session.add(test_object)

        db.session.commit()
Example #14
    def save_test_item(self):
        self.TEST_OBJECT = item_module.create_objects_from_item_doc(self.ITEM_DATA)
        self.db.session.add(self.TEST_OBJECT)
        self.db.session.commit()
Example #15
    def test_as_old_doc(self):
        test_object = item_module.create_objects_from_item_doc(self.ITEM_DATA)
        new_doc = test_object.as_old_doc()
        print json.dumps(new_doc, sort_keys=True, indent=4)
        print json.dumps(self.ITEM_DATA, sort_keys=True, indent=4)
        assert_equals(new_doc, self.ITEM_DATA)
Example #16
    def setUp(self):
        """
        This test item is a lightly-modified version of a real doc from our
        demo collection; it's available at http://total-impact-core.herokuapp.com/collection/kn5auf
        """
        test_item = '''
            {
            "_id": "1aff9dfebea711e1bdf912313d1a5e63",
            "_rev": "968-c7891982fca2ea41346a20b80c2b888d",
            "aliases": {
                "doi": [
                    "10.5061/dryad.j1fd7"
                ],
                "title": [
                    "Data from: Data archiving is a good use of research funds",
                    "data from: data archiving is a good  investment"
                ],
                "url": [
                    "http://datadryad.org/handle/10255/dryad.33537",
                    "http://hdl.handle.net/10255/dryad.33537"
                ]
            },
            "biblio": {
                "authors": "Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock",
                "genre": "dataset",
                "h1": "Data from: Data archiving is a good  investment",
                "repository": "Dryad Digital Repository",
                "title": "Data from: Data archiving is a good  investment",
                "year": "2011"
            },
            "created": "2012-06-25T09:21:11.960271",
            "currently_updating": false,
            "last_modified": "2012-11-18T04:57:40.539053",
            "metrics": {
                "delicious:bookmarks": {
                    "provenance_url": "http://www.delicious.com/url/4794ddb7a3e934ba23165af65fcfa9cd",
                    "static_meta": {
                        "description": "The number of bookmarks to this artifact (maximum=100).",
                        "display_name": "bookmarks",
                        "icon": "http://www.delicious.com/favicon.ico",
                        "provider": "Delicious",
                        "provider_url": "http://www.delicious.com/"
                    },
                    "values": {
                        "raw": 1,
                        "raw_history": {
                            "2012-06-23T09:21:16.027149": 1
                        }
                    }
                },
                "dryad:total_downloads": {
                    "provenance_url": "http://dx.doi.org/10.5061/dryad.j1fd7",
                    "static_meta": {
                        "description": "Dryad total downloads: combined number of downloads of the data package and data files",
                        "display_name": "total downloads",
                        "icon": "http:\\/\\/datadryad.org\\/favicon.ico",
                        "provider": "Dryad",
                        "provider_url": "http:\\/\\/www.datadryad.org\\/"
                    },
                    "values": {
                        "dryad": {
                            "CI95_lower": 91,
                            "CI95_upper": 98,
                            "estimate_lower": 96,
                            "estimate_upper": 96
                        },
                        "raw": 207,
                        "raw_history": {
                            "2012-06-25T09:21:16.027149": 132,
                            "2012-06-26T18:05:19.598432": 132,
                            "2012-06-26T20:10:16.858294": 132
                        }
                    }
                }
            },
            "type": "item"
        }
        '''

        self.test_api_user_meta = {
            'max_registered_items': 3,
            'planned_use': 'individual CV',
            'email': "*****@*****.**",
            'notes': '',
            'api_key_owner': 'Julia Smith',
            "example_url": "",
            "organization": "NASA",
            "prefix": "NASA",
        }

        self.db = setup_postgres_for_unittests(db, app)

        item = item_module.create_objects_from_item_doc(json.loads(test_item))
        self.db.session.add(item)

        self.existing_api_user = api_user.ApiUser(**self.test_api_user_meta)
        self.existing_api_user.api_key = "validkey"  #override randomly assigned key
        self.db.session.add(self.existing_api_user)
        self.db.session.commit()

        # do the same for redis; DB 8 is reserved for unit tests
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        # set up the API test client
        self.app = app
        self.app.testing = True
        self.client = self.app.test_client()

        # Mock out relevant methods of the Dryad provider
        self.orig_Dryad_member_items = Dryad.member_items
        Dryad.member_items = MOCK_member_items

        self.aliases = [["doi", "10.123"], ["doi", "10.124"],
                        ["doi", "10.125"]]
Example #17
    def setUp(self):
        """
        This test item is a lightly-modified version of a real doc from our
        demo collection; it's available at http://total-impact-core.herokuapp.com/collection/kn5auf
        """
        test_item = '''
            {
            "_id": "1aff9dfebea711e1bdf912313d1a5e63",
            "_rev": "968-c7891982fca2ea41346a20b80c2b888d",
            "aliases": {
                "doi": [
                    "10.5061/dryad.j1fd7"
                ],
                "title": [
                    "Data from: Data archiving is a good use of research funds",
                    "data from: data archiving is a good  investment"
                ],
                "url": [
                    "http://datadryad.org/handle/10255/dryad.33537",
                    "http://hdl.handle.net/10255/dryad.33537"
                ]
            },
            "biblio": {
                "authors": "Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock, Piwowar, Vision, Whitlock",
                "genre": "dataset",
                "h1": "Data from: Data archiving is a good  investment",
                "repository": "Dryad Digital Repository",
                "title": "Data from: Data archiving is a good  investment",
                "year": "2011"
            },
            "created": "2012-06-25T09:21:11.960271",
            "currently_updating": false,
            "last_modified": "2012-11-18T04:57:40.539053",
            "metrics": {
                "delicious:bookmarks": {
                    "provenance_url": "http://www.delicious.com/url/4794ddb7a3e934ba23165af65fcfa9cd",
                    "static_meta": {
                        "description": "The number of bookmarks to this artifact (maximum=100).",
                        "display_name": "bookmarks",
                        "icon": "http://www.delicious.com/favicon.ico",
                        "provider": "Delicious",
                        "provider_url": "http://www.delicious.com/"
                    },
                    "values": {
                        "raw": 1,
                        "raw_history": {
                            "2012-06-23T09:21:16.027149": 1
                        }
                    }
                },
                "dryad:total_downloads": {
                    "provenance_url": "http://dx.doi.org/10.5061/dryad.j1fd7",
                    "static_meta": {
                        "description": "Dryad total downloads: combined number of downloads of the data package and data files",
                        "display_name": "total downloads",
                        "icon": "http:\\/\\/datadryad.org\\/favicon.ico",
                        "provider": "Dryad",
                        "provider_url": "http:\\/\\/www.datadryad.org\\/"
                    },
                    "values": {
                        "dryad": {
                            "CI95_lower": 91,
                            "CI95_upper": 98,
                            "estimate_lower": 96,
                            "estimate_upper": 96
                        },
                        "raw": 207,
                        "raw_history": {
                            "2012-06-25T09:21:16.027149": 132,
                            "2012-06-26T18:05:19.598432": 132,
                            "2012-06-26T20:10:16.858294": 132
                        }
                    }
                }
            },
            "type": "item"
        }
        '''

        self.test_api_user_meta = {    
                    'max_registered_items': 3, 
                    'planned_use': 'individual CV', 
                    'email': "*****@*****.**", 
                    'notes': '', 
                    'api_key_owner': 'Julia Smith', 
                    "example_url": "", 
                    "organization": "NASA",
                    "prefix": "NASA",
                }

        self.db = setup_postgres_for_unittests(db, app)

        item = item_module.create_objects_from_item_doc(json.loads(test_item))
        self.db.session.add(item)

        self.existing_api_user = api_user.ApiUser(**self.test_api_user_meta)
        self.existing_api_user.api_key = "validkey"  #override randomly assigned key
        self.db.session.add(self.existing_api_user)
        self.db.session.commit()


        # do the same for redis; DB 8 is reserved for unit tests
        self.r = tiredis.from_url("redis://localhost:6379", db=8)
        self.r.flushdb()

        # set up the API test client
        self.app = app
        self.app.testing = True
        self.client = self.app.test_client()

        # Mock out relevant methods of the Dryad provider
        self.orig_Dryad_member_items = Dryad.member_items
        Dryad.member_items = MOCK_member_items

        self.aliases = [
            ["doi", "10.123"],
            ["doi", "10.124"],
            ["doi", "10.125"]
        ]
Example #18
    def save_test_item(self):
        self.TEST_OBJECT = item_module.create_objects_from_item_doc(self.ITEM_DATA)
        self.db.session.add(self.TEST_OBJECT)
        self.db.session.commit()
Example #19
    def test_as_old_doc(self):
        test_object = item_module.create_objects_from_item_doc(self.ITEM_DATA)
        new_doc = test_object.as_old_doc()
        print json.dumps(new_doc, sort_keys=True, indent=4)
        print json.dumps(self.ITEM_DATA, sort_keys=True, indent=4)
        assert_equals(new_doc, self.ITEM_DATA)