class News:
    """Store and retrieve news files for one station.

    Files live in a pairtree (PTOFS) bucket whose id combines the
    bucket label and the station id.
    """

    # Instance attributes (assigned in __init__):
    #   __bucket_keeper -- PTOFS store backing this instance
    #   __bucket_id     -- "<label>_<station_id>" bucket name
    #   __bucket_label  -- category/label half of the bucket id
    #   __station_id    -- station half of the bucket id
    __bucket_keeper = None
    __bucket_id = None
    __bucket_label = None
    __station_id = None

    def __init__(self, bucket_label, station_id):
        self.__station_id = station_id
        self.__bucket_label = bucket_label
        # Currently using pairtree on the local file system.
        self.__bucket_keeper = PTOFS()
        # BUG FIX: the original passed a *set* literal
        # ({self.__bucket_label, self.__station_id}) as a single
        # argument to format(), yielding ids like "{'a', 'b'}_{1}"
        # or raising IndexError; pass the two values positionally.
        # (Also removed stray pasted scratch code that referenced an
        # undefined name `o` at class level.)
        self.__bucket_id = "{0}_{1}".format(self.__bucket_label,
                                            self.__station_id)

    def add_news_file(self, news_file_location, extra_json):
        """Store one news file plus its metadata in this bucket."""
        result = self.__bucket_keeper.put_stream(self.__bucket_id,
                                                 news_file_location,
                                                 extra_json)
        print(result)

    def get_news_files(self):
        """Return the store's listing for this bucket."""
        return self.__bucket_keeper.list(self.__bucket_id)

    def get_latest_news_file(self):
        """Not implemented yet; returns None."""
        pass
def save_to_ofs(session, filename, validity, station_id, category):
    """Save *filename* into the OFS bucket for (station_id, category),
    tag it with its validity, then play a "message saved" prompt.

    session  -- telephony session object used for logging, channel
                variables and prompt playback.
    filename -- name of the file under base_files_directory.
    validity -- number of days the file stays valid (stored as
                metadata alongside the stream).
    """
    o = PTOFS(storage_dir="/home/amour/media/gdrive/Community Media/data")
    bucket_id = "{0}_{1}".format(station_id, category)
    session.consoleLog("info", "bucket id is " + bucket_id)
    # Use a context manager so the uploaded file handle is always
    # closed; the original opened it inline and leaked it.
    with open("{0}/{1}".format(base_files_directory, filename)) as stream:
        result = o.put_stream(bucket_id, filename, stream,
                              params={"validity": validity})
    session.consoleLog("info", str(result))
    # Confirm to the caller, in the station's own prompt, that the
    # message was saved.
    session.streamFile("{0}/{1}".format(
        base_prompts_directory,
        station_message_saved_messages[
            station_lines[session.getVariable('destination_number')]]))
class News:
    """Store and retrieve news files for one station.

    Files live in a pairtree (PTOFS) bucket whose id combines the
    bucket label and the station id.
    """

    # Instance attributes (assigned in __init__):
    #   __bucket_keeper -- PTOFS store backing this instance
    #   __bucket_id     -- "<label>_<station_id>" bucket name
    #   __bucket_label  -- category/label half of the bucket id
    #   __station_id    -- station half of the bucket id
    __bucket_keeper = None
    __bucket_id = None
    __bucket_label = None
    __station_id = None

    def __init__(self, bucket_label, station_id):
        self.__station_id = station_id
        self.__bucket_label = bucket_label
        # Currently using pairtree on the local file system.
        self.__bucket_keeper = PTOFS()
        # BUG FIX: the original passed a *set* literal
        # ({self.__bucket_label, self.__station_id}) as a single
        # argument to format(), yielding ids like "{'a', 'b'}_{1}"
        # or raising IndexError; pass the two values positionally.
        # (Also removed stray pasted scratch code that referenced an
        # undefined name `o` at class level.)
        self.__bucket_id = "{0}_{1}".format(self.__bucket_label,
                                            self.__station_id)

    def add_news_file(self, news_file_location, extra_json):
        """Store one news file plus its metadata in this bucket."""
        result = self.__bucket_keeper.put_stream(self.__bucket_id,
                                                 news_file_location,
                                                 extra_json)
        print(result)

    def get_news_files(self):
        """Return the store's listing for this bucket."""
        return self.__bucket_keeper.list(self.__bucket_id)

    def get_latest_news_file(self):
        """Not implemented yet; returns None."""
        pass
class TestPairtreeOFS(unittest.TestCase): def setUp(self): self.o = PTOFS(storage_dir="pt_deleteme") def tearDown(self): shutil.rmtree("pt_deleteme") def test_empty(self): pass def test_claim_bucket(self): a = self.o.claim_bucket() self.assertTrue(self.o.exists(a)) def test_store_bytes_no_params(self): a = self.o.claim_bucket() label = "foo.txt" b = self.o.put_stream(a, label, "Some bytes to store") self.assertEquals(b['_label'], "foo.txt") self.assertEquals(b['_content_length'], 19) self.assertEquals(b['_checksum'], 'md5:eee89bbbcf416f658c7bc18cd8f2b61d') def test_store_bytes_with_params(self): a = self.o.claim_bucket() label = "foo.txt" b = self.o.put_stream(a, label, "Some bytes to store", {"a":"1", "b":[1,2,3,4,5]}) self.assertEquals(b['a'], "1") self.assertEquals(b['b'], [1,2,3,4,5]) self.assertEquals(b['_label'], "foo.txt") self.assertEquals(b['_content_length'], 19) self.assertEquals(b['_checksum'], 'md5:eee89bbbcf416f658c7bc18cd8f2b61d') def test_store_params_after_bytes(self): a = self.o.claim_bucket() label = "foo.txt" self.o.put_stream(a, label, "Some bytes to store") b = self.o.update_metadata(a, label, {"a":"1", "b":[1,2,3,4,5]}) self.assertEquals(b['a'], "1") self.assertEquals(b['b'], [1,2,3,4,5]) def test_params_persistence(self): a = self.o.claim_bucket() label = "foo.txt" self.o.put_stream(a, label, "Some bytes to store", {"a":"1", "b":[1,2,3,4,5]}) b = self.o.get_metadata(a, label) self.assertEquals(b['a'], "1") self.assertEquals(b['b'], [1,2,3,4,5]) def test_params_deletion(self): a = self.o.claim_bucket() label = "foo.txt" self.o.put_stream(a, label, "Some bytes to store", {"a":"1", "b":[1,2,3,4,5]}) self.o.del_metadata_keys(a, label, ['b']) b = self.o.get_metadata(a, label) self.assertEquals(b['a'], "1") self.assertFalse(b.has_key('b'))
def save_to_ofs(session, filename, validity, station_id, category):
    """Save *filename* into the OFS bucket for (station_id, category).

    session  -- telephony session object used for logging.
    filename -- name of the file under base_files_directory.
    validity -- number of days the file stays valid (stored as
                metadata alongside the stream).
    """
    o = PTOFS()
    bucket_id = "{0}_{1}".format(station_id, category)
    session.consoleLog("info", "bucket id is " + bucket_id)
    # Use a context manager so the uploaded file handle is always
    # closed; the original opened it inline and leaked it.
    with open("{0}/{1}".format(base_files_directory, filename)) as stream:
        result = o.put_stream(bucket_id, filename, stream,
                              params={"validity": validity})
    session.consoleLog("info", str(result))
def save_to_ofs(session, filename, validity, station_id, category):
    """Save *filename* into the OFS bucket for (station_id, category).

    session  -- telephony session object used for logging.
    filename -- name of the file under base_files_directory.
    validity -- number of days the file stays valid (stored as
                metadata alongside the stream).
    """
    o = PTOFS()
    bucket_id = "{0}_{1}".format(station_id, category)
    session.consoleLog("info", "bucket id is " + bucket_id)
    # Use a context manager so the uploaded file handle is always
    # closed; the original opened it inline and leaked it.
    with open("{0}/{1}".format(base_files_directory, filename)) as stream:
        result = o.put_stream(bucket_id, filename, stream,
                              params={"validity": validity})
    session.consoleLog("info", str(result))
def save_to_ofs(session, filename, validity, station_id, category):
    """Save *filename* into the OFS bucket for (station_id, category),
    tag it with its validity, then play a "message saved" prompt.

    session  -- telephony session object used for logging, channel
                variables and prompt playback.
    filename -- name of the file under base_files_directory.
    validity -- number of days the file stays valid (stored as
                metadata alongside the stream).
    """
    o = PTOFS(storage_dir="/home/amour/media/gdrive/Community Media/data")
    bucket_id = "{0}_{1}".format(station_id, category)
    session.consoleLog("info", "bucket id is " + bucket_id)
    # Use a context manager so the uploaded file handle is always
    # closed; the original opened it inline and leaked it.
    with open("{0}/{1}".format(base_files_directory, filename)) as stream:
        result = o.put_stream(bucket_id, filename, stream,
                              params={"validity": validity})
    session.consoleLog("info", str(result))
    # Confirm to the caller, in the station's own prompt, that the
    # message was saved.
    session.streamFile(
        "{0}/{1}".format(
            base_prompts_directory,
            station_message_saved_messages[
                station_lines[session.getVariable("destination_number")]],
        )
    )
class CommunityMedia:
    """Access to one station's media bucket in the pairtree store:
    add files and list those still within their validity window."""

    # Instance attributes (assigned in __init__):
    #   __bucket_keeper -- PTOFS store backing this instance
    #   __bucket_id     -- "<station_id>_<label>" bucket name
    #   __bucket_label  -- category/label half of the bucket id
    #   __station_id    -- station half of the bucket id
    __bucket_keeper = None
    __bucket_id = None
    __bucket_label = None
    __station_id = None

    def __init__(self, bucket_label, station_id):
        self.__station_id = station_id
        self.__bucket_label = bucket_label
        # Currently using pairtree on the (gdrive-synced) file system.
        self.__bucket_keeper = PTOFS(
            storage_dir="/home/amour/media/gdrive/Community Media/data"
        )
        self.__bucket_id = "{0}_{1}".format(self.__station_id,
                                            self.__bucket_label)
        # Typo fix: "bucket id s" -> "bucket id is" (matches the
        # save_to_ofs log message); print() works on Python 2 and 3.
        print("bucket id is " + self.__bucket_id)

    def add_news_file(self, news_file_location, extra_json):
        """Store one media file plus its metadata in the bucket."""
        result = self.__bucket_keeper.put_stream(self.__bucket_id,
                                                 news_file_location,
                                                 extra_json)
        print(result)

    def get_media_files(self):
        """Return sanitized paths of every stored file still valid."""
        media_files = []
        for label in self.__bucket_keeper.list_labels(self.__bucket_id):
            metadata_json = self.__bucket_keeper.get_metadata(
                self.__bucket_id, label)
            if self.__is_valid(metadata_json["_creation_date"],
                               metadata_json["validity"]):
                file_url = self.__bucket_keeper.get_url(
                    self.__bucket_id, label)
                media_files.append(self.__sanitize_url(file_url))
        return media_files

    def __is_valid(self, creation_date, validity):
        """True while creation_date + validity days is still in the future.

        creation_date -- 'YYYY-MM-DDTHH:MM:SS' string; compared
                         against datetime.utcnow(), so it is assumed
                         to be UTC -- TODO confirm with the store.
        validity      -- number of days, int or numeric string.
        """
        sanitized_time = creation_date.replace('T', ' ')
        creation_time = datetime.strptime(sanitized_time,
                                          '%Y-%m-%d %H:%M:%S')
        time_delta = timedelta(days=int(validity))
        now = datetime.utcnow()
        return (time_delta + creation_time) > now

    def __sanitize_url(self, url):
        """Strip a leading 'scheme://' so a plain local path remains."""
        parts = url.split("://")
        return parts[len(parts) - 1]
class CommunityMedia:
    """Access to one station's media bucket in the pairtree store:
    add files and list those still within their validity window."""

    # Instance attributes (assigned in __init__):
    #   __bucket_keeper -- PTOFS store backing this instance
    #   __bucket_id     -- "<station_id>_<label>" bucket name
    #   __bucket_label  -- category/label half of the bucket id
    #   __station_id    -- station half of the bucket id
    __bucket_keeper = None
    __bucket_id = None
    __bucket_label = None
    __station_id = None

    def __init__(self, bucket_label, station_id):
        self.__station_id = station_id
        self.__bucket_label = bucket_label
        # Currently using pairtree on the (gdrive-synced) file system.
        self.__bucket_keeper = PTOFS(
            storage_dir="/home/amour/media/gdrive/Community Media/data"
        )
        self.__bucket_id = "{0}_{1}".format(self.__station_id,
                                            self.__bucket_label)
        # Typo fix: "bucket id s" -> "bucket id is" (matches the
        # save_to_ofs log message); print() works on Python 2 and 3.
        print("bucket id is " + self.__bucket_id)

    def add_news_file(self, news_file_location, extra_json):
        """Store one media file plus its metadata in the bucket."""
        result = self.__bucket_keeper.put_stream(self.__bucket_id,
                                                 news_file_location,
                                                 extra_json)
        print(result)

    def get_media_files(self):
        """Return sanitized paths of every stored file still valid."""
        media_files = []
        for label in self.__bucket_keeper.list_labels(self.__bucket_id):
            metadata_json = self.__bucket_keeper.get_metadata(
                self.__bucket_id, label)
            if self.__is_valid(metadata_json["_creation_date"],
                               metadata_json["validity"]):
                file_url = self.__bucket_keeper.get_url(
                    self.__bucket_id, label)
                media_files.append(self.__sanitize_url(file_url))
        return media_files

    def __is_valid(self, creation_date, validity):
        """True while creation_date + validity days is still in the future.

        creation_date -- 'YYYY-MM-DDTHH:MM:SS' string; compared
                         against datetime.utcnow(), so it is assumed
                         to be UTC -- TODO confirm with the store.
        validity      -- number of days, int or numeric string.
        """
        sanitized_time = creation_date.replace('T', ' ')
        creation_time = datetime.strptime(sanitized_time,
                                          '%Y-%m-%d %H:%M:%S')
        time_delta = timedelta(days=int(validity))
        now = datetime.utcnow()
        return (time_delta + creation_time) > now

    def __sanitize_url(self, url):
        """Strip a leading 'scheme://' so a plain local path remains."""
        parts = url.split("://")
        return parts[len(parts) - 1]
class TestPairtreeOFS(unittest.TestCase): def setUp(self): self.o = PTOFS(storage_dir="pt_deleteme") def tearDown(self): shutil.rmtree("pt_deleteme") def test_empty(self): pass def test_claim_bucket(self): a = self.o.claim_bucket() self.assertTrue(self.o.exists(a)) def test_store_bytes_no_params(self): a = self.o.claim_bucket() label = "foo.txt" b = self.o.put_stream(a, label, "Some bytes to store") self.assertEquals(b['_label'], "foo.txt") self.assertEquals(b['_content_length'], 19) self.assertEquals(b['_checksum'], 'md5:eee89bbbcf416f658c7bc18cd8f2b61d') def test_store_bytes_with_params(self): a = self.o.claim_bucket() label = "foo.txt" b = self.o.put_stream(a, label, "Some bytes to store", { "a": "1", "b": [1, 2, 3, 4, 5] }) self.assertEquals(b['a'], "1") self.assertEquals(b['b'], [1, 2, 3, 4, 5]) self.assertEquals(b['_label'], "foo.txt") self.assertEquals(b['_content_length'], 19) self.assertEquals(b['_checksum'], 'md5:eee89bbbcf416f658c7bc18cd8f2b61d') def test_store_params_after_bytes(self): a = self.o.claim_bucket() label = "foo.txt" self.o.put_stream(a, label, "Some bytes to store") b = self.o.update_metadata(a, label, {"a": "1", "b": [1, 2, 3, 4, 5]}) self.assertEquals(b['a'], "1") self.assertEquals(b['b'], [1, 2, 3, 4, 5]) def test_params_persistence(self): a = self.o.claim_bucket() label = "foo.txt" self.o.put_stream(a, label, "Some bytes to store", { "a": "1", "b": [1, 2, 3, 4, 5] }) b = self.o.get_metadata(a, label) self.assertEquals(b['a'], "1") self.assertEquals(b['b'], [1, 2, 3, 4, 5]) def test_params_deletion(self): a = self.o.claim_bucket() label = "foo.txt" self.o.put_stream(a, label, "Some bytes to store", { "a": "1", "b": [1, 2, 3, 4, 5] }) self.o.del_metadata_keys(a, label, ['b']) b = self.o.get_metadata(a, label) self.assertEquals(b['a'], "1") self.assertFalse(b.has_key('b'))