class News:
    """Stores and retrieves news files for one station via a pairtree OFS bucket."""

    __bucket_keeper = None   # OFS storage backend (PTOFS)
    __bucket_id = None       # bucket name, "<label>_<station_id>"
    __bucket_label = None
    __station_id = None
    # BUG FIX: the original class body contained stray module-level debug
    # statements (o.put_stream(...), a for-loop printing "itemi") that
    # referenced undefined names and raised NameError when the class was
    # defined; they have been removed.

    def __init__(self, bucket_label, station_id):
        """Bind this instance to the bucket "<bucket_label>_<station_id>"."""
        self.__station_id = station_id
        self.__bucket_label = bucket_label
        self.__bucket_keeper = PTOFS()  # currently using pairtree with file system
        # BUG FIX: the original passed one set literal {label, station} to
        # format(), so the "{1}" placeholder raised IndexError; pass the two
        # values positionally instead.
        self.__bucket_id = "{0}_{1}".format(self.__bucket_label, self.__station_id)

    def add_news_file(self, news_file_location, extra_json):
        """Store a stream under this bucket; extra_json is passed through to put_stream."""
        result = self.__bucket_keeper.put_stream(self.__bucket_id, news_file_location, extra_json)
        print(result)  # call-style print works on both Python 2 and 3

    def get_news_files(self):
        """Return whatever the backend lists for this bucket."""
        return self.__bucket_keeper.list(self.__bucket_id)

    def get_latest_news_file(self):
        # Not implemented yet.
        pass
def save_to_ofs(session, filename, validity, station_id, category):
    """Persist *filename* into the station/category OFS bucket, then play a
    confirmation prompt back on the call session.

    Relies on module globals: base_files_directory, base_prompts_directory,
    station_message_saved_messages, station_lines.
    """
    store = PTOFS(storage_dir="/home/amour/media/gdrive/Community Media/data")
    bucket_id = "{0}_{1}".format(station_id, category)
    session.consoleLog("info", "bucket id is " + bucket_id)
    source_path = "{0}/{1}".format(base_files_directory, filename)
    outcome = store.put_stream(
        bucket_id,
        filename,
        open(source_path),
        params={"validity": validity},
    )
    session.consoleLog("info", str(outcome))
    line_key = station_lines[session.getVariable('destination_number')]
    prompt_path = "{0}/{1}".format(base_prompts_directory, station_message_saved_messages[line_key])
    session.streamFile(prompt_path)
class News:
    """Stores and retrieves news files for one station via a pairtree OFS bucket."""

    __bucket_keeper = None   # OFS storage backend (PTOFS)
    __bucket_id = None       # bucket name, "<label>_<station_id>"
    __bucket_label = None
    __station_id = None
    # BUG FIX: removed stray module-level debug statements that referenced the
    # undefined names `o` and `bucket_id` (and printed the typo "itemi"); they
    # raised NameError as soon as the class body executed.

    def __init__(self, bucket_label, station_id):
        """Bind this instance to the bucket "<bucket_label>_<station_id>"."""
        self.__station_id = station_id
        self.__bucket_label = bucket_label
        self.__bucket_keeper = PTOFS()  # currently using pairtree with file system
        # BUG FIX: format() was given a single set literal, so placeholder
        # "{1}" raised IndexError; supply both values positionally.
        self.__bucket_id = "{0}_{1}".format(self.__bucket_label, self.__station_id)

    def add_news_file(self, news_file_location, extra_json):
        """Store a stream under this bucket; extra_json is forwarded to put_stream."""
        result = self.__bucket_keeper.put_stream(self.__bucket_id, news_file_location, extra_json)
        print(result)  # call-style print works on Python 2 and 3

    def get_news_files(self):
        """Return the backend's listing for this bucket."""
        return self.__bucket_keeper.list(self.__bucket_id)

    def get_latest_news_file(self):
        # Not implemented yet.
        pass
def __init__(self, bucket_label, station_id):
    """Bind this instance to the pairtree bucket "<bucket_label>_<station_id>"."""
    self.__station_id = station_id
    self.__bucket_label = bucket_label
    self.__bucket_keeper = PTOFS()  # currently using pairtree with file system
    # BUG FIX: the original called format() with a single set literal
    # {self.__bucket_label, self.__station_id}, so the "{1}" placeholder
    # raised IndexError (and set ordering is undefined anyway); pass the two
    # values as separate positional arguments.
    self.__bucket_id = "{0}_{1}".format(self.__bucket_label, self.__station_id)
def save_to_ofs(session, filename, validity, station_id, category):
    """Write *filename* into the OFS bucket for this station/category and log
    the backend's result on the session console."""
    store = PTOFS()
    bucket_id = "{0}_{1}".format(station_id, category)
    session.consoleLog("info", "bucket id is " + bucket_id)
    payload = open("{0}/{1}".format(base_files_directory, filename))
    outcome = store.put_stream(bucket_id, filename, payload, params={"validity": validity})
    session.consoleLog("info", str(outcome))
def __init__(self, bucket_label, station_id):
    """Bind this instance to the on-disk pairtree bucket "<station_id>_<bucket_label>"."""
    self.__station_id = station_id
    self.__bucket_label = bucket_label
    self.__bucket_keeper = PTOFS(
        storage_dir="/home/amour/media/gdrive/Community Media/data"
    )  # currently using pairtree with file system
    self.__bucket_id = "{0}_{1}".format(self.__station_id, self.__bucket_label)
    # FIX: debug message previously read "bucket id s"; also use call-style
    # print so the line is valid on both Python 2 and 3.
    print("bucket id is " + self.__bucket_id)
def save_to_ofs(session, filename, validity, station_id, category):
    """Store *filename* in the "<station_id>_<category>" OFS bucket with a
    validity window, logging both the bucket id and the store result."""
    bucket_id = "{0}_{1}".format(station_id, category)
    session.consoleLog("info", "bucket id is " + bucket_id)
    keeper = PTOFS()
    result = keeper.put_stream(
        bucket_id,
        filename,
        open("{0}/{1}".format(base_files_directory, filename)),
        params={"validity": validity},
    )
    session.consoleLog("info", str(result))
def save_to_ofs(session, filename, validity, station_id, category):
    """Persist *filename* into the station/category bucket of the shared OFS
    store, then stream the station's "message saved" prompt to the caller.

    Uses module globals: base_files_directory, base_prompts_directory,
    station_message_saved_messages, station_lines.
    """
    keeper = PTOFS(storage_dir="/home/amour/media/gdrive/Community Media/data")
    bucket_id = "{0}_{1}".format(station_id, category)
    session.consoleLog("info", "bucket id is " + bucket_id)
    media_path = "{0}/{1}".format(base_files_directory, filename)
    store_result = keeper.put_stream(
        bucket_id,
        filename,
        open(media_path),
        params={"validity": validity},
    )
    session.consoleLog("info", str(store_result))
    station_line = station_lines[session.getVariable("destination_number")]
    session.streamFile(
        "{0}/{1}".format(base_prompts_directory, station_message_saved_messages[station_line])
    )
class CommunityMedia:
    """Access layer for one station's media files in a pairtree OFS bucket.

    Bucket naming convention: "<station_id>_<bucket_label>".
    """

    __bucket_keeper = None   # OFS storage backend (PTOFS)
    __bucket_id = None       # bucket name, "<station_id>_<bucket_label>"
    __bucket_label = None
    __station_id = None

    def __init__(self, bucket_label, station_id):
        self.__station_id = station_id
        self.__bucket_label = bucket_label
        self.__bucket_keeper = PTOFS(
            storage_dir="/home/amour/media/gdrive/Community Media/data"
        )  # currently using pairtree with file system
        self.__bucket_id = "{0}_{1}".format(self.__station_id, self.__bucket_label)
        # FIX: message previously read "bucket id s"; call-style print works
        # on both Python 2 and 3.
        print("bucket id is " + self.__bucket_id)

    def add_news_file(self, news_file_location, extra_json):
        """Store a media stream in this bucket; extra_json is forwarded to put_stream."""
        result = self.__bucket_keeper.put_stream(self.__bucket_id, news_file_location, extra_json)
        print(result)

    def get_media_files(self):
        """Return sanitized paths of every media file still inside its validity window."""
        media_files = []
        for label in self.__bucket_keeper.list_labels(self.__bucket_id):
            metadata_json = self.__bucket_keeper.get_metadata(self.__bucket_id, label)
            if self.__is_valid(metadata_json["_creation_date"], metadata_json["validity"]):
                file_url = self.__bucket_keeper.get_url(self.__bucket_id, label)
                media_files.append(self.__sanitize_url(file_url))
        return media_files

    def __is_valid(self, creation_date, validity):
        """True while the file is younger than *validity* days.

        creation_date is "YYYY-MM-DDTHH:MM:SS" (compared against utcnow(), so
        assumed UTC -- TODO confirm); validity is a day count, int or numeric
        string.
        """
        sanitized_time = creation_date.replace('T', ' ')
        creation_time = datetime.strptime(sanitized_time, '%Y-%m-%d %H:%M:%S')
        time_delta = timedelta(days=int(validity))
        now = datetime.utcnow()
        return (time_delta + creation_time) > now

    def __sanitize_url(self, url):
        """Strip any leading "scheme://" prefix and return the bare path."""
        parts = url.split("://")
        return parts[len(parts) - 1]
class CommunityMedia:
    """Reads a station's media files out of a pairtree OFS bucket, filtering
    out entries whose validity window (in days) has expired."""

    __bucket_keeper = None   # OFS storage backend (PTOFS)
    __bucket_id = None       # bucket name, "<station_id>_<bucket_label>"
    __bucket_label = None
    __station_id = None

    def __init__(self, bucket_label, station_id):
        self.__station_id = station_id
        self.__bucket_label = bucket_label
        self.__bucket_keeper = PTOFS(
            storage_dir="/home/amour/media/gdrive/Community Media/data"
        )  # currently using pairtree with file system
        self.__bucket_id = "{0}_{1}".format(self.__station_id, self.__bucket_label)
        # FIX: debug line previously printed "bucket id s"; print() form is
        # valid on Python 2 and 3.
        print("bucket id is " + self.__bucket_id)

    def add_news_file(self, news_file_location, extra_json):
        """Put a stream into this bucket; extra_json is passed straight to put_stream."""
        result = self.__bucket_keeper.put_stream(self.__bucket_id, news_file_location, extra_json)
        print(result)

    def get_media_files(self):
        """Return sanitized file paths for every still-valid item in the bucket."""
        media_files = []
        for label in self.__bucket_keeper.list_labels(self.__bucket_id):
            metadata_json = self.__bucket_keeper.get_metadata(self.__bucket_id, label)
            if self.__is_valid(metadata_json["_creation_date"], metadata_json["validity"]):
                file_url = self.__bucket_keeper.get_url(self.__bucket_id, label)
                media_files.append(self.__sanitize_url(file_url))
        return media_files

    def __is_valid(self, creation_date, validity):
        """True while creation_date + validity days is still in the future.

        creation_date is "YYYY-MM-DDTHH:MM:SS" (compared to utcnow(), so UTC
        is assumed -- TODO confirm); validity may be an int or numeric string.
        """
        sanitized_time = creation_date.replace('T', ' ')
        creation_time = datetime.strptime(sanitized_time, '%Y-%m-%d %H:%M:%S')
        time_delta = timedelta(days=int(validity))
        now = datetime.utcnow()
        return (time_delta + creation_time) > now

    def __sanitize_url(self, url):
        """Drop any "scheme://" prefix from *url*, keeping only the path part."""
        parts = url.split("://")
        return parts[len(parts) - 1]
# Ad-hoc inspection script: list every label in the advertisements bucket and
# dump each label's metadata.
from ofs.local import PTOFS

store = PTOFS(storage_dir="/home/amour/data")
bucket_id = "1_advertisments"
print(bucket_id)
for label in store.list_labels(bucket_id):
    print(label)
    metadata = store.get_metadata(bucket_id, label)
    print(metadata)
from flask import Flask, make_response, jsonify, request, send_file
from flask.ext.restful import Api, Resource, reqparse
from flask.ext.restful.utils import cors
import werkzeug
from flask.ext.httpauth import HTTPBasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
from ofs.local import PTOFS
import argparse
import hashlib

# Flask application wiring: REST API, HTTP basic auth, a pairtree OFS store,
# and a SQLite database for user records.
app = Flask(__name__)
api = Api(app)
# api.decorators=[cors.crossdomain(origin='*')]
auth = HTTPBasicAuth()
o = PTOFS()  # default storage_dir; the OFS file store used by the API
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'
db = SQLAlchemy(app)

class User(db.Model):
    # Account record: username/email/key/uuid are unique; password is not
    # (stored as a plain string column here -- hashing, if any, happens
    # elsewhere).
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True)
    email = db.Column(db.String(120), unique=True)
    password = db.Column(db.String(120), unique=False)
    key = db.Column(db.String(200), unique=True)
    uuid = db.Column(db.String(200), unique=True)

    def __init__(self, username, email, passw, key, uuid):
        # NOTE(review): the `uuid` parameter is accepted but never assigned to
        # self.uuid in the visible code -- confirm whether the assignment was
        # lost (this chunk may be truncated) or the column is set elsewhere.
        self.username = username
        self.email = email
        self.password = passw
        self.key = key
# Debug helper: walk the "1_advertisments" bucket, printing each stored label
# followed by its metadata record.
from ofs.local import PTOFS

o = PTOFS(storage_dir="/home/amour/data")
bucket_id = "1_advertisments"
print(bucket_id)
for item in o.list_labels(bucket_id):
    print(item)
    item_metadata = o.get_metadata(bucket_id, item)
    print(item_metadata)
def setUp(self):
    # Each test runs against a throwaway pairtree store on disk; the
    # companion tearDown removes the directory.
    storage_path = "pt_deleteme"
    self.o = PTOFS(storage_dir=storage_path)
class TestPairtreeOFS(unittest.TestCase):
    """Exercises the pairtree-backed OFS store against a throwaway directory."""

    def setUp(self):
        self.o = PTOFS(storage_dir="pt_deleteme")

    def tearDown(self):
        shutil.rmtree("pt_deleteme")

    def test_empty(self):
        pass

    def test_claim_bucket(self):
        a = self.o.claim_bucket()
        self.assertTrue(self.o.exists(a))

    def test_store_bytes_no_params(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        b = self.o.put_stream(a, label, "Some bytes to store")
        # FIX: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(b['_label'], "foo.txt")
        self.assertEqual(b['_content_length'], 19)
        self.assertEqual(b['_checksum'], 'md5:eee89bbbcf416f658c7bc18cd8f2b61d')

    def test_store_bytes_with_params(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        b = self.o.put_stream(a, label, "Some bytes to store", {"a": "1", "b": [1, 2, 3, 4, 5]})
        self.assertEqual(b['a'], "1")
        self.assertEqual(b['b'], [1, 2, 3, 4, 5])
        self.assertEqual(b['_label'], "foo.txt")
        self.assertEqual(b['_content_length'], 19)
        self.assertEqual(b['_checksum'], 'md5:eee89bbbcf416f658c7bc18cd8f2b61d')

    def test_store_params_after_bytes(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        self.o.put_stream(a, label, "Some bytes to store")
        b = self.o.update_metadata(a, label, {"a": "1", "b": [1, 2, 3, 4, 5]})
        self.assertEqual(b['a'], "1")
        self.assertEqual(b['b'], [1, 2, 3, 4, 5])

    def test_params_persistence(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        self.o.put_stream(a, label, "Some bytes to store", {"a": "1", "b": [1, 2, 3, 4, 5]})
        b = self.o.get_metadata(a, label)
        self.assertEqual(b['a'], "1")
        self.assertEqual(b['b'], [1, 2, 3, 4, 5])

    def test_params_deletion(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        self.o.put_stream(a, label, "Some bytes to store", {"a": "1", "b": [1, 2, 3, 4, 5]})
        self.o.del_metadata_keys(a, label, ['b'])
        b = self.o.get_metadata(a, label)
        self.assertEqual(b['a'], "1")
        # FIX: dict.has_key() was removed in Python 3; assertNotIn is the
        # idiomatic replacement.
        self.assertNotIn('b', b)
class TestPairtreeOFS(unittest.TestCase):
    """Round-trip tests for the pairtree OFS store using a temporary directory."""

    def setUp(self):
        self.o = PTOFS(storage_dir="pt_deleteme")

    def tearDown(self):
        shutil.rmtree("pt_deleteme")

    def test_empty(self):
        pass

    def test_claim_bucket(self):
        a = self.o.claim_bucket()
        self.assertTrue(self.o.exists(a))

    def test_store_bytes_no_params(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        b = self.o.put_stream(a, label, "Some bytes to store")
        # FIX: assertEquals is a deprecated alias for assertEqual.
        self.assertEqual(b['_label'], "foo.txt")
        self.assertEqual(b['_content_length'], 19)
        self.assertEqual(b['_checksum'], 'md5:eee89bbbcf416f658c7bc18cd8f2b61d')

    def test_store_bytes_with_params(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        b = self.o.put_stream(a, label, "Some bytes to store", {
            "a": "1",
            "b": [1, 2, 3, 4, 5]
        })
        self.assertEqual(b['a'], "1")
        self.assertEqual(b['b'], [1, 2, 3, 4, 5])
        self.assertEqual(b['_label'], "foo.txt")
        self.assertEqual(b['_content_length'], 19)
        self.assertEqual(b['_checksum'], 'md5:eee89bbbcf416f658c7bc18cd8f2b61d')

    def test_store_params_after_bytes(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        self.o.put_stream(a, label, "Some bytes to store")
        b = self.o.update_metadata(a, label, {"a": "1", "b": [1, 2, 3, 4, 5]})
        self.assertEqual(b['a'], "1")
        self.assertEqual(b['b'], [1, 2, 3, 4, 5])

    def test_params_persistence(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        self.o.put_stream(a, label, "Some bytes to store", {
            "a": "1",
            "b": [1, 2, 3, 4, 5]
        })
        b = self.o.get_metadata(a, label)
        self.assertEqual(b['a'], "1")
        self.assertEqual(b['b'], [1, 2, 3, 4, 5])

    def test_params_deletion(self):
        a = self.o.claim_bucket()
        label = "foo.txt"
        self.o.put_stream(a, label, "Some bytes to store", {
            "a": "1",
            "b": [1, 2, 3, 4, 5]
        })
        self.o.del_metadata_keys(a, label, ['b'])
        b = self.o.get_metadata(a, label)
        self.assertEqual(b['a'], "1")
        # FIX: dict.has_key() does not exist on Python 3; use assertNotIn.
        self.assertNotIn('b', b)
def __init__(self, bucket_label, station_id):
    """Bind this instance to the pairtree bucket "<bucket_label>_<station_id>"."""
    self.__station_id = station_id
    self.__bucket_label = bucket_label
    self.__bucket_keeper = PTOFS()  # currently using pairtree with file system
    # BUG FIX: format() was given one set literal {label, station}, so the
    # "{1}" placeholder raised IndexError; supply the two values as separate
    # positional arguments.
    self.__bucket_id = "{0}_{1}".format(self.__bucket_label, self.__station_id)
def __init__(self, bucket_label, station_id):
    """Bind this instance to the on-disk pairtree bucket "<station_id>_<bucket_label>"."""
    self.__station_id = station_id
    self.__bucket_label = bucket_label
    self.__bucket_keeper = PTOFS(
        storage_dir="/home/amour/media/gdrive/Community Media/data"
    )  # currently using pairtree with file system
    self.__bucket_id = "{0}_{1}".format(self.__station_id, self.__bucket_label)
    # FIX: debug output previously read "bucket id s"; print() form is valid
    # on both Python 2 and 3.
    print("bucket id is " + self.__bucket_id)
# Quick check: print every label stored in bucket "11_1" of the community
# media store.
from ofs.local import PTOFS

store = PTOFS(storage_dir="/home/amour/media/gdrive/Community Media/data")
bucket_id = "11_1"
print(bucket_id)
for label in store.list_labels(bucket_id):
    print(label)
# Lists the contents of the "11_1" bucket for manual verification.
from ofs.local import PTOFS

keeper = PTOFS(storage_dir="/home/amour/media/gdrive/Community Media/data")
target_bucket = "11_1"
print(target_bucket)
for entry in keeper.list_labels(target_bucket):
    print(entry)