def photo_thumbnail(request, owner, nb):
    """Serve the 160x160 JPEG thumbnail for photo *nb* of *owner* from MogileFS."""
    storage = Client(domain=settings.MOGILEFS_DOMAIN,
                     trackers=settings.MOGILEFS_TRACKERS)
    thumb_key = path.join(settings.PROCESSED_PHOTOS_DIR,
                          '%s,%s-160x160.jpg' % (owner, nb))
    thumb = storage.read_file(thumb_key)
    return HttpResponse(make_reader(thumb), content_type='image/jpeg')
def test_mkcol(self):
    """Creating several small files must yield reachable paths for each key."""
    client = Client(TEST_NS, HOSTS)
    for idx in xrange(0, 10):
        key = 'test_file_%s_%s_%d' % (random.random(), time.time(), idx)
        client.new_file(key).write("SPAM%s" % idx)
        paths = client.get_paths(key)
        self.assertTrue(paths)
def func(largefile):
    """After close(), every file operation must raise ValueError.

    Mirrors test_closed_file, which pins the closed-file error type to
    ValueError via assertRaises.
    """
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key, largefile=largefile)
    fp.write("spam")
    fp.close()
    # The original used bare ``except:`` clauses, which also swallowed
    # KeyboardInterrupt/SystemExit and any unrelated bug; catch the specific
    # closed-file error instead.
    for op in (lambda: fp.write("egg"),
               lambda: fp.read(),
               lambda: fp.seek(0),
               lambda: fp.tell()):
        try:
            op()
        except ValueError:
            pass
        else:
            assert False, "operation not permitted to closed file"
def test_mkcol():
    """Stress variant: 1000 small files, each must resolve to at least one path."""
    client = Client(TEST_NS, HOSTS)
    for idx in xrange(0, 1000):
        key = 'test_file_%s_%s_%d' % (random.random(), time.time(), idx)
        client.new_file(key).write("SPAM%s" % idx)
        assert client.get_paths(key)
def main(argv):
    """Dump every key of a MogileFS domain into files under an output directory.

    Usage: prog -d <domain> -t <trackers> -o <output-dir>

    :param argv: command-line arguments (excluding the program name).
    """
    mog_domain = ''
    mog_trackers = ''
    output_dir = ''
    try:
        opts, args = getopt.getopt(
            argv, "d:t:o:", ["domain=", "trackers=", "output-dir="])
    except getopt.GetoptError:
        # The original printed usage but fell through, crashing on the
        # undefined ``opts`` with a NameError; exit explicitly instead.
        print(sys.argv[0] + ' -d <domain> -t <trackers> -o <output-dir>')
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-d", "--domain"):
            mog_domain = arg
        elif opt in ("-t", "--trackers"):
            mog_trackers = arg
        elif opt in ("-o", "--output-dir"):
            # Normalise to a trailing slash so plain concatenation below works.
            output_dir = arg if arg.endswith('/') else arg + '/'
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    datastore = Client(domain=mog_domain, trackers=mog_trackers.split(','))
    keys = datastore.keys()
    start = time.time()
    for key in keys:
        print('downloading: ' + key)
        # 'wb' + context manager: binary-safe write, handle closed even on
        # error (the original opened in text mode, shadowed the ``file``
        # builtin, and mixed print-statement with print-function syntax).
        with open(output_dir + key, 'wb') as out:
            out.write(datastore.get_file_data(key))
    print('dump completed')
    print("time used : %.2f seconds" % (time.time() - start))
def test_file_like_object(self):
    """readline()/readlines() must behave like a regular file object."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key)
    fp.write("spam\negg\nham\n")
    fp.seek(0)
    # Three lines, then the empty string at EOF.
    for expected in ("spam\n", "egg\n", "ham\n", ''):
        self.assertEqual(fp.readline(), expected)
    fp.seek(0)
    self.assertEqual(fp.readlines(), ["spam\n", "egg\n", "ham\n"])
    fp.close()
def test_mkcol(self):
    """Ten small writes; each key must resolve to at least one path."""
    client = Client(TEST_NS, HOSTS)
    for n in xrange(0, 10):
        key = "test_file_%s_%s_%d" % (random.random(), time.time(), n)
        client.new_file(key).write("SPAM%s" % n)
        self.assertTrue(client.get_paths(key))
def update(self, title=None, group_id=None, image_data=None, status=None):
    """Apply partial updates to the category; None arguments are skipped.

    When *image_data* is given, 86x86 and 43x43 icons are written to
    MogileFS under a content-hash key and the hash is stored on the row.
    """
    self.__ensure_category_obj()
    # ``is not None`` rather than ``!= None``: identity test is the idiomatic
    # (and __eq__-proof) way to detect an omitted argument.
    if title is not None:
        self.category_obj.title = title
    if group_id is not None:
        self.category_obj.group_id = group_id
    if status is not None:
        self.category_obj.status = int(status)
    if image_data is not None:
        self.__datastore = Client(domain=settings.MOGILEFS_DOMAIN,
                                  trackers=settings.MOGILEFS_TRACKERS)
        _key = md5(image_data).hexdigest()
        # Two fixed icon sizes, keyed by content hash.
        for _prefix, _size in (('category/large/', 86),
                               ('category/small/', 43)):
            _fp = self.__datastore.new_file(_prefix + _key)
            _fp.write(self.__resize(image_data, _size, _size))
            _fp.close()
        self.category_obj.image_store_hash = _key
    self.category_obj.save()
def test_read_file():
    """store_content() followed by read_file() must round-trip the payload."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.store_content(key, key)
    handle = client.read_file(key)
    assert handle is not None
    assert handle.read() == key
def test_read_file(self):
    """unittest variant of the read_file() round-trip."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.store_content(key, key)
    handle = client.read_file(key)
    self.assertNotEqual(handle, None)
    self.assertEqual(key, handle.read())
def func(largefile):
    """new_file() against a nonexistent domain must raise MogileFSError.

    Consistent with test_new_file_unexisting_domain, which pins the same
    behaviour via assertRaises.
    """
    client = Client('spamdomain', HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    try:
        client.new_file(key)
    # Was ``except MogileFSError, e`` — Python-2-only syntax with an unused
    # binding; and there was no ``else`` clause, so the test passed even when
    # no error was raised at all.
    except MogileFSError:
        pass
    else:
        assert False, "expected MogileFSError for unknown domain"
def test_rename_dupliate_key(self):
    """Renaming onto an occupied key must return False, not overwrite."""
    client = Client(TEST_NS, HOSTS)
    src = 'test_file_%s_%s' % (random.random(), time.time())
    dst = 'key2:' + src
    client.store_content(src, src)
    client.store_content(dst, dst)
    self.assertEqual(client.rename(src, dst), False)
def test_read_file():
    """Stored content must be readable back under the same key.

    The original constructed a second, identical Client and threw the first
    away — a pointless duplicate tracker connection, removed here.
    """
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.store_content(key, key)
    with client.read_file(key) as fp:
        assert fp.read() == key
def test_store_content():
    """store_content() must report the stored length and preserve the bytes."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    payload = ''.join(random.choice("0123456789") for _ in xrange(8192 * 2))
    assert client.store_content(key, payload) == len(payload)
    assert client.get_file_data(key) == payload
def func(largefile):
    """new_file() with an unknown storage class must raise MogileFSError."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    try:
        client.new_file(key, cls='spam')
    except MogileFSError:
        pass
    else:
        assert False
def test_store_content(self):
    """unittest variant: stored length and content must both round-trip."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    payload = ''.join(random.choice("0123456789") for _ in xrange(8192 * 2))
    self.assertEqual(client.store_content(key, payload), len(payload))
    self.assertEqual(client.get_file_data(key), payload)
def test_new_file():
    """new_file() must work as a context manager and persist what was written."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    with client.new_file(key) as fp:
        # The handle must expose the context-manager protocol.
        assert fp.__exit__
        fp.write(key)
    assert client.get_paths(key)
    assert client.get_file_data(key) == key
def test_store_file(self):
    """store_file() must consume a file-like object and preserve its bytes."""
    client = Client(TEST_NS, HOSTS)
    key = "test_file_%s_%s" % (random.random(), time.time())
    payload = "".join(random.choice("0123456789") for _ in xrange(8192 * 2))
    stored = client.store_file(key, StringIO(payload))
    self.assertEqual(stored, len(payload))
    self.assertEqual(client.get_file_data(key), payload)
def test_closed_file(self):
    """Every operation on a closed file must raise ValueError."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key)
    fp.write("spam")
    fp.close()
    # write/read/seek/tell are all forbidden once the handle is closed.
    self.assertRaises(ValueError, fp.write, "egg")
    self.assertRaises(ValueError, fp.read)
    self.assertRaises(ValueError, fp.seek, 0)
    self.assertRaises(ValueError, fp.tell)
def test_seek(self):
    """seek() then write() must overwrite in place, not append."""
    client = Client(TEST_NS, HOSTS)
    key = "test_file_%s_%s" % (random.random(), time.time())
    fp = client.new_file(key)
    fp.write("SPAM")
    fp.seek(1)
    self.assertEqual(fp.tell(), 1)
    fp.write("p")
    fp.close()
    self.assertEqual(client.get_file_data(key), "SpAM")
def test_rename_dupliate_key():
    """rename() onto an existing key must not succeed silently.

    The unittest variant of this test expects rename() to return False;
    this variant tolerates an exception instead, so only the error is
    swallowed deliberately.
    """
    client = Client(TEST_NS, HOSTS)
    key1 = 'test_file_%s_%s' % (random.random(), time.time())
    key2 = 'key2:' + key1
    client.store_content(key1, key1)
    client.store_content(key2, key2)
    try:
        client.rename(key1, key2)
    # Was ``except MogileFSError, e`` — Python-2-only syntax binding an
    # unused variable; the portable spelling drops the binding.
    except MogileFSError:
        pass
def func(largefile):
    """Seeking before the start must clamp the position to offset 0."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key, largefile=largefile)
    fp.write("SPAM")
    fp.seek(-10)
    assert fp.tell() == 0
    fp.write("s")
    fp.close()
    assert client.get_file_data(key) == "sPAM"
def test_seek_negative(self):
    """A negative seek target must clamp to position 0."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key)
    fp.write("SPAM")
    fp.seek(-10)
    self.assertEqual(fp.tell(), 0)
    fp.write("s")
    fp.close()
    self.assertEqual(client.get_file_data(key), "sPAM")
def test_readonly_file():
    """Writing to a handle returned by read_file() must fail."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.store_content(key, "SPAM")
    fp = client.read_file(key)
    try:
        fp.write("egg")
    # Was a bare ``except:``, which also caught KeyboardInterrupt/SystemExit.
    # NOTE(review): the concrete error type for writing to a read-only handle
    # is not pinned anywhere in this suite — narrow this once confirmed.
    except Exception:
        pass
    else:
        assert False, "operation not permitted to read-only file"
def test_seek_read(self):
    """seek() repositions the read cursor; read(3) returns and advances 3 bytes."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.store_content(key, "0123456789")
    fp = client.read_file(key)
    fp.seek(1)
    self.assertEqual(fp.tell(), 1)
    content = fp.read(3)
    # Was a bare ``assert`` mixed into a unittest method (stripped under -O,
    # poor failure message); use self.assertEqual like the surrounding checks.
    self.assertEqual(content, "123")
    self.assertEqual(fp.tell(), 4)
def test_seek_read():
    """seek() must reposition the read cursor; read(n) advances it by n."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.store_content(key, "0123456789")
    fp = client.read_file(key)
    fp.seek(1)
    assert fp.tell() == 1
    assert fp.read(3) == "123"
    assert fp.tell() == 4
def get_mogile_client(self, node):
    """Build a MogileFS client for *node* and verify it answers a request.

    :param node: tracker address(es), comma-separated "host:port" string.
    :raises Exception: when the tracker does not respond to a test request.
    """
    logging.debug(
        'Initializing mogilefs client for domain: %s, tracker: %s',
        self.domain, node)
    client = Client(domain=self.domain, trackers=node.split(','))
    # sleep(1) doubles as a cheap liveness probe against the tracker.
    if not client.sleep(1):
        raise Exception(
            'Connection to mogilefs server {} failed'.format(node))
    logging.info('Successfuly connected to mogilefs server %s', node)
    return client
def prepare_photo_files(owner, nb):
    """Build the 160x160 thumbnail for a stored photo and mark it ready."""
    storage = Client(domain=settings.MOGILEFS_DOMAIN,
                     trackers=settings.MOGILEFS_TRACKERS)
    src_key = path.join(settings.UNPROCESSED_PHOTOS_DIR,
                        '%s,%s.jpg' % (owner, nb))
    img = Image.open(storage.read_file(src_key))
    img.thumbnail((160, 160), Image.ANTIALIAS)
    dst_key = path.join(settings.PROCESSED_PHOTOS_DIR,
                        '%s,%s-160x160.jpg' % (owner, nb))
    # NOTE(review): the new_file handle is never closed explicitly here —
    # presumably Image.save / garbage collection commits it; verify.
    thumb_file = storage.new_file(dst_key)
    img.save(thumb_file, 'JPEG')
    photo = Photo.get(owner, nb)
    photo.ready()
def __init__(self, base_url=None, container=None, domain=None, trackers=None,
             client=None):
    """Configure the MogileFS-backed storage.

    :param base_url: public facing url prefix, e.g. "/my/private/docs/"
    :param container: key prefix inside the MogileFS domain, e.g. "documents"
    :param domain: MogileFS domain to store into, e.g. "privatemedia"
    :param trackers: list of MogileFS trackers
    :param client: pre-built Client; one is created from the above if omitted

    With the example values, the file "my/file.pdf" is stored under key
    "documents/my/file.pdf" in domain "privatemedia" and served at the
    public url "/my/private/docs/my/file.pdf".
    """
    self.base_url = base_url or settings.MEDIA_URL
    self.container = container or getattr(
        settings, 'MOGILEFS_MEDIA_CONTAINER', '')
    self.domain = domain or getattr(settings, 'MOGILEFS_DOMAIN', '')
    self.trackers = trackers or getattr(settings, 'MOGILEFS_TRACKERS', [])
    self.client = client or Client(domain=self.domain, trackers=self.trackers)
def prepare_photo_files(owner, nb):
    """Generate the 160x160 thumbnail for photo *nb* of *owner*, then mark ready."""
    storage = Client(domain=settings.MOGILEFS_DOMAIN,
                     trackers=settings.MOGILEFS_TRACKERS)
    source = storage.read_file(
        path.join(settings.UNPROCESSED_PHOTOS_DIR,
                  '%s,%s.jpg' % (owner, nb)))
    image = Image.open(source)
    image.thumbnail((160, 160), Image.ANTIALIAS)
    target = storage.new_file(
        path.join(settings.PROCESSED_PHOTOS_DIR,
                  '%s,%s-160x160.jpg' % (owner, nb)))
    image.save(target, 'JPEG')
    # Flip the photo's state once the thumbnail exists.
    photo = Photo.get(owner, nb)
    photo.ready()
def test_new_file(self):
    """new_file() returns a writable handle whose content round-trips."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key)
    # Bare ``assert`` statements in a unittest method are stripped under -O
    # and give poor failure messages; use self.assert* like sibling tests.
    self.assertNotEqual(fp, None)
    data = "0123456789" * 50
    fp.write(data)
    fp.close()
    paths = client.get_paths(key)
    # Replication count depends on the test cluster, so only existence is
    # checked (the stricter len(paths) > 1 check was already disabled).
    self.assertTrue(paths)
    self.assertEqual(client.get_file_data(key), data)
def test_new_file():
    """Write via new_file(), then verify paths exist and content round-trips."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key)
    assert fp is not None
    payload = "0123456789" * 50
    fp.write(payload)
    fp.close()
    # Replication count is environment-dependent, so only existence is checked.
    assert client.get_paths(key)
    assert client.get_file_data(key) == payload
def test_new_large_file():
    """largefile=True path: 50 chunked writes must concatenate correctly."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key, largefile=True)
    assert fp is not None
    for _ in xrange(50):
        fp.write("0123456789")
    fp.close()
    # Replication count is environment-dependent, so only existence is checked.
    assert client.get_paths(key)
    assert client.get_file_data(key) == "0123456789" * 50
def test_rename(self):
    """rename() must make content reachable under the new key."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.new_file(key).write(key)
    paths = client.get_paths(key)
    self.assertTrue(paths)
    newkey = 'test_file2_%s_%s' % (random.random(), time.time())
    client.rename(key, newkey)
    paths = client.get_paths(newkey)
    self.assertTrue(paths)
    # Was a bare ``assert`` (stripped under -O, poor failure message); use
    # unittest's assertion for consistency with the rest of the method.
    self.assertEqual(client.get_file_data(newkey), key)
def __init__(self, location=None, base_url=None, **kwargs):
    """Create one MogileFS Client per configured server, plus the hash ring.

    *location* and *base_url* are accepted for storage-backend interface
    compatibility but not used here (as in the original).
    """
    config = settings.DISTRIBUTED_MOGILEFS_CONFIG
    # The original re-read settings.DISTRIBUTED_MOGILEFS_CONFIG['SERVERS']
    # on every loop iteration and indexed by key; read it once and iterate
    # items() instead.
    self.servers = config['SERVERS']
    self.clients = {}
    for name, srv in self.servers.items():
        self.clients[name] = Client(domain=srv['DOMAIN'],
                                    trackers=srv['TRACKERS'])
    self.ring = HashRing(config['SLOTS'])
    self.kwargs = kwargs
def test_delete(self):
    """delete() must remove every path for the key."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.new_file(key).write("SPAM")
    self.assertTrue(client.get_paths(key))
    client.delete(key)
    self.assertFalse(client.get_paths(key))
def create(cls, user_id, origin_data):
    """Store avatar original plus 180x180 and 50x50 crops; upsert AvatarModel.

    :param user_id: owner of the avatar.
    :param origin_data: raw uploaded image bytes.
    :returns: a class instance bound to the (created or updated) avatar row.
    """
    _key = md5(origin_data).hexdigest()
    _datastore = Client(domain=settings.MOGILEFS_DOMAIN,
                        trackers=settings.MOGILEFS_TRACKERS)
    # Untouched original.
    _origin_link = 'avatar/origin/' + _key
    _fp = _datastore.new_file(_origin_link)
    _fp.write(origin_data)
    _fp.close()
    # Both derivatives start from the same square crop.
    _square_data = cls.crop_square(origin_data)
    _large_link = 'avatar/large/' + _key
    _large_data = cls.resize(_square_data, 180, 180)
    _fp = _datastore.new_file(_large_link)
    _fp.write(_large_data)
    _fp.close()
    _small_link = 'avatar/small/' + _key
    _small_data = cls.resize(_square_data, 50, 50)
    _fp = _datastore.new_file(_small_link)
    _fp.write(_small_data)
    _fp.close()
    _inst = cls(user_id)
    # ``is None`` rather than ``== None``: identity check, immune to __eq__.
    if _inst.avatar_obj is None:
        _avatar_obj = AvatarModel.objects.create(
            user_id=user_id,
            avatar_origin=_origin_link,
            avatar_small=_small_link,
            avatar_large=_large_link)
        _inst.avatar_obj = _avatar_obj
    else:
        _inst.avatar_obj.avatar_origin = _origin_link
        _inst.avatar_obj.avatar_large = _large_link
        _inst.avatar_obj.avatar_small = _small_link
        _inst.avatar_obj.save()
    return _inst
def upload(request):
    """Accept a JPEG upload, stash it in MogileFS, and queue thumbnailing."""
    storage = Client(domain=settings.MOGILEFS_DOMAIN,
                     trackers=settings.MOGILEFS_TRACKERS)
    photo_file = request.FILES.get('photo')
    photo_desc = request.POST.get('desc', '')
    # Reject missing files, non-JPEGs and oversized uploads.
    if photo_file is None or not photo_file.name.endswith('.jpg') or \
            photo_file.size > settings.MAX_PHOTO_SIZE:
        # Validation failed ("Mamy błąd" in the original Polish comment).
        return HttpResponseRedirect('/')
    else:
        model = Photo.create(owner=request.user.id, desc=photo_desc)
        raw_key = path.join(settings.UNPROCESSED_PHOTOS_DIR,
                            '%s,%s.jpg' % (model.owner, model.nb))
        with storage.new_file(raw_key) as f:
            for chunk in photo_file.chunks():
                f.write(chunk)
            # The original also called f.close() inside this ``with`` block;
            # the context manager already closes on exit, so the explicit
            # close was redundant (and risked a double-close error).
        prepare_photo_files.delay(model.owner, model.nb)
        return HttpResponseRedirect('/')
def save_square_image_data_fixed(cls, store_hash, image_data):
    """Store the original image plus one square copy per size in Images_size."""
    log.info(store_hash)
    _datastore = Client(domain=settings.MOGILEFS_DOMAIN,
                        trackers=settings.MOGILEFS_TRACKERS)
    # Untouched original upload.
    _fp = _datastore.new_file('img/' + store_hash + '.jpg')
    _fp.write(image_data)
    _fp.close()
    # Square derivatives, keyed "img/<hash>.jpg_<S>x<S>.jpg".
    for _size in Images_size:
        log.info(_size)
        _resized = cls.resize(image_data, _size, _size)
        _fp = _datastore.new_file('img/' + store_hash + '.jpg_'
                                  + str(_size) + 'x' + str(_size) + '.jpg')
        _fp.write(_resized)
        _fp.close()
def upload(request):
    """Validate and store an uploaded JPEG, then queue thumbnail generation."""
    storage = Client(domain=settings.MOGILEFS_DOMAIN,
                     trackers=settings.MOGILEFS_TRACKERS)
    photo_file = request.FILES.get('photo')
    photo_desc = request.POST.get('desc', '')
    # Reject missing files, non-JPEGs and oversized uploads.
    if photo_file is None or not photo_file.name.endswith('.jpg') or \
            photo_file.size > settings.MAX_PHOTO_SIZE:
        # Validation failed ("Mamy błąd" in the original Polish comment).
        return HttpResponseRedirect('/')
    else:
        model = Photo.create(owner=request.user.id, desc=photo_desc)
        with storage.new_file(
                path.join(settings.UNPROCESSED_PHOTOS_DIR,
                          '%s,%s.jpg' % (model.owner, model.nb))) as f:
            for chunk in photo_file.chunks():
                f.write(chunk)
            # Removed the original's explicit f.close() here: the ``with``
            # block closes the handle on exit, so the call was redundant and
            # risked a double-close error.
        prepare_photo_files.delay(model.owner, model.nb)
        return HttpResponseRedirect('/')
def test_rename(self):
    """rename() must carry the content over to the new key."""
    client = Client(TEST_NS, HOSTS)
    key = "test_file_%s_%s" % (random.random(), time.time())
    client.new_file(key).write(key)
    paths = client.get_paths(key)
    self.assertTrue(paths)
    newkey = "test_file2_%s_%s" % (random.random(), time.time())
    client.rename(key, newkey)
    paths = client.get_paths(newkey)
    self.assertTrue(paths)
    # Was a bare ``assert`` in a unittest method (stripped under -O, poor
    # failure message); use self.assertEqual like the surrounding checks.
    self.assertEqual(client.get_file_data(newkey), key)
def test_rename():
    """rename() must move content to the new key."""
    client = Client(TEST_NS, HOSTS)
    old_key = 'test_file_%s_%s' % (random.random(), time.time())
    client.new_file(old_key).write(old_key)
    assert client.get_paths(old_key)
    new_key = 'test_file2_%s_%s' % (random.random(), time.time())
    client.rename(old_key, new_key)
    assert client.get_paths(new_key)
    assert client.get_file_data(new_key) == old_key
def test_delete():
    """Plain-assert variant of the delete test."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    client.new_file(key).write("SPAM")
    assert client.get_paths(key)
    client.delete(key)
    assert not client.get_paths(key)
def func(largefile):
    """readline()/readlines() must work for both small and large files."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    fp = client.new_file(key, largefile=largefile)
    fp.write("spam\negg\nham\n")
    fp.seek(0)
    # Three lines, then the empty string at EOF.
    for expected in ("spam\n", "egg\n", "ham\n", ''):
        assert fp.readline() == expected
    fp.seek(0)
    assert fp.readlines() == ["spam\n", "egg\n", "ham\n"]
    fp.close()
def test_list_keys(self):
    """list_keys() must honour the limit= and prefix= filters."""
    keys = ["spam", "egg", "ham"]
    domain = "test:list_keys:%s:%s:%s" % (
        random.random(), time.time(), TEST_NS)
    self.moga.create_domain(domain)
    mogc = Client(domain, HOSTS)
    for k in keys:
        mogc.store_content(k, k)
    try:
        assert len(mogc.list_keys()) == 3
        assert len(mogc.list_keys(limit=1)) == 1
        # Only "spam" starts with "sp".
        assert len(mogc.list_keys(prefix='sp')) == 1
    finally:
        # Clean up the throwaway domain even when an assertion fails.
        for k in keys:
            mogc.delete(k)
        self.moga.delete_domain(domain)
def save_origin_image_data(cls, store_hash, image_data):
    """Persist the untouched upload under key img/<hash>.jpg."""
    datastore = Client(domain=settings.MOGILEFS_DOMAIN,
                       trackers=settings.MOGILEFS_TRACKERS)
    handle = datastore.new_file('img/' + store_hash + '.jpg')
    handle.write(image_data)
    handle.close()
def test_new_file_unexisting_domain(self):
    """new_file() against an unknown domain must raise MogileFSError."""
    client = Client('unexisting_domain', HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    self.assertRaises(MogileFSError, client.new_file, key)
def read_image_data_by_store_key(store_key):
    """Fetch raw image bytes from MogileFS by storage key."""
    datastore = Client(domain=settings.MOGILEFS_DOMAIN,
                       trackers=settings.MOGILEFS_TRACKERS)
    return datastore.get_file_data(store_key)
def test_new_file_unexisting_class(self):
    """new_file() with an unknown storage class must raise MogileFSError."""
    client = Client(TEST_NS, HOSTS)
    key = 'test_file_%s_%s' % (random.random(), time.time())
    self.assertRaises(MogileFSError, client.new_file, key, 'unexisting')
title = title.replace(' ', '') if not categories.has_key(title): print title else: categories[title]['large'] = f else: title = f.replace('.png', '') title = title.replace(' ', '/') if not categories.has_key(title): print title else: categories[title]['small'] = f from pymogile import Client from hashlib import md5 datastore = Client(domain='staging', trackers=['10.0.1.23:7001']) for title, obj in categories.items(): if obj.has_key('large') and obj.has_key('small'): f = open(image_path + '/' + obj['large'], 'r') large_data = f.read() f.close() key = md5(large_data).hexdigest() f = open(image_path + '/' + obj['small'], 'r') small_data = f.read() f.close() mgf = datastore.new_file('category/large/' + key) mgf.write(large_data) mgf.close()
def test_sleep(self):
    """sleep(1) is the cheapest live request; it must return True."""
    client = Client(TEST_NS, HOSTS)
    self.assertEqual(client.sleep(1), True)
# -*- coding: utf-8 -*-
from pymogile import Client

# One client per MogileFS domain, both against the local tracker.
c = Client(domain="privatemedia", trackers=['127.0.0.1:7001'])
cpub = Client(domain="publicmedia", trackers=['127.0.0.1:7001'])

# Enumerate existing keys.
c.list_keys()

# Create a file in MogileFS.
f = c.new_file('foobar.txt')
f.write('hi, my name bar, foo bar.')
f.close()

# Show storage paths (the second key does not exist).
c.get_paths('foobar.txt')
c.get_paths('404.txt')

# Fetch file data (one miss, one hit).
c.get_file_data('404.txt')
c.get_file_data('foobar.txt')

# Remove the demo file again.
c.delete('foobar.txt')

# Django storage backend imports, for the follow-up examples.
from ogv_documents.storage.mogilefs import MogilefsStorage
from django.core.files.base import ContentFile
class Category(object):
    """Wrapper around CategoryModel adding icon storage and query helpers."""

    def __init__(self, category_id):
        self.category_id = int(category_id)

    def __ensure_category_obj(self):
        # Lazily fetch the backing row; cached on the instance after first use.
        if not hasattr(self, 'category_obj'):
            self.category_obj = CategoryModel.objects.get(pk=self.category_id)

    def __resize(self, data, w, h):
        """Return *data* (an image blob) resized to w x h."""
        _img = Image(blob=data)
        _img.resize(w, h)
        return _img.make_blob()

    def update(self, title=None, group_id=None, image_data=None, status=None):
        """Apply partial updates; None arguments are left untouched.

        When *image_data* is given, 86x86 and 43x43 icons are written to
        MogileFS under a content-hash key.
        """
        self.__ensure_category_obj()
        # ``is not None`` instead of ``!= None``: identity test is the
        # idiomatic (and __eq__-proof) way to detect an omitted argument.
        if title is not None:
            self.category_obj.title = title
        if group_id is not None:
            self.category_obj.group_id = group_id
        if status is not None:
            self.category_obj.status = int(status)
        if image_data is not None:
            self.__datastore = Client(domain=settings.MOGILEFS_DOMAIN,
                                      trackers=settings.MOGILEFS_TRACKERS)
            _key = md5(image_data).hexdigest()
            _large_data = self.__resize(image_data, 86, 86)
            _fp = self.__datastore.new_file('category/large/' + _key)
            _fp.write(_large_data)
            _fp.close()
            _small_data = self.__resize(image_data, 43, 43)
            _fp = self.__datastore.new_file('category/small/' + _key)
            _fp.write(_small_data)
            _fp.close()
            self.category_obj.image_store_hash = _key
        self.category_obj.save()

    @classmethod
    def create(cls, title, group_id, status=1):
        """Insert a new category row and return a wrapper bound to it."""
        _category_obj = CategoryModel.objects.create(
            title=title, group_id=group_id, status=status)
        _inst = cls(_category_obj.id)
        _inst.category_obj = _category_obj
        return _inst

    def get_group_id(self):
        self.__ensure_category_obj()
        return self.category_obj.group_id

    def __load_category_context(self):
        """Serialize the backing row into a plain dict for templates/APIs."""
        self.__ensure_category_obj()
        _context = {}
        _context['category_id'] = self.category_obj.id
        _context['category_title'] = self.category_obj.title
        _context['group_id'] = self.category_obj.group_id
        _context['status'] = self.category_obj.status
        if self.category_obj.image_store_hash:
            _context['category_icon_large'] = settings.IMAGE_SERVER + 'category/large/' + self.category_obj.image_store_hash
            _context['category_icon_small'] = settings.IMAGE_SERVER + 'category/small/' + self.category_obj.image_store_hash
        return _context

    def read(self):
        return self.__load_category_context()

    @staticmethod
    def get_category_title_dict():
        """Map category id -> title for every row."""
        _dict = {}
        for _obj in CategoryModel.objects.all():
            _dict[_obj.id] = _obj.title
        return _dict

    @staticmethod
    def find(group_id=None, like_word=None, status=None,
             offset=None, count=None, order_by=None):
        """Query categories with optional filters, paging and ordering."""
        _hdl = CategoryModel.objects.all()
        if group_id is not None:
            _hdl = _hdl.filter(group_id=group_id)
        if like_word is not None:
            _q = Q(title__icontains=like_word)
            _hdl = _hdl.filter(_q)
        # status: None = no filter, >0 = active only, <=0 = inactive only.
        if status is None:
            pass
        elif status > 0:
            _hdl = _hdl.filter(status__gt=0)
        else:
            _hdl = _hdl.filter(status__lte=0)
        if order_by == '-status':
            _hdl = _hdl.order_by('-status')
        if offset is not None and count is not None:
            _hdl = _hdl[offset:offset + count]
        _rslt = []
        for _cat_obj in _hdl:
            _context = {
                'category_id': _cat_obj.id,
                'category_title': _cat_obj.title,
                'group_id': _cat_obj.group_id,
                'status': _cat_obj.status,
            }
            if _cat_obj.image_store_hash:
                _context['category_icon_large'] = settings.IMAGE_SERVER + 'category/large/' + _cat_obj.image_store_hash
                _context['category_icon_small'] = settings.IMAGE_SERVER + 'category/small/' + _cat_obj.image_store_hash
            _rslt.append(_context)
        return _rslt

    @staticmethod
    def allgroups():
        """List every category group with its category count."""
        _rslt = []
        for _group_obj in CategoryGroupModel.objects.all():
            _rslt.append({
                'group_id': _group_obj.id,
                'title': _group_obj.title,
                'status': _group_obj.status,
                'category_count': CategoryModel.objects.filter(
                    group_id=_group_obj.id).count(),
            })
        return _rslt

    @staticmethod
    def all_group_with_full_category():
        """allgroups() plus, per group, its non-negative-status categories."""
        _rslt = Category.allgroups()
        for _group in _rslt:
            _group['content'] = []
            for _category_obj in CategoryModel.objects.filter(
                    group_id=_group['group_id'], status__gte=0):
                _context = {
                    'category_id': _category_obj.id,
                    'category_title': _category_obj.title,
                    'status': _category_obj.status,
                }
                if _category_obj.image_store_hash:
                    _context['category_icon_large'] = settings.IMAGE_SERVER + 'category/large/' + _category_obj.image_store_hash
                    _context['category_icon_small'] = settings.IMAGE_SERVER + 'category/small/' + _category_obj.image_store_hash
                _group['content'].append(_context)
        return _rslt

    @staticmethod
    def get_category_by_taobao_cid(cid):
        """Map a Taobao category id to our category id; 300 when unmapped."""
        _cid = int(cid)
        try:
            _obj = TaobaoItemNeoCategoryMappingModel.objects.get(
                taobao_category_id=_cid)
            return _obj.neo_category_id
        except TaobaoItemNeoCategoryMappingModel.DoesNotExist:
            pass
        return 300

    @staticmethod
    def get_category_by_jd_cid(cid):
        # TODO: may need revisiting later (original note was in Chinese).
        return 300