def collect_garbages(model, **kwargs):
    """Delete stale ``model`` rows older than a configured cutoff.

    Processes at most ``max_records`` rows per run and re-queues itself
    (with a countdown) when more garbage remains.
    """
    days_before = kwargs.get("days_before", settings("GC_DAYS_BEFORE", 30))
    max_records = kwargs.get("max_records", settings("GC_MAX_RECORDS", 1000))
    timestamp_field = kwargs.get("timestamp_field", "ctime")

    cutoff = date.today() - timedelta(days=days_before)
    stale = model.objects.filter(**{"%s__lte" % timestamp_field: cutoff})
    total = stale.count()

    logger.debug(u"processing garbage collection (%s, %s < %s, %s objects)"
                 % (model.__name__, timestamp_field, cutoff, total))

    for victim in iterator(stale, max_records):
        logger.debug(u"deleting %s object (%s)" % (model.__name__, victim))
        victim.delete()

    # chain the task when this batch did not exhaust the backlog
    if total > max_records:
        kwargs["countdown"] = RETRY_INTERVAL
        collect_garbages.delay(model, **kwargs)
    return True
def collect_garbages(model, **kwargs):
    """Garbage-collect expired ``model`` instances in bounded batches."""
    days_before = kwargs.get('days_before', settings('GC_DAYS_BEFORE', 30))
    max_records = kwargs.get('max_records', settings('GC_MAX_RECORDS', 1000))
    timestamp_field = kwargs.get('timestamp_field', 'ctime')
    expired_before = date.today() - timedelta(days=days_before)
    queryset = model.objects.filter(
        **{'%s__lte' % timestamp_field: expired_before})
    count = queryset.count()
    logger.debug(u'processing garbage collection (%s, %s < %s, %s objects)' % (
        model.__name__, timestamp_field, expired_before, count))
    for candidate in iterator(queryset, max_records):
        logger.debug(u'deleting %s object (%s)' % (model.__name__, candidate))
        candidate.delete()
    # chain the task
    if count > max_records:
        kwargs['countdown'] = RETRY_INTERVAL
        collect_garbages.delay(model, **kwargs)
    return True
def test_get(self):
    """Fetch the superuser proxy and verify lookups fail for unknowns."""
    from ficuspumila.core.auth.proxies import User
    u = User.objects.get(username=settings('SUPERUSER_USERNAME'))
    self.assertEqual(u.username, settings('SUPERUSER_USERNAME'))
    # is_superuser may arrive serialized; normalize before comparing
    self.assertEqual(to_python(u.is_superuser), True)
    self.assertRaises((client.ObjectDoesNotExist, ObjectDoesNotExist),
                      lambda: User.objects.get(username='******'))
    return u
def test_get(self):
    """Resolve the superuser's Owner proxy and check failing lookups."""
    from ficuspumila.core.content.proxies import Owner
    o = Owner.objects.get(user__username=settings('SUPERUSER_USERNAME'))
    self.assertEqual(o.user.username, settings('SUPERUSER_USERNAME'))
    self.assertEqual(o.source_owner_id, settings('SUPERUSER_USERNAME'))
    # both the client-side and server-side DoesNotExist are acceptable
    self.assertRaises((client.ObjectDoesNotExist, ObjectDoesNotExist),
                      lambda: Owner.objects.get(source_owner_id='crazymonkey'))
    return o
def test_get(self):
    """Resolve the superuser's Source proxy and check failing lookups."""
    from ficuspumila.core.content.proxies import Source
    s = Source.objects.get(user__username=settings('SUPERUSER_USERNAME'))
    self.assertEqual(s.user.username, settings('SUPERUSER_USERNAME'))
    self.assertEqual(s.name, settings('SUPERUSER_USERNAME'))
    # both the client-side and server-side DoesNotExist are acceptable
    self.assertRaises((client.ObjectDoesNotExist, ObjectDoesNotExist),
                      lambda: Source.objects.get(name='crazymonkey'))
    return s
def test_get(self):
    """Owner proxy lookup by superuser name; unknown ids must raise."""
    from ficuspumila.core.content.proxies import Owner
    superuser = settings('SUPERUSER_USERNAME')
    o = Owner.objects.get(user__username=superuser)
    self.assertEqual(o.user.username, superuser)
    self.assertEqual(o.source_owner_id, superuser)
    self.assertRaises(
        (client.ObjectDoesNotExist, ObjectDoesNotExist),
        lambda: Owner.objects.get(source_owner_id='crazymonkey'))
    return o
def test_get(self):
    """Fetch the SCREENSHOT file spec of the superuser's owner."""
    from ficuspumila.core.content.proxies import FileSpecification, Owner
    o = Owner.objects.get(user__username=settings('SUPERUSER_USERNAME'))
    fs = FileSpecification.objects.get(owner=o, name='SCREENSHOT')
    self.assertEqual(fs.name, 'SCREENSHOT')
    self.assertEqual(fs.owner.user.username, settings('SUPERUSER_USERNAME'))
    # unknown specification names must raise either DoesNotExist flavour
    self.assertRaises((client.ObjectDoesNotExist, ObjectDoesNotExist),
                      lambda: FileSpecification.objects.get(name='crazymonkey'))
    return fs
def test_get(self):
    """FileSpecification lookup for the superuser's owner account."""
    from ficuspumila.core.content.proxies import FileSpecification, Owner
    superuser = settings('SUPERUSER_USERNAME')
    o = Owner.objects.get(user__username=superuser)
    fs = FileSpecification.objects.get(owner=o, name='SCREENSHOT')
    self.assertEqual(fs.name, 'SCREENSHOT')
    self.assertEqual(fs.owner.user.username, superuser)
    self.assertRaises(
        (client.ObjectDoesNotExist, ObjectDoesNotExist),
        lambda: FileSpecification.objects.get(name='crazymonkey'))
    return fs
class Item(models.Model):
    """Content item registered by an Owner; may nest via ``parents``."""

    class Meta:
        # an owner may register each external item id only once
        unique_together = (
            'owner',
            'source_item_id',
        )

    owner = djmodels.ForeignKey(Owner)
    # identifier of the item in the owner's own (source) system
    source_item_id = djmodels.CharField(max_length=255)
    item_type = djmodels.SmallIntegerField(choices=ItemTypes, default=0)
    # meta type choices are site-configurable; defaults to a single 'Track' type
    meta_type = djmodels.SmallIntegerField(choices=settings('META_TYPES',
                                                            ((0, 'Track'), )),
                                           default=0)
    # directed item graph; reverse accessor on related items is 'children'
    # NOTE(review): null=True has no effect on ManyToManyField — confirm and drop
    parents = djmodels.ManyToManyField('self',
                                       symmetrical=False,
                                       related_name='children',
                                       blank=True,
                                       null=True)
    # resources = models.ManyToManyField('content.Resource',
    #                                    related_name='items')
    enabled = djmodels.BooleanField(default=True)

    def __unicode__(self):
        return '%s@%s (%s)' % (self.source_item_id,
                               self.owner.source,
                               self.owner)
def event_receiver(request):
    """
    This actually does nothing, called for testing purpose only.

    Verifies the HMAC signature of a POSTed event notification and
    answers 200 on success, 400 on a bad digest, 405 for non-POST.
    """
    if request.method == 'POST':
        data = json.loads(get_raw_post_data(request))
        logger.debug(u'got event notification (%s)' % data)
        # the digest covers the event name concatenated with its creation time
        digest = generate_hmac_digest(settings('HMAC_KEY', DEV_HMAC_KEY),
                                      '%s%s' % (data['event'], data['ctime'],))
        logger.debug(u'hmac digest (%s)' % digest)
        if digest == data['hmac']:
            logger.debug(u'the notification has been processed normally.')
            return HttpResponse(u'OK')
        else:
            # fixed typo in the log message: 'notifcation' -> 'notification'
            logger.exception(u'invalid notification detected.')
            return HttpBadRequest(u'Invalid notification detected')
    return HttpMethodNotAllowed()
def event_receiver(request):
    """
    This actually does nothing, called for testing purpose only
    """
    if request.method == 'POST':
        data = json.loads(get_raw_post_data(request))
        logger.debug(u'got event notification (%s)' % data)
        # recompute the HMAC over event name + creation time and compare
        digest = generate_hmac_digest(
                     settings('HMAC_KEY', DEV_HMAC_KEY),
                     '%s%s' % (
                         data['event'],
                         data['ctime'],
                     ))
        logger.debug(u'hmac digest (%s)' % digest)
        if digest == data['hmac']:
            logger.debug(u'the notification has been processed normally.')
            return HttpResponse(u'OK')
        else:
            # NOTE(review): typo 'notifcation'; also logger.exception outside
            # an except block logs no traceback — logger.error is the intent
            logger.exception(u'invalid notifcation detected.')
            return HttpBadRequest(u'Invalid notification detected')
    return HttpMethodNotAllowed()
def metadata(self):
    # Resolve the metadata proxy class named after this item's meta type
    # from the module configured in META_PROXIES_MODULE.
    # NOTE(review): as shown here ``meta`` is assigned but never returned —
    # this view of the function may be truncated; confirm against the file.
    try:
        meta = getattr(import_module(settings('META_PROXIES_MODULE')),
                       self.meta_type_display)
    except Exception, e:
        logger.exception(e)
        raise exceptions.ProxyException(_('No metadata model found, check '
                                          'if META_TYPES and '
                                          'META_PROXIES_MODULE settings are '
                                          'correct.'))
def metadata(self):
    # Dynamically import the proxy model matching ``self.meta_type_display``.
    # NOTE(review): ``meta`` is unused in this visible span — the function
    # body may continue beyond this snippet; verify before refactoring.
    try:
        meta = getattr(import_module(settings('META_PROXIES_MODULE')),
                       self.meta_type_display)
    except Exception, e:
        logger.exception(e)
        raise exceptions.ProxyException(
                  _('No metadata model found, check '
                    'if META_TYPES and '
                    'META_PROXIES_MODULE settings are '
                    'correct.'))
def __init__(self, **kwargs):
    """Resolve the calling service and its user from request kwargs.

    Imports every service/user proxy class listed in the SERVICES setting,
    instantiates the one service referenced in ``kwargs``, then validates
    the supplied token and resolves the service user from it.
    """

    def get_name(path):
        # 'pkg.mod.Name' -> 'name'
        return path.rpartition('.')[-1].lower()

    logger.debug(u'trying to initialize (%s)' % kwargs,)

    try:
        # import modules
        services = {}
        for k, v in settings('SERVICES').iteritems():
            service = k.split('.')
            user = v['user'].split('.')
            services[get_name(k)] = {
                'service': getattr(import_module('.'.join(service[:-1])),
                                   service.pop().title()),
                'user'   : getattr(import_module('.'.join(user[:-1])),
                                   user.pop().title()),
            }
        logger.debug(u'installed services: %s' % services)

        # instantiate service
        # NOTE(review): ``kwargs.get(k)`` reuses the loop variable ``k``
        # leaked from the comprehension — likely intended ``kwargs.get(s[0])``;
        # confirm before relying on multi-service configurations.
        self.service = [s[1]['service'].objects.get(
                            user=int(kwargs.get(k)))
                        for s in [(get_name(k), v,)
                                  for k, v in services.iteritems()]
                        if kwargs.get(s[0])][0]

        # validate token
        self.token = self.validate_token(
                         self.service,
                         kwargs.get(Authenticator.TOKEN_PARAM, None),
                         kwargs.get(Authenticator.FORMAT_PARAM, None),
                         kwargs.get(Authenticator.DATA_PARAM, None))

        if self.token.get('username'):
            # TODO: detect service user from django user
            pass
        else:
            # detect service user from params
            self.user = self.user if self.user else [s[1]['user'].objects.get(**{
                            '%s' % s[0]: self.service,
                            '%s' % s[2]: self.token.get(s[2]),
                        })
                        for s in [(get_name(k), v,
                                   '%s_%s_id' % (get_name(k),
                                                 v['user'].__name__.lower(),))
                                  for k, v in services.iteritems()]
                        if kwargs.get(s[0])][0]
    except IndexError, e:
        logger.exception(u'service or user not found in params')
def query_country_code(cls, ip): if settings('IPINFODB_API_KEY') is None: raise ProxyException(u'"IPINFODB_API_KEY" is not defiend in settings.') try: response = requests.get(settings('IPINFODB_API_URL'), params={'key': settings('IPINFODB_API_KEY'), 'ip': ip, 'format': 'json'}) if response.status_code == 200: logger.debug(u'api response (%s -> HTTP %s: %s)' % ( ip, response.status_code, response.json(),)) return response.json()['countryCode'] except Exception, e: pass
def __init__(self, to, attribute, related_name=None,
             default=fields.NOT_PROVIDED, null=False, blank=False,
             readonly=False, full=False, unique=False, help_text=None,
             use_in='all', full_list=True, full_detail=True,
             order_by=None, limit=settings('TO_MANY_FIELD_LIMIT', 10),
             filters=None):
    """ToManyField whose related queryset is ordered, filtered and capped.

    The field is forced read-only and nullable because the limited
    attribute is derived, not writable.

    NOTE: ``limit``'s default is evaluated once at class-definition time.
    """
    # bug fix: ``filters={}`` was a shared mutable default; use None sentinel
    if filters is None:
        filters = {}
    attribute = limit_queryset(attribute,
                               order_by=order_by,
                               limit=limit,
                               filters=filters)
    readonly = True
    null = True
    super(LimitedToManyField, self).__init__(
        to,
        attribute,
        related_name=related_name,
        default=default,
        null=null,
        blank=blank,
        readonly=readonly,
        full=full,
        unique=unique,
        help_text=help_text,
        use_in=use_in,
        full_list=full_list,
        full_detail=full_detail
    )
def meta_type_display(self): if 'get_meta_type_display' in dir(self): return self.get_meta_type_display() meta_types = settings('META_TYPES') if not meta_types: return None try: return meta_types[self.meta_type][1] except IndexError, e: raise exceptions.ProxyException(_('Unexpected meta type detected ' '(%s, %s).') % (self.meta_type, meta_types,))
def query_country_code(cls, ip):
    # Look up the ISO country code for ``ip`` via the ipinfodb API.
    # Returns None on any failure (deliberate best effort).
    if settings('IPINFODB_API_KEY') is None:
        # NOTE(review): typo 'defiend' in the message
        raise ProxyException(
            u'"IPINFODB_API_KEY" is not defiend in settings.')
    try:
        response = requests.get(settings('IPINFODB_API_URL'),
                                params={
                                    'key': settings('IPINFODB_API_KEY'),
                                    'ip': ip,
                                    'format': 'json'
                                })
        if response.status_code == 200:
            # NOTE(review): response.json() parses the body twice here
            logger.debug(u'api response (%s -> HTTP %s: %s)' % (
                ip,
                response.status_code,
                response.json(),
            ))
            return response.json()['countryCode']
    except Exception, e:
        # swallow everything: lookup failures must not break callers
        pass
def generate_token(self, data={}, format='json'):
    # Encrypt (absolute expiry timestamp, data) into a transport token.
    # NOTE(review): mutable default ``data={}`` is re-bound, never mutated,
    # so it is harmless here — but ``data=None`` would be safer style.
    try:
        if format == 'json':
            # token payload is (expiry epoch seconds, caller data)
            data = json.dumps((
                time.time() + settings('TOKEN_TIMEOUT'),
                data,))
        else:
            raise exceptions.ProxyException(_('Format not supported: %s') % format)
        return crypto.Transcoder(
                   key=self.token_key,
                   iv=self.token_iv).algorithm.encrypt(data)
    except Exception, e:
        logger.exception('an error has occurred during generating token: %s' % e)
        raise e
def meta_type_display(self):
    # Human-readable label for ``self.meta_type``, or None if unconfigured.
    if 'get_meta_type_display' in dir(self):
        # Django-generated accessor available — use it
        return self.get_meta_type_display()
    meta_types = settings('META_TYPES')
    if not meta_types:
        return None
    try:
        return meta_types[self.meta_type][1]
    except IndexError, e:
        # meta_type points outside the configured choices
        raise exceptions.ProxyException(
            _('Unexpected meta type detected '
              '(%s, %s).') % (
                  self.meta_type,
                  meta_types,
              ))
def generate_token(self, data={}, format='json'):
    # Serialize (expiry epoch, data) and encrypt it into an opaque token.
    # NOTE(review): mutable default ``data={}`` is safe here (re-bound,
    # never mutated) but consider ``data=None``.
    try:
        if format == 'json':
            data = json.dumps((
                time.time() + settings('TOKEN_TIMEOUT'),
                data,
            ))
        else:
            raise exceptions.ProxyException(
                _('Format not supported: %s') % format)
        return crypto.Transcoder(key=self.token_key,
                                 iv=self.token_iv).algorithm.encrypt(data)
    except Exception, e:
        logger.exception(
            'an error has occurred during generating token: %s' % e)
        raise e
def __init__(self, to, attribute, related_name=None,
             default=fields.NOT_PROVIDED, null=False, blank=False,
             readonly=False, full=False, unique=False, help_text=None,
             use_in='all', full_list=True, full_detail=True,
             order_by=None, limit=settings('TO_MANY_FIELD_LIMIT', 10),
             filters=None):
    """ToManyField that caps, orders and filters its related queryset.

    Forced read-only and nullable: the limited attribute is derived.

    NOTE: ``limit``'s default is evaluated once at class-definition time.
    """
    # bug fix: ``filters={}`` was a mutable default shared across instances
    if filters is None:
        filters = {}
    attribute = limit_queryset(attribute,
                               order_by=order_by,
                               limit=limit,
                               filters=filters)
    readonly = True
    null = True
    super(LimitedToManyField, self).__init__(to,
                                             attribute,
                                             related_name=related_name,
                                             default=default,
                                             null=null,
                                             blank=blank,
                                             readonly=readonly,
                                             full=full,
                                             unique=unique,
                                             help_text=help_text,
                                             use_in=use_in,
                                             full_list=full_list,
                                             full_detail=full_detail)
def allow_relation(self, obj1, obj2, **hints):
    """Permit relations between objects that both live on a read database."""
    readable = settings('DATABASES_FOR_READ', DEFAULT)
    if obj1._state.db in readable and obj2._state.db in readable:
        return True
    # defer the decision to other routers
    return None
def _parents(self, obj):
    # album changelist filtered down to this item's parents
    link = '/%stests/album/?item__children=%s' % (settings('ADMIN_PREFIX'),
                                                  obj.item.pk)
    return coreadmin.related_objects(obj.item, 'parents', link)
def _source(self, obj):
    # admin changelist link to the owning source, filtered by user id
    return '<a href="/%scontent/source/?user__id=%i">%s</a>' % (
        settings('ADMIN_PREFIX'), obj.source.user_id, obj.source)
def _item(self, obj):
    """Admin link to the related item, or None when unset."""
    if obj.item:
        # bug fix: referenced bare ``item`` (NameError) instead of obj.item
        return '<a href="/%scontent/item/?id=%i">%s</a>' % (
            settings('ADMIN_PREFIX'), obj.item.id, obj.item)
    return None
def _user(obj):
    # changelist link for the related django user
    prefix = settings('ADMIN_PREFIX')
    return '<a href="/%sauth/user/?id=%i">%s</a>' % (prefix, obj.user.id,
                                                     obj.user)
def _filespecifications(self, obj):
    # related file specifications, linked to their filtered changelist
    target = '/%scontent/filespecification/?owner=%s' % (
        settings('ADMIN_PREFIX'), obj.user.id)
    return admin.related_objects(obj, 'filespecification_set', target)
def _name(self, obj):
    # link to the source event name changelist entry
    return '<a href="/%scontent/sourceeventname/?id=%i">%s</a>' % (
        settings('ADMIN_PREFIX'), obj.name.id, obj.name)
def wrapper(*args, **kwargs):
    # when an API_URL is configured this process is a remote client,
    # so invoking the wrapped function locally is forbidden
    if not settings('API_URL'):
        return func(*args, **kwargs)
    raise RemotelyUncallableException(func.__name__)
def _owner(obj):
    # changelist link to the owner; implicitly None when unset
    if not obj.owner:
        return None
    return '<a href="/%scontent/owner/?user_id=%i">%s</a>' % (
        settings('ADMIN_PREFIX'), obj.owner.user_id, obj.owner)
def db_for_write(self, model, **hints):
    """Spread writes randomly across the configured write databases."""
    return random.choice(settings('DATABASES_FOR_WRITE', DEFAULT))
def db_for_read(self, model, **hints):
    """Spread reads randomly across the configured read databases."""
    return random.choice(settings('DATABASES_FOR_READ', DEFAULT))
def queue_collect_garbage(sender, **kwargs):
    # Probabilistically enqueue a garbage-collection task for ``sender``.
    # NOTE(review): if ``random`` here is stdlib ``random.random`` this call
    # is a TypeError (it takes no argument); the intent is presumably
    # ``random() < settings('GC_PROBABILITY', 0.05)`` — confirm the helper.
    if random(settings('GC_PROBABILITY', 0.05)):
        collect_garbages.delay(sender, **kwargs)
logger.debug(u'token: %s' % token) try: token = service.decrypt_token(token, format, True) except AuthException, e: raise e except Exception, e: logger.exception(u'failed to validate token: %s' % e) raise AuthException(_(u'Invalid token detected.')) logger.debug(u'token decrypted: %s' % token) if token[0]: if token[0] > time.time() + settings('TOKEN_TIMEOUT'): logger.debug(u'token expiration is bigger than expected: token %s > %s' % ( token[0], timezone.now() + settings('TOKEN_TIMEOUT'),)) raise AuthException(_(u'Expiration specified in token is ' u'bigger than expected.')) if token[0] < time.time(): logger.debug(u'token is expired: token %s < %s') raise AuthException(_(u'Token expired.')) else: raise AuthException(_(u'Invalid token detected.')) token = token[1] if len(token) > 0 else {}
def _items(self, obj):
    # all items belonging to this owner, as a filtered changelist link
    link = '/%scontent/item/?owner=%i' % (settings('ADMIN_PREFIX'),
                                          obj.user.id)
    return admin.related_objects(obj, 'item_set', link)
def _children(self, obj):
    # tracks that list this object's item among their parents
    link = '/%stests/track/?item__parents=%s' % (settings('ADMIN_PREFIX'),
                                                 obj.item.pk)
    return coreadmin.related_objects(obj.item, 'children', link)
def _parent(self, obj):
    """Admin link to the parent file specification, if any."""
    if not obj.parent:
        return None
    return '<a href="/%scontent/filespecification/?id=%i">%s</a>' % (
        settings('ADMIN_PREFIX'), obj.parent.id, obj.parent)
def _item(self, obj):
    """Admin changelist link for the related item; None when unset."""
    if obj.item:
        # bug fix: the original referenced an undefined name ``item``
        # (NameError at render time) — use obj.item
        return '<a href="/%scontent/item/?id=%i">%s</a>' % (
            settings('ADMIN_PREFIX'), obj.item.id, obj.item)
    return None
def _children(self, obj):
    # direct children of this item in the parents/children graph
    link = '/%scontent/item/?parents=%s' % (settings('ADMIN_PREFIX'), obj.pk)
    return admin.related_objects(obj, 'children', link)
#-*- coding: utf-8 -*- from django.conf.urls import include, patterns, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns from ficuspumila.apps.example import resources as example_api from ficuspumila.settings import ficuspumila as settings admin.autodiscover() urlpatterns = patterns( '', # admin url(r'^%s' % settings('ADMIN_PREFIX'), include(admin.site.urls)), # api url(r'^api/', include('ficuspumila.urls.api')), url(r'^api/v1/apps/', include(example_api.get_urls())), # event - testing purpose only url(r'^event_receiver/', 'ficuspumila.core.views.event_receiver'), ) urlpatterns += staticfiles_urlpatterns()
def _owner(obj):
    """Admin changelist link to the related owner; None when unset."""
    if not obj.owner:
        return None
    prefix = settings('ADMIN_PREFIX')
    return '<a href="/%scontent/owner/?user_id=%i">%s</a>' % (
        prefix, obj.owner.user_id, obj.owner)
def get_credentials(self):
    """Build HTTP Basic credentials for the configured superuser."""
    username = settings('SUPERUSER_USERNAME')
    password = settings('SUPERUSER_PASSWORD')
    return self.create_basic(username=username, password=password)
def _item(obj):
    # changelist link for the related item
    prefix = settings('ADMIN_PREFIX')
    return '<a href="/%scontent/item/?id=%i">%s</a>' % (prefix, obj.item.id,
                                                        obj.item)
def _children(self, obj):
    """Changelist link listing the tracks parented by this item."""
    prefix = settings('ADMIN_PREFIX')
    return coreadmin.related_objects(
        obj.item, 'children',
        '/%stests/track/?item__parents=%s' % (prefix, obj.item.pk))
def allow_syncdb(self, db, model):
    """Allow syncdb for every app not explicitly disabled in settings."""
    return model._meta.app_label not in settings('SYNCDB_DISABLED')
def _parents(self, obj):
    """Changelist link listing the albums that contain this item."""
    prefix = settings('ADMIN_PREFIX')
    return coreadmin.related_objects(
        obj.item, 'parents',
        '/%stests/album/?item__children=%s' % (prefix, obj.item.pk))
def _children(self, obj):
    """Changelist link listing this item's direct children."""
    prefix = settings('ADMIN_PREFIX')
    return admin.related_objects(
        obj, 'children', '/%scontent/item/?parents=%s' % (prefix, obj.pk))
# -*- coding: utf-8 -*- import hashlib import logging import sys from django.core.cache import cache as djcache from functools import wraps from ficuspumila.settings import ficuspumila as settings TIMEOUT = settings('CACHE_TIMEOUT', 60 * 15) logger = logging.getLogger(__name__) def _generate_key(key, **kwargs): return hashlib.sha1(key).hexdigest() def set(key, value, expiration=TIMEOUT, **kwargs): logger.debug(u'setting cache (%s -> %s)' % ( key, value, )) return djcache.set(_generate_key(key, **kwargs), value, expiration) def add(key, value, expiration=TIMEOUT, **kwargs): logger.debug(u'adding cache (%s -> %s)' % ( key,
class Country(object):
    """Refreshes common.country fixture entries from the geonames
    countryInfo.txt dump (tab-separated, '#'-commented)."""

    PK_FIELD = 'alpha2'
    COMMENT = '#'
    DELIMITER = '\t'
    # (source column header, fixture field name) in on-the-wire column order;
    # also used to validate that the upstream column layout is unchanged
    COLUMN_FIELD_MAP = (
        (u'ISO', 'alpha2',),
        (u'ISO3', 'alpha3',),
        (u'ISO-Numeric', 'numeric3',),
        (u'fips', 'fips',),
        (u'Country', 'name',),
        (u'Capital', 'capital',),
        (u'Area(in sq km)', 'area',),
        (u'Population', 'population',),
        (u'Continent', 'continent',),
        (u'tld', 'tld',),
        (u'CurrencyCode', 'currency_code',),
        (u'CurrencyName', 'currency_name',),
        (u'Phone', 'phone',),
        (u'Postal Code Format', 'postal_code_format',),
        (u'Postal Code Regex', 'postal_code_regex',),
        (u'Languages', 'languages',),
        (u'geonameid', 'geonameid',),
        (u'neighbours', 'neighbours',),
        (u'EquivalentFipsCode', 'equivalent_fips_code',),
    )

    def __init__(self, generator):
        # generator holds the fixture data list being (re)built
        self.generator = generator

    def update_objects(self):
        def cindex(field):
            # column index in COLUMN_FIELD_MAP for a given fixture field
            return [i for i, v in enumerate(self.COLUMN_FIELD_MAP)
                    if v[1] == field][0]

        def populate(row):
            # logger.debug(u'populating... (%s)' % row)
            # drop any pre-existing fixture entry for the same country
            index = self.generator.index('common.country',
                                         row[cindex(self.PK_FIELD)])
            try:
                if index >= 0:
                    del self.generator.data[index]
            except IndexError, e:
                pass
            # numeric columns may be blank -> None
            area = int(math.ceil(float(row[cindex('area')]))) if len(
                row[cindex('area')]) else None
            population = row[cindex('population')] if len(
                row[cindex('population')]) else None
            geonameid = row[cindex('geonameid')] if len(
                row[cindex('geonameid')]) else None
            self.generator.data.append({
                'pk': row[cindex(self.PK_FIELD)],
                'model': 'common.country',
                'fields': {
                    'alpha3': row[cindex('alpha3')],
                    'numeric3': row[cindex('numeric3')],
                    'fips': row[cindex('fips')],
                    'name': row[cindex('name')],
                    'capital': row[cindex('capital')],
                    'area': area,
                    'population': population,
                    'continent': row[cindex('continent')],
                    'tld': row[cindex('tld')],
                    'currency_code': row[cindex('currency_code')],
                    'currency_name': row[cindex('currency_name')],
                    'phone': row[cindex('phone')],
                    'postal_code_format': row[cindex('postal_code_format')],
                    'postal_code_regex': row[cindex('postal_code_regex')],
                    'languages': row[cindex('languages')],
                    'geonameid': geonameid,
                    'neighbours': row[cindex('neighbours')],
                    'equivalent_fips_code': row[cindex('equivalent_fips_code')],
                    'ctime': '%s' % timezone.now(),
                    'utime': '%s' % timezone.now(),
                }
            })

        # update objects in fixture
        response = requests.get(settings('GEONAMES_COUNTRY_INFO'))
        if response.status_code == 200:
            tsv = StringIO.StringIO(response.text)
            prevline = ''
            columns = None
            for line in tsv.readlines():
                if not line.startswith(self.COMMENT):
                    # detect column headers (they sit on the last comment line)
                    if columns is None and prevline.startswith(self.COMMENT):
                        columns = prevline[1:].strip().split(self.DELIMITER)
                    # interrupt if column structure has changed
                    if columns != [c for c, f in self.COLUMN_FIELD_MAP]:
                        raise Exception(
                            u'Column structure seems to have changed!')
                    # fill missing columns
                    row = line.strip().split(self.DELIMITER)
                    while len(row) < len(self.COLUMN_FIELD_MAP):
                        row.append(u'')
                    populate(row)
                prevline = line
            tsv.close()
        else:
            logger.warning(u'failed to fetch the source (HTTP %s: %s)' %
                           (response.status_code, response.text))