def process_sqs_queue(queue_url, aws_region, queue_wait_time):
    """Poll an SQS queue for Firefox Accounts events and process them.

    Runs forever: each received message body is handed to
    process_fxa_event() and then deleted from the queue, even if the
    event type was unrecognized, so no backlog accumulates.

    Arguments:
        queue_url -- full URL of the SQS queue to poll.
        aws_region -- AWS region name the queue lives in.
        queue_wait_time -- long-poll wait time (seconds) per receive call.

    Raises:
        Any exception from connecting/polling is logged and re-raised;
        per-message failures are logged and swallowed so polling continues.
    """
    log = getLogger('accounts.sqs')
    log.info('Processing account events from %s', queue_url)
    try:
        # Connect to the SQS queue.
        sqs = boto3.client(
            'sqs',
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            region_name=aws_region)
        # Poll for messages indefinitely.
        while True:
            response = sqs.receive_message(
                QueueUrl=queue_url,
                WaitTimeSeconds=queue_wait_time,
                MaxNumberOfMessages=10)
            msgs = response.get('Messages', []) if response else []
            for message in msgs:
                try:
                    process_fxa_event(message.get('Body', ''))
                    # This intentionally deletes the event even if it was
                    # some unrecognized type. No point leaving a backlog.
                    if 'ReceiptHandle' in message:
                        sqs.delete_message(
                            QueueUrl=queue_url,
                            ReceiptHandle=message['ReceiptHandle'])
                except Exception as exc:
                    # One bad message must not stop the polling loop.
                    # Lazy %-args: formatting is deferred to the logger.
                    log.exception('Error while processing message: %s', exc)
    except Exception as exc:
        log.exception('Error while processing account events: %s', exc)
        # Bare raise preserves the original traceback; `raise exc` would
        # restart the traceback from this line.
        raise
def process_fxa_event(raw_body, **kwargs):
    """Parse and process a single Firefox Accounts event.

    Tries very hard not to error out if there's junk in the queue:
    malformed or incomplete messages are logged and dropped.

    Arguments:
        raw_body -- JSON string; its 'Message' property is itself a JSON
                    string holding the actual event.
    """
    log = getLogger('accounts.sqs')
    event_type = None
    try:
        body = json.loads(raw_body)
        event = json.loads(body['Message'])
        event_type = event.get('event')
        uid = event.get('uid')
        timestamp = event.get('ts', 0)
        if not (event_type and uid and timestamp):
            # Fixed message: the property is 'uid', not 'uuid'.
            raise ValueError(
                'Properties event, uid, and ts must all be non-empty')
    except (ValueError, KeyError, TypeError) as e:
        log.exception('Invalid account message: %s', e)
    else:
        if event_type == 'primaryEmailChanged':
            email = event.get('email')
            if not email:
                log.error('Email property must be non-empty for "%s" event',
                          event_type)
            else:
                primary_email_change_event.delay(email, uid, timestamp)
        else:
            log.debug('Dropping unknown event type %r', event_type)
def process_fxa_event(raw_body, **kwargs):
    """Parse and process a single Firefox Accounts event.

    Tries very hard not to error out if there's junk in the queue:
    malformed or incomplete messages are logged and dropped.

    Arguments:
        raw_body -- JSON string; its 'Message' property is itself a JSON
                    string holding the actual event.
    """
    log = getLogger('accounts.sqs')
    event_type = None
    try:
        body = json.loads(raw_body)
        event = json.loads(body['Message'])
        event_type = event.get('event')
        uid = event.get('uid')
        timestamp = event.get('ts', 0)
        if not (event_type and uid and timestamp):
            # Fixed message: the property is 'uid', not 'uuid'.
            raise ValueError(
                'Properties event, uid, and ts must all be non-empty')
    except (ValueError, KeyError, TypeError) as e:
        log.exception('Invalid account message: %s', e)
    else:
        if event_type == 'primaryEmailChanged':
            email = event.get('email')
            if not email:
                log.error('Email property must be non-empty for "%s" event',
                          event_type)
            else:
                primary_email_change_event.delay(email, uid, timestamp)
        else:
            log.debug('Dropping unknown event type %r', event_type)
def process_sqs_queue(queue_url, aws_region, queue_wait_time):
    """Poll an SQS queue for Firefox Accounts events and process them.

    Runs forever: each received message body is handed to
    process_fxa_event() and then deleted from the queue, even if the
    event type was unrecognized, so no backlog accumulates.

    Arguments:
        queue_url -- full URL of the SQS queue to poll.
        aws_region -- AWS region name the queue lives in.
        queue_wait_time -- long-poll wait time (seconds) per receive call.

    Raises:
        Any exception from connecting/polling is logged and re-raised;
        per-message failures are logged and swallowed so polling continues.
    """
    log = getLogger('accounts.sqs')
    log.info('Processing account events from %s', queue_url)
    try:
        # Connect to the SQS queue.
        sqs = boto3.client(
            'sqs',
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            region_name=aws_region)
        # Poll for messages indefinitely.
        while True:
            response = sqs.receive_message(
                QueueUrl=queue_url,
                WaitTimeSeconds=queue_wait_time,
                MaxNumberOfMessages=10)
            msgs = response.get('Messages', []) if response else []
            for message in msgs:
                try:
                    process_fxa_event(message.get('Body', ''))
                    # This intentionally deletes the event even if it was
                    # some unrecognized type. No point leaving a backlog.
                    if 'ReceiptHandle' in message:
                        sqs.delete_message(
                            QueueUrl=queue_url,
                            ReceiptHandle=message['ReceiptHandle'])
                except Exception as exc:
                    # One bad message must not stop the polling loop.
                    log.exception('Error while processing message: %s', exc)
    except Exception as exc:
        log.exception('Error while processing account events: %s', exc)
        # Bare raise preserves the original traceback; `raise exc` would
        # restart the traceback from this line.
        raise
def process_fxa_event(raw_body, **kwargs):
    """Parse and validate a single Firefox Accounts event.

    Tries very hard not to error out if there's junk in the queue:
    malformed or incomplete messages are logged and dropped.

    Arguments:
        raw_body -- JSON string; its 'Message' property is itself a JSON
                    string holding the actual event.
    """
    log = getLogger('accounts.sqs')
    event_type = None
    try:
        body = json.loads(raw_body)
        event = json.loads(body['Message'])
        event_type = event.get('event')
        uid = event.get('uid')
        # A missing 'ts' defaults to '' which makes fromtimestamp() raise
        # TypeError, handled below like any other malformed message.
        timestamp = datetime.fromtimestamp(event.get('ts', ''))
        if not (event_type and uid and timestamp):
            # Fixed message: the property is 'uid', not 'uuid'.
            raise ValueError(
                'Properties event, uid, and ts must all be non-empty')
    # The original used the Python-2-only "except (...), e" form, which
    # is a SyntaxError on Python 3; "as e" works on both.
    except (ValueError, KeyError, TypeError) as e:
        log.exception('Invalid account message: %s', e)
def process_fxa_event(raw_body, **kwargs):
    """Parse and validate a single Firefox Accounts event.

    Tries very hard not to error out if there's junk in the queue:
    malformed or incomplete messages are logged and dropped.

    Arguments:
        raw_body -- JSON string; its 'Message' property is itself a JSON
                    string holding the actual event.
    """
    log = getLogger('accounts.sqs')
    event_type = None
    try:
        body = json.loads(raw_body)
        event = json.loads(body['Message'])
        event_type = event.get('event')
        uid = event.get('uid')
        # A missing 'ts' defaults to '' which makes fromtimestamp() raise
        # TypeError, handled below like any other malformed message.
        timestamp = datetime.fromtimestamp(event.get('ts', ''))
        if not (event_type and uid and timestamp):
            # Fixed message: the property is 'uid', not 'uuid'.
            raise ValueError(
                'Properties event, uid, and ts must all be non-empty')
    # The original used the Python-2-only "except (...), e" form, which
    # is a SyntaxError on Python 3; "as e" works on both.
    except (ValueError, KeyError, TypeError) as e:
        log.exception('Invalid account message: %s', e)
def process_sqs_queue(queue_url):
    """Poll an SQS queue for Firefox Accounts events and process them.

    The AWS region is derived from the queue URL. Runs forever: each
    received message body is handed to process_fxa_event() and then
    deleted from the queue, even if the event type was unrecognized, so
    no backlog accumulates.

    Arguments:
        queue_url -- full URL of the SQS queue, e.g.
                     https://sqs.<region>.amazonaws.com/<account>/<name>.

    Raises:
        Any exception from connecting/polling is logged and re-raised;
        per-message failures are logged and swallowed so polling continues.
    """
    log = getLogger('accounts.sqs')
    log.info('Processing account events from %s', queue_url)
    try:
        # The region is the second dot-separated component of the URL
        # (https://sqs.<region>.amazonaws.com/...).
        region = queue_url.split('.')[1]
        # Use the public Session API instead of the private
        # boto3._get_default_session() helper.
        available_regions = (
            boto3.session.Session().get_available_regions('sqs'))
        if region not in available_regions:
            log.error('SQS misconfigured, expected region, got %s from %s',
                      region, queue_url)
        # Connect to the SQS queue.
        # Credentials are specified in EC2 as an IAM role on prod/stage/dev.
        # If you're testing locally see boto3 docs for how to specify:
        # http://boto3.readthedocs.io/en/latest/guide/configuration.html
        sqs = boto3.client('sqs', region_name=region)
        # Poll for messages indefinitely.
        while True:
            response = sqs.receive_message(
                QueueUrl=queue_url,
                WaitTimeSeconds=settings.FXA_SQS_AWS_WAIT_TIME,
                MaxNumberOfMessages=10)
            msgs = response.get('Messages', []) if response else []
            for message in msgs:
                try:
                    process_fxa_event(message.get('Body', ''))
                    # This intentionally deletes the event even if it was
                    # some unrecognized type. No point leaving a backlog.
                    if 'ReceiptHandle' in message:
                        sqs.delete_message(
                            QueueUrl=queue_url,
                            ReceiptHandle=message['ReceiptHandle'])
                except Exception as exc:
                    # One bad message must not stop the polling loop.
                    log.exception('Error while processing message: %s', exc)
    except Exception as exc:
        log.exception('Error while processing account events: %s', exc)
        # Bare raise preserves the original traceback; `raise exc` would
        # restart the traceback from this line.
        raise
def process_sqs_queue(queue_url):
    """Poll an SQS queue for Firefox Accounts events and process them.

    The AWS region is derived from the queue URL. Runs forever: each
    received message body is handed to process_fxa_event() and then
    deleted from the queue, even if the event type was unrecognized, so
    no backlog accumulates.

    Arguments:
        queue_url -- full URL of the SQS queue, e.g.
                     https://sqs.<region>.amazonaws.com/<account>/<name>.

    Raises:
        Any exception from connecting/polling is logged and re-raised;
        per-message failures are logged and swallowed so polling continues.
    """
    log = getLogger('accounts.sqs')
    log.info('Processing account events from %s', queue_url)
    try:
        # The region is the second dot-separated component of the URL
        # (https://sqs.<region>.amazonaws.com/...).
        region = queue_url.split('.')[1]
        # Use the public Session API instead of the private
        # boto3._get_default_session() helper.
        available_regions = (
            boto3.session.Session().get_available_regions('sqs'))
        if region not in available_regions:
            log.error('SQS misconfigured, expected region, got %s from %s',
                      region, queue_url)
        # Connect to the SQS queue.
        # Credentials are specified in EC2 as an IAM role on prod/stage/dev.
        # If you're testing locally see boto3 docs for how to specify:
        # http://boto3.readthedocs.io/en/latest/guide/configuration.html
        sqs = boto3.client('sqs', region_name=region)
        # Poll for messages indefinitely.
        while True:
            response = sqs.receive_message(
                QueueUrl=queue_url,
                WaitTimeSeconds=settings.FXA_SQS_AWS_WAIT_TIME,
                MaxNumberOfMessages=10)
            msgs = response.get('Messages', []) if response else []
            for message in msgs:
                try:
                    process_fxa_event(message.get('Body', ''))
                    # This intentionally deletes the event even if it was
                    # some unrecognized type. No point leaving a backlog.
                    if 'ReceiptHandle' in message:
                        sqs.delete_message(
                            QueueUrl=queue_url,
                            ReceiptHandle=message['ReceiptHandle'])
                except Exception as exc:
                    # One bad message must not stop the polling loop.
                    log.exception('Error while processing message: %s', exc)
    except Exception as exc:
        log.exception('Error while processing account events: %s', exc)
        # Bare raise preserves the original traceback; `raise exc` would
        # restart the traceback from this line.
        raise
import zipfile from base64 import b64encode from django.conf import settings from django.utils.encoding import force_str from PIL import Image from olympia.core import logger from olympia.lib.safe_xml import lxml from . import compare from .models import Version log = logger.getLogger('z.versions.utils') def get_next_version_number(addon): if not addon: return '1.0' last_version = Version.unfiltered.filter(addon=addon).order_by('id').last() version_dict = compare.version_dict(last_version.version) version_counter = 1 while True: next_version = '%s.0' % (version_dict['major'] + version_counter) if not Version.unfiltered.filter(addon=addon, version=next_version).exists(): return next_version else:
import argparse from datetime import datetime from importlib import import_module from django.conf import settings from django.core.management.base import BaseCommand, CommandError from olympia.core import logger log = logger.getLogger('z.cron') class Command(BaseCommand): help = 'Run one of the predefined cron jobs' def add_arguments(self, parser): # We handle the case where 0 arguments are given specifically below, # so use nargs='?' for the first argument. parser.add_argument('name', nargs='?') parser.add_argument('cron_args', nargs=argparse.REMAINDER, default=[]) def handle(self, *args, **options): if not options['name']: log.error('Cron called without args') raise CommandError('These jobs are available:\n%s' % '\n'.join(sorted(settings.CRON_JOBS.keys()))) name, args_and_kwargs = options['name'], options['cron_args'] args = [arg for arg in args_and_kwargs if '=' not in arg] kwargs = dict((kwarg.split('=', maxsplit=1)
from django.core.management.base import BaseCommand from olympia import amo from olympia.core.logger import getLogger from olympia.versions.compare import version_dict from olympia.versions.models import AppVersion, Version log = getLogger('z.fix_langpacks_with_max_version_star') class Command(BaseCommand): help = 'Fix language packs that have a max version compatibility set to *' def find_affected_langpacks(self): qs = Version.unfiltered.filter(addon__type=amo.ADDON_LPAPP, apps__max__version='*').distinct() return qs def fix_max_appversion_for_version(self, version): for app in (amo.FIREFOX, amo.ANDROID): if app not in version.compatible_apps: log.info( 'Version %s for addon %s min version is not compatible ' 'with %s, skipping this version for that app.', version, version.addon, app.pretty) continue if version.compatible_apps[app].max.version != '*': log.info( 'Version %s for addon %s max version is not "*" for %s ' 'app, skipping this version for that app.', version, version.addon, app.pretty)
from PIL import Image from rest_framework.utils.encoders import JSONEncoder from django.db.transaction import non_atomic_requests from olympia.core.logger import getLogger from olympia.amo import ADDON_ICON_SIZES, search from olympia.amo.pagination import ESPaginator from olympia.amo.urlresolvers import linkify_with_outgoing, reverse from olympia.translations.models import Translation from olympia.users.models import UserNotification from olympia.users.utils import UnsubscribeCode from olympia.lib import unicodehelper log = getLogger('z.amo') def render(request, template, ctx=None, status=None, content_type=None): rendered = loader.render_to_string(template, ctx, request=request) return HttpResponse(rendered, status=status, content_type=content_type) def from_string(string): return engines['jinja2'].from_string(string) def render_xml_to_string(request, template, context=None): from olympia.amo.templatetags.jinja_helpers import strip_controls if context is None:
from PIL import Image from rest_framework.utils.encoders import JSONEncoder from django.db.transaction import non_atomic_requests from olympia.core.logger import getLogger from olympia.amo import ADDON_ICON_SIZES, search from olympia.amo.pagination import ESPaginator from olympia.amo.urlresolvers import linkify_with_outgoing, reverse from olympia.translations.models import Translation from olympia.users.models import UserNotification from olympia.users.utils import UnsubscribeCode from olympia.lib import unicodehelper log = getLogger('z.amo') def render(request, template, ctx=None, status=None, content_type=None): rendered = loader.render_to_string(template, ctx, request=request) return HttpResponse(rendered, status=status, content_type=content_type) def from_string(string): return engines['jinja2'].from_string(string) def render_xml_to_string(request, template, context=None): from olympia.amo.templatetags.jinja_helpers import strip_controls if context is None:
from PIL import Image

from olympia import amo
from olympia.amo.templatetags.jinja_helpers import user_media_path
from olympia.amo.utils import StopWatch
from olympia.core import get_user, logger
from olympia.files.models import FileUpload
from olympia.files.utils import get_filepath, parse_addon
from olympia.lib.crypto.signing import sign_file
from olympia.lib.safe_xml import lxml

from . import compare
from .models import Version


log = logger.getLogger('z.versions.utils')


def get_next_version_number(addon):
    """Return the next available '<major>.0' version number for addon.

    Starts from the major component of the add-on's latest version (by
    version_int) plus one, and keeps incrementing until a version string
    not already used by the add-on is found. Returns '1.0' when addon is
    falsy.
    """
    if not addon:
        return '1.0'
    last_version = (
        Version.unfiltered.filter(addon=addon).order_by('version_int').last())
    version_int_parts = compare.dict_from_int(last_version.version_int)
    version_counter = 1
    while True:
        next_version = '%s.0' % (version_int_parts['major'] + version_counter)
        if not Version.unfiltered.filter(
                addon=addon, version=next_version).exists():
            return next_version
        # Bug fix: without this increment the loop recomputes the same
        # candidate forever whenever it already exists.
        version_counter += 1
import argparse from importlib import import_module from django.conf import settings from django.core.management.base import BaseCommand, CommandError from olympia.core import logger log = logger.getLogger('z.cron') class Command(BaseCommand): help = 'Run one of the predefined cron jobs' def add_arguments(self, parser): # We handle the case where 0 arguments are given specifically below, # so use nargs='?' for the first argument. parser.add_argument('name', nargs='?') parser.add_argument('cron_args', nargs=argparse.REMAINDER, default=[]) def handle(self, *args, **options): if not options['name']: log.error("Cron called without args") raise CommandError('These jobs are available:\n%s' % '\n'.join( sorted(settings.CRON_JOBS.keys()))) name, args = options['name'], options['cron_args'] path = settings.CRON_JOBS.get(name) if not path: