Example #1
def xpi_build_from_model(rev_pk,
                         mod_codes={},
                         att_codes={},
                         hashtag=None,
                         tqueued=None):
    """ Get object and build xpi
    """
    if not hashtag:
        log.critical("No hashtag provided")
        return
    tstart = time.time()
    if tqueued:
        tinqueue = (tstart - tqueued) * 1000
        statsd.timing('xpi.build.queued', tinqueue)
        log.info('[xpi:%s] Addon job picked from queue (%dms)' %
                 (hashtag, tinqueue))
    revision = PackageRevision.objects.get(pk=rev_pk)
    log.debug('[xpi:%s] Building %s' % (hashtag, revision))
    # prepare changed modules and attachments
    modules = []
    attachments = []
    for mod in revision.modules.all():
        if str(mod.pk) in mod_codes:
            mod.code = mod_codes[str(mod.pk)]
            modules.append(mod)
    for att in revision.attachments.all():
        if str(att.pk) in att_codes:
            att.code = att_codes[str(att.pk)]
            attachments.append(att)
    revision.build_xpi(modules=modules,
                       attachments=attachments,
                       hashtag=hashtag,
                       tstart=tstart)
Example #2
def check_redis():
    redis = caching.invalidation.get_redis_backend()
    try:
        return redis.info(), None
    except Exception, e:
        log.critical('Failed to chat with redis: (%s)' % e)
        return None, e
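
The (value, error) pair returned above lets callers check Redis health without
their own try/except. A minimal caller sketch under that assumption
(report_redis is hypothetical; check_redis and log come from the example):

def report_redis():
    # check_redis() returns (info, None) on success or (None, error) on
    # failure, so testing the error slot is enough to branch.
    info, error = check_redis()
    if error is not None:
        log.warning('Redis unavailable: %s' % error)
        return False
    # redis.info() yields a dict of server stats, e.g. 'redis_version'.
    log.debug('Redis version: %s' % info.get('redis_version', 'unknown'))
    return True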
Example #3
def update_monolith_stats(metric, date, **kw):
    log.info('Updating monolith statistics (%s) for (%s)' % (metric, date))

    jobs = _get_monolith_jobs(date)[metric]

    for job in jobs:
        try:
            # Only record if count is greater than zero.
            count = job['count']()
            if count:
                value = {'count': count}
                if 'dimensions' in job:
                    value.update(job['dimensions'])

                MonolithRecord.objects.create(recorded=date,
                                              key=metric,
                                              value=json.dumps(value))

                log.debug('Monolith stats details: (%s) has (%s) for (%s). '
                          'Value: %s' % (metric, count, date, value))
            else:
                log.debug('Monolith stat (%s) did not record due to falsy '
                          'value (%s) for (%s)' % (metric, count, date))

        except Exception as e:
            log.critical('Update of monolith table failed: (%s): %s' %
                         ([metric, date], e))
Example #4
def monitor(request):

    # For each check, a boolean pass/fail status to show in the template
    status_summary = {}
    status = 200

    # Check all memcached servers
    scheme, servers, _ = parse_backend_uri(settings.CACHE_BACKEND)
    memcache_results = []
    status_summary['memcache'] = True
    if 'memcached' in scheme:
        hosts = servers.split(';')
        for host in hosts:
            ip, port = host.split(':')
            try:
                s = socket.socket()
                s.connect((ip, int(port)))
            except Exception, e:
                result = False
                status_summary['memcache'] = False
                log.critical('Failed to connect to memcached (%s): %s' %
                                                                    (host, e))
            else:
                result = True
            finally:
                s.close()
            memcache_results.append((ip, port, result))
Example #5
def xpi_build_from_model(rev_pk, mod_codes={}, att_codes={}, hashtag=None, tqueued=None):
    """ Get object and build xpi
    """
    if not hashtag:
        log.critical("No hashtag provided")
        return
    tstart = time.time()
    if tqueued:
        tinqueue = (tstart - tqueued) * 1000
        statsd.timing('xpi.build.queued', tinqueue)
        log.info('[xpi:%s] Addon job picked from queue (%dms)' % (hashtag, tinqueue))
    revision = PackageRevision.objects.get(pk=rev_pk)
    log.debug('[xpi:%s] Building %s' % (hashtag, revision))
    # prepare changed modules and attachments
    modules = []
    attachments = []
    for mod in revision.modules.all():
        if str(mod.pk) in mod_codes:
            mod.code = mod_codes[str(mod.pk)]
            modules.append(mod)
    for att in revision.attachments.all():
        if str(att.pk) in att_codes:
            att.code = att_codes[str(att.pk)]
            attachments.append(att)
    revision.build_xpi(
            modules=modules,
            attachments=attachments,
            hashtag=hashtag,
            tstart=tstart)
Example #6
def update_monolith_stats(metric, date, **kw):
    log.info('Updating monolith statistics (%s) for (%s)' % (metric, date))

    jobs = _get_monolith_jobs(date)[metric]

    for job in jobs:
        try:
            # Only record if count is greater than zero.
            count = job['count']()
            if count:
                value = {'count': count}
                if 'dimensions' in job:
                    value.update(job['dimensions'])

                MonolithRecord.objects.create(recorded=date, key=metric,
                                              value=json.dumps(value))

                log.debug('Monolith stats details: (%s) has (%s) for (%s). '
                          'Value: %s' % (metric, count, date, value))
            else:
                log.debug('Monolith stat (%s) did not record due to falsy '
                          'value (%s) for (%s)' % (metric, count, date))

        except Exception as e:
            log.critical('Update of monolith table failed: (%s): %s'
                         % ([metric, date], e))
Example #7
def monitor(request):
    status = True
    data = {}

    # Check Read/Write
    filepaths = [
         (settings.UPLOAD_DIR, os.R_OK | os.W_OK, 'We want read + write.'),
    ]

    if hasattr(settings, 'XPI_TARGETDIR'):
        filepaths.append((settings.XPI_TARGETDIR, os.R_OK | os.W_OK,
                          'We want read + write. Should be a shared directory '
                          'on multiserver installations'))

    for sdk in SDK.objects.all():
        filepaths.append((sdk.get_source_dir(), os.R_OK,
                          'We want read on %s' % sdk.version),)

    filepath_results = []
    filepath_status = True

    for path, perms, notes in filepaths:
        path_exists = os.path.isdir(path)
        path_perms = os.access(path, perms)
        filepath_status = filepath_status and path_exists and path_perms
        if not filepath_status and status:
            status = False
        filepath_results.append((path, path_exists, path_perms, notes))

    # free space on XPI_TARGETDIR disk
    x_path = '%s/' % settings.XPI_TARGETDIR
    s_path = '%s/' % settings.SDKDIR_PREFIX
    x = os.statvfs(x_path)
    s = os.statvfs(s_path)
    data['free'] = {
            'xpi_targetdir %s' % x_path: (x.f_bavail * x.f_frsize) / 1024,
            'sdkdir_prefix %s' % s_path: (s.f_bavail * s.f_frsize) / 1024
            }

    data['filepaths'] = filepath_results

    # Check celery
    try:
        data['celery_responses'] = CeleryResponse.objects.all()
    except:
        status = False

    # Check ElasticSearch
    try:
        es = get_es()
        data['es_health'] = es.cluster_health()
        data['es_health']['version'] = es.collect_info()['server']['version']['number']
        if data['es_health']['status'] == 'red':
            status = False
            log.warning('ElasticSearch cluster health was red.')
    except Exception, e:
        status = False
        log.critical('Failed to connect to ElasticSearch: %s' % e)
Example #8
def monitor(request):
    status = True
    data = {}

    # Check Read/Write
    filepaths = [
        (settings.UPLOAD_DIR, os.R_OK | os.W_OK, 'We want read + write.'),
    ]

    if hasattr(settings, 'XPI_TARGETDIR'):
        filepaths.append((settings.XPI_TARGETDIR, os.R_OK | os.W_OK,
                          'We want read + write. Should be a shared directory '
                          'on multiserver installations'))

    for sdk in SDK.objects.all():
        filepaths.append((sdk.get_source_dir(), os.R_OK,
                          'We want read on %s' % sdk.version), )

    filepath_results = []
    filepath_status = True

    for path, perms, notes in filepaths:
        path_exists = os.path.isdir(path)
        path_perms = os.access(path, perms)
        filepath_status = filepath_status and path_exists and path_perms
        if not filepath_status and status:
            status = False
        filepath_results.append((path, path_exists, path_perms, notes))

    # free space on XPI_TARGETDIR disk
    x_path = '%s/' % settings.XPI_TARGETDIR
    s_path = '%s/' % settings.SDKDIR_PREFIX
    x = os.statvfs(x_path)
    s = os.statvfs(s_path)
    data['free'] = [('xpi_targetdir %s' % x_path, x.f_bavail * x.f_frsize),
                    ('sdkdir_prefix %s' % s_path, s.f_bavail * s.f_frsize)]

    data['filepaths'] = filepath_results

    # Check celery
    try:
        data['celery_responses'] = CeleryResponse.objects.all()
    except:
        status = False

    # Check ElasticSearch
    try:
        es = get_es()
        data['es_health'] = es.cluster_health()
        data['es_health']['version'] = (
            es.collect_info()['server']['version']['number'])
        if data['es_health']['status'] == 'red':
            status = False
            log.warning('ElasticSearch cluster health was red.')
    except Exception, e:
        status = False
        log.critical('Failed to connect to ElasticSearch: %s' % e)
Example #9
def zip_source(pk, hashtag, tqueued=None, **kw):
    if not hashtag:
        log.critical("[zip] No hashtag provided")
        return
    tstart = time.time()
    if tqueued:
        tinqueue = (tstart - tqueued) * 1000
        statsd.timing('zip.queued', tinqueue)
        log.info('[zip:%s] Addon job picked from queue (%dms)' % (hashtag, tinqueue))
    log.debug("[zip:%s] Compressing" % pk)
    PackageRevision.objects.get(pk=pk).zip_source(hashtag=hashtag, tstart=tstart)
    log.debug("[zip:%s] Compressed" % pk)
Example #10
def get_amo_cursor():
    import MySQLdb
    try:
        auth_conn = MySQLdb.connect(
            host=settings.AUTH_DATABASE['HOST'],
            user=settings.AUTH_DATABASE['USER'],
            passwd=settings.AUTH_DATABASE['PASSWORD'],
            db=settings.AUTH_DATABASE['NAME'])
    except Exception, err:
        log.critical("Authentication database connection failure: %s"
                % str(err))
        raise
Example #11
def build(sdk_dir, package_dir, filename, hashtag, tstart=None, options=None, temp_dir=None):
    """Build xpi from SDK with prepared packages in sdk_dir.

    :params:
        * sdk_dir (String) SDK directory
        * package_dir (string) dir of the Add-on package
        * filename (string) XPI will be built with this name
        * hashtag (string) XPI will be copied to a file whose name is created
          using the unique hashtag
        * tstart (float) time.time() taken when the process started

    :returns: (list) ``cfx xpi`` response where ``[0]`` is ``stdout`` and
              ``[1]`` ``stderr``
    """

    t1 = time.time()

    if not temp_dir:
        temp_dir = sdk_dir

    # create XPI
    os.chdir(package_dir)

    cfx = [
        settings.PYTHON_EXEC,
        "%s/bin/cfx" % sdk_dir,
        "--binary=%s" % settings.XULRUNNER_BINARY,
        "--keydir=%s/%s" % (sdk_dir, settings.KEYDIR),
        "xpi",
    ]
    if options:
        cfx.append(options)

    log.debug(cfx)

    info_targetfilename = "%s.json" % hashtag
    info_targetpath = os.path.join(settings.XPI_TARGETDIR, info_targetfilename)

    env = dict(
        PATH="%s/bin:%s" % (sdk_dir, os.environ["PATH"]),
        VIRTUAL_ENV=sdk_dir,
        CUDDLEFISH_ROOT=sdk_dir,
        PYTHONPATH=os.path.join(sdk_dir, "python-lib"),
    )
    try:
        process = subprocess.Popen(cfx, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
        response = process.communicate()
    except subprocess.CalledProcessError, err:
        info_write(info_targetpath, "error", str(err), hashtag)
        log.critical("[xpi:%s] Failed to build xpi: %s.  Command(%s)" % (hashtag, str(err), cfx))
        shutil.rmtree(temp_dir)
        raise
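
Per the docstring above, build() raises on subprocess failure and otherwise
returns the cfx xpi (stdout, stderr) pair. A hedged sketch of a caller that
treats a non-empty stderr as a failed build (build_or_log is hypothetical;
the arguments mirror the signature above):

def build_or_log(sdk_dir, package_dir, filename, hashtag):
    # response[0] is stdout and response[1] is stderr from the cfx process.
    stdout, stderr = build(sdk_dir, package_dir, filename, hashtag)
    if stderr:
        log.critical('[xpi:%s] cfx wrote to stderr: %s' % (hashtag, stderr))
        return None
    return stdout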
Example #12
def copy(request, revision_id):
    """
    Copy package - create a duplicate of the Package, set user as author
    """
    source = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    log.debug("[copy: %s] Copying started from (%s)" % (revision_id, source))

    # save package
    try:
        package = source.package.copy(request.user)
    except IntegrityError, err:
        log.critical(("[copy: %s] Package copy failed") % revision_id)
        return HttpResponseForbidden("You already have a %s with that name" % escape(source.package.get_type_name()))
Example #13
def copy(request, revision_id):
    """
    Copy package - create a duplicate of the Package, set user as author
    """
    source = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    log.debug('[copy: %s] Copying started from (%s)' % (revision_id, source))

    # save package
    try:
        package = source.package.copy(request.user)
    except IntegrityError, err:
        log.critical(("[copy: %s] Package copy failed") % revision_id)
        return HttpResponseForbidden('You already have a %s with that name' %
                                     escape(source.package.get_type_name()))
Example #14
def build(sdk_dir, package_dir, filename, hashtag):
    """Build xpi from source in sdk_dir."""

    t1 = time.time()

    # create XPI
    os.chdir(package_dir)

    # @TODO xulrunner should be a config variable
    cfx = [settings.PYTHON_EXEC, '%s/bin/cfx' % sdk_dir,
           '--binary=/usr/bin/xulrunner',
           '--keydir=%s/%s' % (sdk_dir, settings.KEYDIR), 'xpi']

    env = dict(PATH='%s/bin:%s' % (sdk_dir, os.environ['PATH']),
               VIRTUAL_ENV=sdk_dir,
               CUDDLEFISH_ROOT=sdk_dir,
               PYTHONPATH=os.path.join(sdk_dir, 'python-lib'))
    try:
        process = subprocess.Popen(cfx, shell=False, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE, env=env)
        response = process.communicate()
    except subprocess.CalledProcessError:
        log.critical("Failed to build xpi: %s.  Command(%s)" % (
                     subprocess.CalledProcessError, cfx))
        return HttpResponseServerError
    if response[1]:
        log.critical("Failed to build xpi.\nError: %s" % response[1])
        return HttpResponseForbidden(response[1])

    # move the XPI created to the XPI_TARGETDIR
    xpi_targetfilename = "%s.xpi" % hashtag
    xpi_targetpath = os.path.join(settings.XPI_TARGETDIR, xpi_targetfilename)
    xpi_path = os.path.join(package_dir, "%s.xpi" % filename)
    log.debug(response)
    log.debug("%s, %s" % (xpi_path, xpi_targetpath))
    shutil.copy(xpi_path, xpi_targetpath)
    shutil.rmtree(sdk_dir)

    ret = [xpi_targetfilename]
    ret.extend(response)

    t2 = time.time()

    log.info('[xpi:%s] Created xpi: %s (time: %0.3fms)' % (hashtag,
                                                           xpi_targetpath,
                                                           ((t2 - t1) * 1000)))

    return ret
Example #15
def update_global_totals(job, date, **kw):
    log.info('Updating global statistics totals (%s) for (%s)' % (job, date))

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()

    q = """REPLACE INTO global_stats (`name`, `count`, `date`)
           VALUES (%s, %s, %s)"""
    p = [job, num or 0, date]

    try:
        cursor = connection.cursor()
        cursor.execute(q, p)
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
Example #16
def build(sdk_dir, package_dir, filename, hashtag, tstart=None):
    """Build xpi from SDK with prepared packages in sdk_dir.

    :params:
        * sdk_dir (String) SDK directory
        * package_dir (string) dir of the Add-on package
        * filename (string) XPI will be built with this name
        * hashtag (string) XPI will be copied to a file whose name is created
          using the unique hashtag
        * tstart (float) time.time() taken when the process started

    :returns: (list) ``cfx xpi`` response where ``[0]`` is ``stdout`` and
              ``[1]`` ``stderr``
    """

    t1 = time.time()

    # create XPI
    os.chdir(package_dir)

    # @TODO xulrunner should be a config variable
    cfx = [settings.PYTHON_EXEC, '%s/bin/cfx' % sdk_dir,
           '--binary=/usr/bin/xulrunner',
           '--keydir=%s/%s' % (sdk_dir, settings.KEYDIR), 'xpi',
           '--strip-xpi']

    log.debug(cfx)

    info_targetfilename = "%s.json" % hashtag
    info_targetpath = os.path.join(settings.XPI_TARGETDIR, info_targetfilename)

    env = dict(PATH='%s/bin:%s' % (sdk_dir, os.environ['PATH']),
               VIRTUAL_ENV=sdk_dir,
               CUDDLEFISH_ROOT=sdk_dir,
               PYTHONPATH=os.path.join(sdk_dir, 'python-lib'))
    try:
        process = subprocess.Popen(cfx, shell=False, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE, env=env)
        response = process.communicate()
    except subprocess.CalledProcessError, err:
        info_write(info_targetpath, 'error', str(err), hashtag)
        log.critical("[xpi:%s] Failed to build xpi: %s.  Command(%s)" % (
                     hashtag, str(err), cfx))
        shutil.rmtree(sdk_dir)
        raise
Example #17
def update_webtrend(url, date, **kw):
    resp = requests.get(url, auth=(settings.WEBTRENDS_USERNAME,
                                   settings.WEBTRENDS_PASSWORD))
    if resp.status_code != 200:
        log.critical('Failed to fetch webtrends stats. url: %s status_code: %s'
                     % (url, resp.status_code))
        return
    else:
        data = resp.json
    key, val = data['data'][0]['measures'].items()[0]
    p = ['webtrends_' + key, val, date]
    try:
        cursor = connection.cursor()
        cursor.execute('REPLACE INTO global_stats (name, count, date) '
                       'values (%s, %s, %s)', p)
        transaction.commit_unless_managed()
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
Example #18
def update_global_totals(job, date, **kw):
    log.info("[%s] Updating global statistics totals (%s) for (%s)" %
                   (update_global_totals.rate_limit, job, date))

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()

    q = """REPLACE INTO
                global_stats(`name`, `count`, `date`)
            VALUES
                (%s, %s, %s)"""
    p = [job, num or 0, date]

    try:
        cursor = connection.cursor()
        cursor.execute(q, p)
        transaction.commit_unless_managed()
    except Exception, e:
        log.critical("Failed to update global stats: (%s): %s" % (p, e))
Example #19
def all_zip(request, pk):
    """Zip all and return a file."""
    if not pk:
        log.critical("[zip] No package_id provided")
        return
    package = Package.objects.get(pk=pk)
    zips = []
    # Zip all revisions of the package
    for revision in package.revisions.all():
        zips.append(revision.zip_source(hashtag=revision.get_cache_hashtag()))
    # Zip all zipped revisions into one file
    zip_targetname = "package-%d.zip" % package.pk
    zip_targetpath = os.path.join(settings.XPI_TARGETDIR, zip_targetname)
    with closing(ZipFile(zip_targetpath, 'w', ZIP_DEFLATED)) as z:
        for fn in zips:
            z.write(fn, os.path.basename(fn))
    log.info('[zipall:%s] Downloading All zipped' % pk)

    response = serve(request, zip_targetpath, '/', show_indexes=False)
    response['Content-Disposition'] = ('attachment; filename="%s"' % zip_targetname)
    return response
Example #20
def update_google_analytics(date, **kw):
    creds_data = getattr(settings, 'GOOGLE_ANALYTICS_CREDENTIALS', None)
    if not creds_data:
        log.critical('Failed to update global stats: '
                     'GOOGLE_ANALYTICS_CREDENTIALS not set')
        return

    creds = OAuth2Credentials(
        *[creds_data[k] for k in
          ('access_token', 'client_id', 'client_secret',
           'refresh_token', 'token_expiry', 'token_uri',
           'user_agent')])
    h = httplib2.Http()
    creds.authorize(h)
    service = build('analytics', 'v3', http=h)
    domain = getattr(settings,
                     'GOOGLE_ANALYTICS_DOMAIN', None) or settings.DOMAIN
    profile_id = get_profile_id(service, domain)
    if profile_id is None:
        log.critical('Failed to update global stats: could not access a Google'
                     ' Analytics profile for ' + domain)
        return
    datestr = date.strftime('%Y-%m-%d')
    try:
        data = service.data().ga().get(ids='ga:' + profile_id,
                                       start_date=datestr,
                                       end_date=datestr,
                                       metrics='ga:visits').execute()
        # Storing this under the webtrends stat name so it goes on the
        # same graph as the old webtrends data.
        p = ['webtrends_DailyVisitors', data['rows'][0][0], date]
    except Exception, e:
        log.critical(
            'Fetching stats data for %s from Google Analytics failed: %s'
            % (domain, e))
        return
Example #21
def xpi_build_from_model(rev_pk, mod_codes={}, att_codes={}, hashtag=None):
    """ Get object and build xpi
    """
    if not hashtag:
        log.critical("No hashtag provided")
        return
    revision = PackageRevision.objects.get(pk=rev_pk)
    # prepare changed modules and attachments
    modules = []
    attachments = []
    for mod in revision.modules.all():
        if str(mod.pk) in mod_codes:
            mod.code = mod_codes[str(mod.pk)]
            modules.append(mod)
    for att in revision.attachments.all():
        if str(att.pk) in att_codes:
            att.code = att_codes[str(att.pk)]
            attachments.append(att)
    revision.build_xpi(
            modules=modules,
            attachments=attachments,
            hashtag=hashtag)
Example #22
def all_zip(request, pk):
    """Zip all and return a file."""
    if not pk:
        log.critical("[zip] No package_id provided")
        return
    package = Package.objects.get(pk=pk)
    zips = []
    # Zip all revisions of the package
    for revision in package.revisions.all():
        zips.append(revision.zip_source(hashtag=revision.get_cache_hashtag()))
    # Zip all zipped revisions into one file
    zip_targetname = "package-%d.zip" % package.pk
    zip_targetpath = os.path.join(settings.XPI_TARGETDIR, zip_targetname)
    with closing(ZipFile(zip_targetpath, 'w', ZIP_DEFLATED)) as z:
        for fn in zips:
            z.write(fn, os.path.basename(fn))
    log.info('[zipall:%s] Downloading All zipped' % pk)

    response = serve(request, zip_targetpath, '/', show_indexes=False)
    response['Content-Disposition'] = ('attachment; filename="%s"' %
                                       zip_targetname)
    return response
Example #23
            for name in ['doc', 'lib', 'data', 'tests']:
                _extract(f, name, resource_dir_prefix)
        # Add all dependencies to the manifest
        # This is a flat list - hierarchy might be different from original
        self.manifest['dependencies'].extend(dependencies)
        # Add icon files to manifest
        for f in main_dir_files:
            if 'icon' in f:
                self.manifest[f.split('.')[0]] = f

        # create add-on's package.json file
        log.debug('Writing manifest %s, %s' % (os.path.join(
                sdk_dir, 'packages', package_name, 'package.json'),
                self.manifest))
        package_dir = os.path.join(
                    sdk_dir, 'packages', package_name)
        try:
            with open(os.path.join(package_dir, 'package.json'), 'w') as manifest:
                manifest.write(simplejson.dumps(self.manifest))
        except:
            log.critical("Manifest couldn't be exported to %s" % package_path)
            raise

        return sdk_dir

    def cleanup(self):
        """closes all files opened during the repackaging
        """
        self.xpi_zip.close()
        self.xpi_temp.close()
Example #24
                                       end_date=datestr,
                                       metrics='ga:visits').execute()
        # Storing this under the webtrends stat name so it goes on the
        # same graph as the old webtrends data.
        p = ['webtrends_DailyVisitors', data['rows'][0][0], date]
    except Exception, e:
        log.critical(
            'Fetching stats data for %s from Google Analytics failed: %s'
            % (domain, e))
        return
    try:
        cursor = connection.cursor()
        cursor.execute('REPLACE INTO global_stats (name, count, date) '
                       'values (%s, %s, %s)', p)
        transaction.commit_unless_managed()
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
        return

    log.debug('Committed global stats details: (%s) has (%s) for (%s)'
              % tuple(p))


@task
def update_global_totals(job, date, **kw):
    log.info("Updating global statistics totals (%s) for (%s)" %
                   (job, date))

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()
Example #25
    # Check memcached
    memcache = getattr(settings, 'CACHES', {}).get('default')
    memcache_results = []
    if memcache and 'memcached' in memcache['BACKEND']:
        hosts = memcache['LOCATION']
        if not isinstance(hosts, (tuple, list)):
            hosts = [hosts]
        for host in hosts:
            ip, port = host.split(':')
            try:
                s = socket.socket()
                s.connect((ip, int(port)))
            except Exception, e:
                status = False
                result = False
                log.critical('Failed to connect to memcached (%s): %s' %
                             (host, e))
            else:
                result = True
            finally:
                s.close()
            memcache_results.append((ip, port, result))
        if len(memcache_results) < 2:
            status = False
            log.warning('You should have 2+ memcache servers. '
                        'You have %d.' % len(memcache_results))

    if not memcache_results:
        status = False
        log.info('Memcached is not configured.')
    data['memcached'] = memcache_results
Example #26
def build(sdk_dir,
          package_dir,
          filename,
          hashtag,
          tstart=None,
          options=None,
          temp_dir=None):
    """Build xpi from SDK with prepared packages in sdk_dir.

    :params:
        * sdk_dir (String) SDK directory
        * package_dir (string) dir of the Add-on package
        * filename (string) XPI will be built with this name
        * hashtag (string) XPI will be copied to a file whose name is created
          using the unique hashtag
        * tstart (float) time.time() taken when the process started

    :returns: (list) ``cfx xpi`` response where ``[0]`` is ``stdout`` and
              ``[1]`` ``stderr``
    """

    t1 = time.time()

    if not temp_dir:
        temp_dir = sdk_dir

    # create XPI
    os.chdir(package_dir)

    cfx = [
        settings.PYTHON_EXEC,
        '%s/bin/cfx' % sdk_dir,
        '--binary=%s' % settings.XULRUNNER_BINARY,
        '--keydir=%s/%s' % (sdk_dir, settings.KEYDIR), 'xpi'
    ]
    if options:
        cfx.append(options)

    log.debug(cfx)

    info_targetfilename = "%s.json" % hashtag
    info_targetpath = os.path.join(settings.XPI_TARGETDIR, info_targetfilename)

    env = dict(PATH='%s/bin:%s' % (sdk_dir, os.environ['PATH']),
               DISPLAY=os.environ.get('DISPLAY', ':0'),
               VIRTUAL_ENV=sdk_dir,
               CUDDLEFISH_ROOT=sdk_dir,
               PYTHONPATH=os.path.join(sdk_dir, 'python-lib'))
    try:
        process = subprocess.Popen(cfx,
                                   shell=False,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   env=env)
        response = process.communicate()
    except subprocess.CalledProcessError, err:
        info_write(info_targetpath, 'error', str(err), hashtag)
        log.critical("[xpi:%s] Failed to build xpi: %s.  Command(%s)" %
                     (hashtag, str(err), cfx))
        shutil.rmtree(temp_dir)
        raise
Example #27
def extract_po(project, locale, path, entities=False):
    """Extract .po (gettext) file with path and save or update in DB."""

    try:
        po = polib.pofile(path)

        relative_path = get_relative_path(path, locale)
        if relative_path[-1] == 't':
            relative_path = relative_path[:-1]

        resource, created = Resource.objects.get_or_create(project=project,
                                                           path=relative_path,
                                                           format='po')

        if entities:
            for order, entry in enumerate(po):
                if not entry.obsolete:
                    save_entity(resource=resource,
                                string=entry.msgid,
                                string_plural=entry.msgid_plural,
                                comment=entry.comment,
                                order=order,
                                source=entry.occurrences)

            update_entity_count(resource)

        else:
            for entry in (po.translated_entries() + po.fuzzy_entries()):
                if not entry.obsolete:

                    # Entities without plurals
                    if len(entry.msgstr) > 0:
                        try:
                            e = Entity.objects.get(resource=resource,
                                                   string=entry.msgid)
                            save_translation(entity=e,
                                             locale=locale,
                                             string=entry.msgstr,
                                             fuzzy='fuzzy' in entry.flags)

                        except Entity.DoesNotExist:
                            continue

                    # Pluralized entities
                    elif len(entry.msgstr_plural) > 0:
                        try:
                            e = Entity.objects.get(resource=resource,
                                                   string=entry.msgid)
                            for k in entry.msgstr_plural:
                                save_translation(entity=e,
                                                 locale=locale,
                                                 string=entry.msgstr_plural[k],
                                                 plural_form=k,
                                                 fuzzy='fuzzy' in entry.flags)

                        except Entity.DoesNotExist:
                            continue

            update_stats(resource, locale)

        log.debug("[" + locale.code + "]: " + path + " saved to DB.")
    except Exception as e:
        log.critical('PoExtractError for %s: %s' % (path, e))
Example #28
    :attr: amo_id (int) id of the add-on in AMO
    :attr: amo_file_id (int) id of the file uploaded to AMO
    :returns: dict
    """
    url = get_addon_amo_api_url(amo_id, amo_file_id)
    log.debug("AMOAPI: receiving add-on info from \"%s\"" % url)
    req = urllib2.Request(url)
    try:
        page = urllib2.urlopen(req, timeout=settings.URLOPEN_TIMEOUT)
    except urllib2.HTTPError, error:
        if '404' in str(error):
            return {'deleted': True}
    except Exception, error:
        msg = "AMOAPI: ERROR receiving add-on info from \"%s\"%s%s"
        log.critical(msg % (url, '\n', str(error)))
        return {'error': msg % (url, ' : ', str(error))}
    amo_xml = etree.fromstring(page.read())
    amo_data = {}
    for element in amo_xml.iter():
        if element.tag in ('status', 'rating', 'version', 'slug'):
            amo_data[element.tag] = element.text
        if element.tag == 'status':
            amo_data['status_code'] = int(element.get('id'))
    # return dict
    return amo_data


def fetch_amo_user(email):
    log.debug('#' * 80)
    amo = AMOOAuth(domain=settings.AMOOAUTH_DOMAIN,
Example #29
    except subprocess.CalledProcessError, err:
        info_write(info_targetpath, 'error', str(err), hashtag)
        log.critical("[xpi:%s] Failed to build xpi: %s.  Command(%s)" %
                     (hashtag, str(err), cfx))
        shutil.rmtree(temp_dir)
        raise
    if (waffle.switch_is_active('SDKErrorInStdOutWorkaround')
            and not os.path.exists(
                os.path.join(package_dir, '%s.xpi' % filename))):
        badresponse = response[0]
        response = ['', '']
        response[1] = badresponse
    xpi_path = os.path.join(package_dir, "%s.xpi" % filename)
    if process.returncode != 0:
        info_write(info_targetpath, 'error', response[1], hashtag)
        log.critical("[xpi:%s] Failed to build xpi., stderr: %s" %
                     (hashtag, response[1]))
        shutil.rmtree(temp_dir)
        return response

    t2 = time.time()

    # XPI: move the XPI created to the XPI_TARGETDIR (local to NFS)
    xpi_targetfilename = "%s.xpi" % hashtag
    xpi_targetpath = os.path.join(settings.XPI_TARGETDIR, xpi_targetfilename)
    try:
        shutil.copy(xpi_path, xpi_targetpath)
    except IOError, err:
        info_write(info_targetpath, 'error', 'XPI file can not be copied.',
                   hashtag)
        log.critical("[xpi:%s] Failed to copy xpi.\n%s" % (hashtag, str(err)))
        shutil.rmtree(temp_dir)
Example #30
               VIRTUAL_ENV=sdk_dir,
               CUDDLEFISH_ROOT=sdk_dir,
               PYTHONPATH=os.path.join(sdk_dir, 'python-lib'))
    try:
        process = subprocess.Popen(cfx, shell=False, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE, env=env)
        response = process.communicate()
    except subprocess.CalledProcessError, err:
        info_write(info_targetpath, 'error', str(err), hashtag)
        log.critical("[xpi:%s] Failed to build xpi: %s.  Command(%s)" % (
                     hashtag, str(err), cfx))
        shutil.rmtree(sdk_dir)
        raise
    if response[1]:
        info_write(info_targetpath, 'error', response[1], hashtag)
        log.critical("[xpi:%s] Failed to build xpi." % hashtag)
        shutil.rmtree(sdk_dir)
        return response

    t2 = time.time()

    # XPI: move the XPI created to the XPI_TARGETDIR (local to NFS)
    xpi_path = os.path.join(package_dir, "%s.xpi" % filename)
    xpi_targetfilename = "%s.xpi" % hashtag
    xpi_targetpath = os.path.join(settings.XPI_TARGETDIR, xpi_targetfilename)
    shutil.copy(xpi_path, xpi_targetpath)
    shutil.rmtree(sdk_dir)

    ret = [xpi_targetfilename]
    ret.extend(response)
Example #31
    :attr: amo_id (int) id of the add-on in AMO
    :attr: amo_file_id (int) id of the file uploaded to AMO
    :returns: dict
    """
    url = get_addon_amo_api_url(amo_id, amo_file_id)
    log.debug("AMOAPI: receiving add-on info from \"%s\"" % url)
    req = urllib2.Request(url)
    try:
        page = urllib2.urlopen(req, timeout=settings.URLOPEN_TIMEOUT)
    except urllib2.HTTPError, error:
        if '404' in str(error):
            return {'deleted': True}
    except Exception, error:
        msg = "AMOAPI: ERROR receiving add-on info from \"%s\"%s%s"
        log.critical(msg % (url, '\n', str(error)))
        return {'error': msg % (url, ' : ', str(error))}
    amo_xml = etree.fromstring(page.read())
    amo_data = {}
    for element in amo_xml.iter():
        if element.tag in ('status', 'rating', 'version', 'slug'):
            amo_data[element.tag] = element.text
        if element.tag == 'status':
            amo_data['status_code'] = int(element.get('id'))
    # return dict
    return amo_data


def fetch_amo_user(email):
    log.debug('#' * 80)
    amo = AMOOAuth(domain=settings.AMOOAUTH_DOMAIN,
Example #32
                resource_dir_prefix = uri_prefix_1
            for name in ['docs', 'lib', 'data', 'tests']:
                _extract(f, name, resource_dir_prefix)
        # Add all dependencies to the manifest
        # This is a flat list - hierarchy might be different from original
        self.manifest['dependencies'].extend(dependencies)
        # Add icon files to manifest
        for f in main_dir_files:
            if 'icon' in f:
                self.manifest[f.split('.')[0]] = f

        # create add-on's package.json file
        log.debug('Writing manifest %s, %s' % (os.path.join(
            sdk_dir, 'packages', package_name, 'package.json'), self.manifest))
        package_dir = os.path.join(sdk_dir, 'packages', package_name)
        try:
            with open(os.path.join(package_dir, 'package.json'),
                      'w') as manifest:
                manifest.write(simplejson.dumps(self.manifest))
        except:
            log.critical("Manifest couldn't be exported to %s" % package_path)
            raise

        return sdk_dir

    def cleanup(self):
        """closes all files opened during the repackaging
        """
        self.xpi_zip.close()
        self.xpi_temp.close()
Example #33
                                       metrics='ga:visits').execute()
        # Storing this under the webtrends stat name so it goes on the
        # same graph as the old webtrends data.
        p = ['webtrends_DailyVisitors', data['rows'][0][0], date]
    except Exception, e:
        log.critical(
            'Fetching stats data for %s from Google Analytics failed: %s'
            % (domain, e))
        return
    try:
        cursor = connection.cursor()
        cursor.execute(
            'REPLACE INTO global_stats (name, count, date) '
            'values (%s, %s, %s)', p)
        transaction.commit_unless_managed()
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
        return

    log.debug('Committed global stats details: (%s) has (%s) for (%s)' %
              tuple(p))


@task
def update_global_totals(job, date, **kw):
    log.info("Updating global statistics totals (%s) for (%s)" % (job, date))

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()
Example #34
                                       metrics='ga:visits').execute()
        # Storing this under the webtrends stat name so it goes on the
        # same graph as the old webtrends data.
        p = ['webtrends_DailyVisitors', data['rows'][0][0], date]
    except Exception, e:
        log.critical(
            'Fetching stats data for %s from Google Analytics failed: %s'
            % (domain, e))
        return
    try:
        cursor = connection.cursor()
        cursor.execute(
            'REPLACE INTO global_stats (name, count, date) '
            'values (%s, %s, %s)', p)
        transaction.commit_unless_managed()
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
        return

    log.debug('Committed global stats details: (%s) has (%s) for (%s)' %
              tuple(p))


@task
def update_global_totals(job, date, **kw):
    log.info('Updating global statistics totals (%s) for (%s)' % (job, date))

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()
Example #35
                                       end_date=datestr,
                                       metrics='ga:visits').execute()
        # Storing this under the webtrends stat name so it goes on the
        # same graph as the old webtrends data.
        p = ['webtrends_DailyVisitors', data['rows'][0][0], date]
    except Exception, e:
        log.critical(
            'Fetching stats data for %s from Google Analytics failed: %s'
            % (domain, e))
        return
    try:
        cursor = connection.cursor()
        cursor.execute('REPLACE INTO global_stats (name, count, date) '
                       'values (%s, %s, %s)', p)
        transaction.commit_unless_managed()
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
        return

    log.debug('Committed global stats details: (%s) has (%s) for (%s)'
              % tuple(p))


@task
def update_global_totals(job, date, **kw):
    log.info('Updating global statistics totals (%s) for (%s)' % (job, date))

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()
Example #36
        response = process.communicate()
    except subprocess.CalledProcessError, err:
        info_write(info_targetpath, 'error', str(err), hashtag)
        log.critical("[xpi:%s] Failed to build xpi: %s.  Command(%s)" % (
                     hashtag, str(err), cfx))
        shutil.rmtree(temp_dir)
        raise
    if (waffle.switch_is_active('SDKErrorInStdOutWorkaround') and
            not os.path.exists(os.path.join(package_dir, '%s.xpi' % filename))):
        badresponse = response[0]
        response = ['', '']
        response[1] = badresponse
    xpi_path = os.path.join(package_dir, "%s.xpi" % filename)
    if process.returncode != 0:
        info_write(info_targetpath, 'error', response[1], hashtag)
        log.critical("[xpi:%s] Failed to build xpi., stderr: %s" % (
            hashtag, response[1]))
        shutil.rmtree(temp_dir)
        return response

    t2 = time.time()

    # XPI: move the XPI created to the XPI_TARGETDIR (local to NFS)
    xpi_targetfilename = "%s.xpi" % hashtag
    xpi_targetpath = os.path.join(settings.XPI_TARGETDIR, xpi_targetfilename)
    try:
        shutil.copy(xpi_path, xpi_targetpath)
    except IOError, err:
        info_write(info_targetpath, 'error',
                'XPI file can not be copied.',
                hashtag)
        log.critical("[xpi:%s] Failed to copy xpi.\n%s" % (hashtag, str(err)))
Example #37
                                   stderr=subprocess.PIPE, env=env)
        response = process.communicate()
    except subprocess.CalledProcessError, err:
        info_write(info_targetpath, 'error', str(err), hashtag)
        log.critical("[xpi:%s] Failed to build xpi: %s.  Command(%s)" % (
                     hashtag, str(err), cfx))
        shutil.rmtree(sdk_dir)
        raise
    if (waffle.switch_is_active('SDKErrorInStdOutWorkaround') and
            not os.path.exists(os.path.join(package_dir, '%s.xpi' % filename))):
        badresponse = response[0]
        response = ['', '']
        response[1] = badresponse
    if response[1]:
        info_write(info_targetpath, 'error', response[1], hashtag)
        log.critical("[xpi:%s] Failed to build xpi." % hashtag)
        shutil.rmtree(sdk_dir)
        return response

    t2 = time.time()

    # XPI: move the XPI created to the XPI_TARGETDIR (local to NFS)
    xpi_path = os.path.join(package_dir, "%s.xpi" % filename)
    xpi_targetfilename = "%s.xpi" % hashtag
    xpi_targetpath = os.path.join(settings.XPI_TARGETDIR, xpi_targetfilename)
    try:
        shutil.copy(xpi_path, xpi_targetpath)
    except IOError, err:
        info_write(info_targetpath, 'error',
                'XPI file can not be copied.',
                hashtag)
Example #38
                                       start_date=datestr,
                                       end_date=datestr,
                                       metrics='ga:visits').execute()
        # Storing this under the webtrends stat name so it goes on the
        # same graph as the old webtrends data.
        p = ['webtrends_DailyVisitors', data['rows'][0][0], date]
    except Exception, e:
        log.critical(
            'Fetching stats data for %s from Google Analytics failed: %s'
            % (domain, e))
        return
    try:
        cursor = connection.cursor()
        cursor.execute('REPLACE INTO global_stats (name, count, date) '
                       'values (%s, %s, %s)', p)
    except Exception, e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
        return

    log.debug('Committed global stats details: (%s) has (%s) for (%s)'
              % tuple(p))


@task
def update_global_totals(job, date, **kw):
    log.info('Updating global statistics totals (%s) for (%s)' % (job, date))

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()
Example #39
    log.debug(cfx)

    env = dict(PATH='%s/bin:%s' % (sdk_dir, os.environ['PATH']),
               VIRTUAL_ENV=sdk_dir,
               CUDDLEFISH_ROOT=sdk_dir,
               PYTHONPATH=os.path.join(sdk_dir, 'python-lib'))
    try:
        process = subprocess.Popen(cfx, shell=False, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE, env=env)
        response = process.communicate()
    except subprocess.CalledProcessError, err:
        log.critical("Failed to build xpi: %s.  Command(%s)" % (
                     str(err), cfx))
        raise
    if response[1] and not force_guid:
        log.critical("Failed to build xpi.\nError: %s" % response[1])
        return response

    xpi_path = os.path.join(package_dir, "%s.xpi" % filename)

    # move the XPI created to the XPI_TARGETDIR
    xpi_targetfilename = "%s.xpi" % hashtag
    xpi_targetpath = os.path.join(settings.XPI_TARGETDIR, xpi_targetfilename)
    shutil.copy(xpi_path, xpi_targetpath)
    shutil.rmtree(sdk_dir)

    ret = [xpi_targetfilename]
    ret.extend(response)

    t2 = time.time()
Example #40
def extract_po(project, locale, paths, entities=False):
    """Extract .po (gettext) files from paths and save or update in DB."""

    for path in paths:
        try:
            po = polib.pofile(path)
            escape = polib.escape

            relative_path = get_relative_path(path, locale)
            if relative_path[-1] == 't':
                relative_path = relative_path[:-1]

            resource, created = Resource.objects.get_or_create(
                project=project, path=relative_path)

            if entities:
                for entry in po:
                    if not entry.obsolete:
                        save_entity(resource=resource,
                                    string=escape(entry.msgid),
                                    string_plural=escape(entry.msgid_plural),
                                    comment=entry.comment,
                                    source=entry.occurrences)

                update_entity_count(resource, project)

            else:
                for entry in (po.translated_entries() + po.fuzzy_entries()):
                    if not entry.obsolete:

                        # Entities without plurals
                        if len(escape(entry.msgstr)) > 0:
                            try:
                                e = Entity.objects.get(
                                    resource=resource,
                                    string=escape(entry.msgid))
                                save_translation(
                                    entity=e,
                                    locale=locale,
                                    string=escape(entry.msgstr),
                                    fuzzy='fuzzy' in entry.flags)

                            except Entity.DoesNotExist:
                                continue

                        # Pluralized entities
                        elif len(entry.msgstr_plural) > 0:
                            try:
                                e = Entity.objects.get(
                                    resource=resource,
                                    string=escape(entry.msgid))
                                for k in entry.msgstr_plural:
                                    save_translation(
                                        entity=e,
                                        locale=locale,
                                        string=escape(entry.msgstr_plural[k]),
                                        plural_form=k,
                                        fuzzy='fuzzy' in entry.flags)

                            except Entity.DoesNotExist:
                                continue

                update_stats(resource, locale)

            log.debug("[" + locale.code + "]: " + path + " saved to DB.")
        except Exception as e:
            log.critical('PoExtractError for %s: %s' % (path, e))
Example #41
    # Check memcached
    memcache = getattr(settings, 'CACHES', {}).get('default')
    memcache_results = []
    if memcache and 'memcached' in memcache['BACKEND']:
        hosts = memcache['LOCATION']
        if not isinstance(hosts, (tuple, list)):
            hosts = [hosts]
        for host in hosts:
            ip, port = host.split(':')
            try:
                s = socket.socket()
                s.connect((ip, int(port)))
            except Exception, e:
                status = False
                result = False
                log.critical('Failed to connect to memcached (%s): %s'
                             % (host, e))
            else:
                result = True
            finally:
                s.close()
            memcache_results.append((ip, port, result))
        if len(memcache_results) < 2:
            status = False
            log.warning('You should have 2+ memcache servers. '
                        'You have %d.' % len(memcache_results))

    if not memcache_results:
        status = False
        log.info('Memcached is not configured.')
    data['memcached'] = memcache_results