def sitekey_frame(environ, start_response):
    """Serve the sitekey test frame, signed with the configured site key."""
    frame_template = get_config().get('testpages', 'sitekeyFrameTemplate')
    template_dir, template_name = os.path.split(frame_template)
    template = get_template(template_name, template_path=template_dir)

    path = request_path(environ)
    host = environ['HTTP_HOST']
    user_agent = environ['HTTP_USER_AGENT']

    # Sign "<path>\0<host>\0<user-agent>" with the site key.
    key = M2Crypto.EVP.load_key(get_config().get('testpages', 'sitekeyPath'))
    key.sign_init()
    key.sign_update('\x00'.join([path, host, user_agent]))
    public_key = base64.b64encode(key.as_der())
    signature = base64.b64encode(key.final())

    start_response('200 OK', [
        ('Content-Type', 'text/html; charset=utf-8'),
        ('X-Adblock-Key', '%s_%s' % (public_key, signature)),
    ])
    return [template.render({
        'public_key': public_key,
        'signature': signature,
        'http_path': path,
        'http_host': host,
        'http_ua': user_agent,
    }).encode('utf-8')]
def writeUpdateManifest(links):
    """
    writes an update manifest for all extensions and Android apps
    """
    # Collect per-type metadata; only repository types listed here get a manifest.
    extensions = {'gecko': [], 'android': [], 'safari': [], 'ie': []}
    for repo in Configuration.getRepositoryConfigurations():
        if repo.type not in extensions or not links.has_section(repo.repositoryName):
            continue
        data = readMetadata(repo, links.get(repo.repositoryName, 'version'))
        data['updateURL'] = links.get(repo.repositoryName, 'downloadURL')
        # Only append the ?update marker to downloads hosted on our own server.
        if data['updateURL'].startswith(repo.downloadsURL):
            data['updateURL'] += "?update"
        extensions[repo.type].append(data)

    # The Android manifest template can only describe a single app.
    if len(extensions['android']) > 1:
        print >>sys.stderr, 'Warning: more than one Android app defined, update manifest only works for one'

    for repoType in extensions.iterkeys():
        manifestPath = get_config().get('extensions', '%sUpdateManifestPath' % repoType)
        if repoType == 'ie':
            # IE uses a dedicated writer rather than a template.
            writeIEUpdateManifest(manifestPath, extensions[repoType])
        else:
            # ABP for Android used to have its own update manifest format. We need to
            # generate both that and the new one in the libadblockplus format as long
            # as a significant amount of users is on an old version.
            if repoType == 'android':
                newManifestPath = get_config().get("extensions", "androidNewUpdateManifestPath")
                writeAndroidUpdateManifest(newManifestPath, extensions[repoType])
            template = get_template(get_config().get('extensions', '%sUpdateManifest' % repoType))
            template.stream({'extensions': extensions[repoType]}).dump(manifestPath)
def get_db():
    """Return a MySQL connection to the configured reports database."""
    db_name = get_config().get('reports', 'database')
    db_user = get_config().get('reports', 'dbuser')
    db_password = get_config().get('reports', 'dbpassword')
    connect_args = {
        'user': db_user,
        'passwd': db_password,
        'db': db_name,
        'use_unicode': True,
        'charset': 'utf8',
    }
    # On Windows MySQLdb connects through a named pipe instead of a socket.
    if os.name == 'nt':
        connect_args['named_pipe'] = True
    return MySQLdb.connect(**connect_args)
def hook(ui, repo, node=None, **kwargs):
    """Mercurial hook: notify the remote IRC announcer about a changeset."""
    ctx = repo[node]
    arguments = [
        get_config().get('irchook', 'remote_command'),
        os.path.basename(repo.root),
        str(ctx.branch()),
        str(ctx.user()),
        str(ctx),
        str(ctx.description()),
    ]
    # Quote each argument so the remote shell sees them as single words.
    quoted_command = ' '.join(pipes.quote(argument) for argument in arguments)
    subprocess.call(['ssh', get_config().get('irchook', 'remote_host'),
                     quoted_command])
def hook(ui=None, repo=None, **kwargs):
    """Regenerate the authentication file.

    Works both as a Mercurial hook (repo supplied) and standalone, in which
    case the configured auth repository is used instead.
    """
    setupStderr()
    # `is not None` instead of `!= None`: identity check, not rich comparison.
    if repo is not None:
        root = repo.root
    else:
        root = get_config().get('hg', 'auth_repository')
    result = generate_data(root)
    # Renamed from `file`, which shadowed the builtin.
    with open(get_config().get('hg', 'auth_file'), 'wb') as auth_file:
        for chunk in result:
            auth_file.write(chunk)
def saveReport(guid, reportData, isNew=False):
    """Store a report in the database and render its HTML page.

    Saves the screenshot (if any), upserts the report row, records
    subscription matches, and writes the report page into the sharded
    data directory. `isNew` additionally bumps the contact's report count.
    """
    cursor = get_db().cursor()
    screenshot = reportData.get('screenshot', None)
    if screenshot is not None:
        # 2 = user-edited screenshot, 1 = unmodified, 0 = saving failed.
        reportData['hasscreenshot'] = 2 if reportData.get('screenshotEdited', False) else 1
        try:
            saveScreenshot(guid, screenshot)
        except (TypeError, UnicodeEncodeError):
            reportData['hasscreenshot'] = 0
        del reportData['screenshot']
    knownIssues = len(reportData.get('knownIssues', []))
    contact = getUserId(reportData.get('email', None)) if reportData.get('email', None) else None
    dumpstr = marshal.dumps(reportData)

    if contact is not None and isNew:
        executeQuery(cursor,
                     'INSERT INTO #PFX#users (id, reports) VALUES (%s, 1) ON DUPLICATE KEY UPDATE reports = reports + 1',
                     contact)
    executeQuery(cursor,
                 '''INSERT INTO #PFX#reports (guid, type, ctime, site, comment, status, contact, hasscreenshot, knownissues, dump) VALUES (%(guid)s, %(type)s, FROM_UNIXTIME(%(ctime)s), %(site)s, %(comment)s, %(status)s, %(contact)s, %(hasscreenshot)s, %(knownissues)s, _binary %(dump)s) ON DUPLICATE KEY UPDATE type = %(type)s, site = %(site)s, comment = %(comment)s, status = %(status)s, hasscreenshot = %(hasscreenshot)s, knownissues = %(knownissues)s, dump = _binary %(dump)s''',
                 {'guid': guid, 'type': reportData.get('type', None),
                  'ctime': reportData['time'],
                  'site': reportData.get('siteName', None),
                  'comment': reportData.get('comment', None),
                  'status': reportData.get('status', None),
                  'contact': contact,
                  'hasscreenshot': reportData.get('hasscreenshot', 0),
                  'knownissues': knownIssues,
                  'dump': dumpstr})
    if len(reportData['subscriptions']) > 0:
        for sn in reportData['subscriptions']:
            executeQuery(cursor, 'SELECT id FROM #PFX#subscriptions WHERE url = %s', sn['id'])
            # Renamed from `id`, which shadowed the builtin.
            row = cursor.fetchone()
            if row is not None:
                def filterMatch(f):
                    return any(u == sn['id'] for u in f.get('subscriptions', []))
                hasMatches = any(filterMatch(f) for f in reportData.get('filters', []))
                executeQuery(cursor,
                             'INSERT IGNORE INTO #PFX#sublists (report, list, hasmatches) VALUES (%s, %s, %s)',
                             (guid, row[0], hasMatches))

    get_db().commit()

    reportData['guid'] = guid
    if contact:
        # TODO: The mail anonymization should happen in the template, not here
        origEmail = reportData['email']
        email = reportData['email']
        email = re.sub(r' at ', r'@', email)
        email = re.sub(r' dot ', r'.', email)
        reportData['email'] = anonymizeMail(email)
        reportData['uid'] = contact

    # Report pages are sharded by the first four GUID characters.
    path = os.path.join(get_config().get('reports', 'dataPath'),
                        guid[0], guid[1], guid[2], guid[3], guid + '.html')
    directory = os.path.dirname(path)
    if not os.path.exists(directory):
        os.makedirs(directory)
    template = get_template(get_config().get('reports', 'webTemplate'))
    template.stream(reportData).dump(path, encoding='utf-8')

    if contact:
        reportData['email'] = origEmail
def handleRequest(environ, start_response):
    """Handle a report status update submitted via POST form data.

    Validates the GUID and shared secret, updates status/usefulness,
    re-renders the report and redirects back to the report page.
    """
    setupStderr(environ["wsgi.errors"])

    if environ["REQUEST_METHOD"].upper() != "POST" or not environ.get("CONTENT_TYPE", "").startswith(
        "application/x-www-form-urlencoded"
    ):
        return showError("Unsupported request method", start_response)

    try:
        request_body_length = int(environ["CONTENT_LENGTH"])
    # Narrowed from a bare `except:`: only a missing or non-numeric header
    # should yield this response.
    except (KeyError, ValueError):
        return showError("Invalid or missing Content-Length header", start_response)

    request_body = environ["wsgi.input"].read(request_body_length)
    params = {}
    for key, value in parse_qsl(request_body):
        params[key] = value.decode("utf-8")

    guid = params.get("guid", "").lower()
    if not re.match(r"^[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$", guid):
        return showError("Invalid or missing report GUID", start_response)

    reportData = getReport(guid)
    if reportData is None:
        return showError("Report does not exist", start_response)

    # Accept the legacy secret as well during the transition period.
    secret = calculateReportSecret(guid)
    if params.get("secret", "") != secret and params.get("secret", "") != calculateReportSecret_compat(guid):
        return showError("Wrong secret value", start_response)

    reportData["status"] = params.get("status", "")
    if len(reportData["status"]) > 1024:
        reportData["status"] = reportData["status"][:1024]

    oldusefulness = reportData.get("usefulness", "0")
    reportData["usefulness"] = params.get("usefulness", "0")
    if "email" in reportData:
        updateUserUsefulness(getUserId(reportData["email"]), reportData["usefulness"], oldusefulness)

    saveReport(guid, reportData)

    if params.get("notify", "") and "email" in reportData:
        # Addresses are stored obfuscated (" at ", " dot "); undo that first.
        email = reportData["email"]
        email = re.sub(r" at ", r"@", email)
        email = re.sub(r" dot ", r".", email)
        if re.match(r"^[\w.%+-]+@[\w.%+-]+(\.[\w.%+-]+)+", email):
            sendUpdateNotification(
                {"email": email, "url": get_config().get("reports", "urlRoot") + guid, "status": reportData["status"]}
            )

    newURL = get_config().get("reports", "urlRoot") + guid
    # Cache-busting query parameter so the updated page is fetched fresh.
    newURL += "?updated=" + str(int(random.uniform(0, 10000)))
    newURL += "#secret=" + secret
    start_response("302 Found", [("Location", newURL.encode("utf-8"))])
    return []
def handleRequest(environ, start_response):
    """Handle a report status update submitted via POST form data.

    Validates the GUID and shared secret, updates status/usefulness,
    re-renders the report and redirects back to the report page.
    """
    setupStderr(environ['wsgi.errors'])

    if environ['REQUEST_METHOD'].upper() != 'POST' or not environ.get('CONTENT_TYPE', '').startswith('application/x-www-form-urlencoded'):
        return showError('Unsupported request method', start_response)

    try:
        request_body_length = int(environ['CONTENT_LENGTH'])
    # Narrowed from a bare `except:`: only a missing or non-numeric header
    # should yield this response.
    except (KeyError, ValueError):
        return showError('Invalid or missing Content-Length header', start_response)

    request_body = environ['wsgi.input'].read(request_body_length)
    params = {}
    for key, value in parse_qsl(request_body):
        params[key] = value.decode('utf-8')

    guid = params.get('guid', '').lower()
    if not re.match(r'^[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$', guid):
        return showError('Invalid or missing report GUID', start_response)

    reportData = getReport(guid)
    if reportData is None:
        return showError('Report does not exist', start_response)

    # Accept the legacy secret as well during the transition period.
    secret = calculateReportSecret(guid)
    if params.get('secret', '') != secret and params.get('secret', '') != calculateReportSecret_compat(guid):
        return showError('Wrong secret value', start_response)

    reportData['status'] = params.get('status', '')
    if len(reportData['status']) > 1024:
        reportData['status'] = reportData['status'][:1024]

    oldusefulness = reportData.get('usefulness', '0')
    reportData['usefulness'] = params.get('usefulness', '0')
    if 'email' in reportData:
        updateUserUsefulness(getUserId(reportData['email']), reportData['usefulness'], oldusefulness)

    saveReport(guid, reportData)

    if params.get('notify', '') and 'email' in reportData:
        # Addresses are stored obfuscated (' at ', ' dot '); undo that first.
        email = reportData['email']
        email = re.sub(r' at ', r'@', email)
        email = re.sub(r' dot ', r'.', email)
        if re.match(r'^[\w.%+-]+@[\w.%+-]+(\.[\w.%+-]+)+', email):
            sendUpdateNotification({
                'email': email,
                'url': get_config().get('reports', 'urlRoot') + guid,
                'status': reportData['status'],
            })

    newURL = get_config().get('reports', 'urlRoot') + guid
    # Cache-busting query parameter so the updated page is fetched fresh.
    newURL += '?updated=' + str(int(random.uniform(0, 10000)))
    newURL += '#secret=' + secret
    start_response('302 Found', [('Location', newURL.encode('utf-8'))])
    return []
def _get_db():
    """Return a MySQL connection to the configured crawler database."""
    database = get_config().get("crawler", "database")
    dbuser = get_config().get("crawler", "dbuser")
    dbpasswd = get_config().get("crawler", "dbpassword")
    options = {
        "user": dbuser,
        "passwd": dbpasswd,
        "db": database,
        "use_unicode": True,
        "charset": "utf8",
    }
    # Windows builds of MySQLdb connect through a named pipe.
    if os.name == "nt":
        options["named_pipe"] = True
    return MySQLdb.connect(**options)
def removeReport(guid):
    """Delete a report row plus its rendered HTML page and screenshot."""
    cursor = get_db().cursor()
    executeQuery(cursor, 'DELETE FROM #PFX#reports WHERE guid = %s', guid)
    get_db().commit()
    # Both artifacts live in the same sharded directory (first four GUID
    # characters); loop instead of duplicating the path construction.
    for extension in ('.html', '.png'):
        path = os.path.join(get_config().get('reports', 'dataPath'),
                            guid[0], guid[1], guid[2], guid[3],
                            guid + extension)
        if os.path.isfile(path):
            os.remove(path)
def hook(ui, repo, node=None, **kwargs):
    """Mercurial hook: announce a changeset on IRC via the remote command.

    Identifies the commit by its bookmarks; the branch name is added when
    there are no bookmarks or the branch is not `default`.
    """
    ctx = repo[node]
    commit_identifiers = ctx.bookmarks()
    if not commit_identifiers or ctx.branch() != 'default':
        commit_identifiers.append(ctx.branch())
    arguments = [
        get_config().get('irchook', 'remote_command'),
        os.path.basename(repo.root),
        ','.join(commit_identifiers),
        str(ctx.user()),
        str(ctx),
        str(ctx.description()),
    ]
    # Quote each argument so the remote shell sees them as single words.
    quoted_command = ' '.join(pipes.quote(argument) for argument in arguments)
    subprocess.call(['ssh', get_config().get('irchook', 'remote_host'),
                     quoted_command])
def build(self):
    """
    run the build command in the tempdir
    """
    baseDir = os.path.join(self.config.nightliesDirectory, self.basename)
    if not os.path.exists(baseDir):
        os.makedirs(baseDir)
    outputFile = '%s-%s%s' % (self.basename, self.version, self.config.packageSuffix)
    self.path = os.path.join(baseDir, outputFile)
    self.updateURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + outputFile + '?update')

    if self.config.type == 'android':
        # Android builds run on a dedicated remote host; the package is
        # streamed back over SSH directly into the output file.
        apkFile = open(self.path, 'wb')
        try:
            try:
                port = get_config().get('extensions', 'androidBuildPort')
            except ConfigParser.NoOptionError:
                # Fall back to the default SSH port when none is configured.
                port = '22'
            command = ['ssh', '-p', port, get_config().get('extensions', 'androidBuildHost')]
            command.extend(map(pipes.quote, [
                '/home/android/bin/makedebugbuild.py', '--revision',
                self.buildNum, '--version', self.version, '--stdout'
            ]))
            subprocess.check_call(command, stdout=apkFile, close_fds=True)
        except:
            # clear broken output if any
            if os.path.exists(self.path):
                os.remove(self.path)
            raise
    else:
        env = os.environ
        # Pass the SpiderMonkey binary to the build script via environment
        # variable if one is configured.
        spiderMonkeyBinary = self.config.spiderMonkeyBinary
        if spiderMonkeyBinary:
            env = dict(env, SPIDERMONKEY_BINARY=spiderMonkeyBinary)

        command = [os.path.join(self.tempdir, 'build.py'),
                   '-t', self.config.type, 'build', '-b', self.buildNum]
        # Gecko builds are not signed here; all other types need the key file.
        if self.config.type != 'gecko':
            command.extend(['-k', self.config.keyFile])
        command.append(self.path)
        subprocess.check_call(command, env=env)

    if not os.path.exists(self.path):
        raise Exception("Build failed, output file hasn't been created")

    # Point 00latest at the freshest build; copy where symlinks are unavailable.
    linkPath = os.path.join(baseDir, '00latest%s' % self.config.packageSuffix)
    if hasattr(os, 'symlink'):
        if os.path.exists(linkPath):
            os.remove(linkPath)
        os.symlink(os.path.basename(self.path), linkPath)
    else:
        shutil.copyfile(self.path, linkPath)
def uploadToMozillaAddons(self):
    """Upload the build to addons.mozilla.org, authenticating with a JWT.

    Builds the token manually (HS256 over base64 header/payload), PUTs the
    package as multipart form data and logs the server response on failure.
    """
    import urllib3

    header = {
        'alg': 'HS256',  # HMAC-SHA256
        'typ': 'JWT',
    }

    issued = int(time.time())
    payload = {
        'iss': get_config().get('extensions', 'amo_key'),
        'jti': random.random(),  # per-request token id
        'iat': issued,
        'exp': issued + 60,  # token expires after one minute
    }

    # JWT format: base64(header).base64(payload).base64(signature).
    # Renamed from `input`, which shadowed the builtin.
    token_input = '{}.{}'.format(
        base64.b64encode(json.dumps(header)),
        base64.b64encode(json.dumps(payload))
    )
    signature = hmac.new(get_config().get('extensions', 'amo_secret'),
                         msg=token_input,
                         digestmod=hashlib.sha256).digest()
    token = '{}.{}'.format(token_input, base64.b64encode(signature))

    upload_url = ('https://addons.mozilla.org/api/v3/addons/{}/'
                  'versions/{}/').format(self.extensionID, self.version)

    # `with` ensures the package file is closed even if encoding fails.
    with open(self.path, 'rb') as package_file:
        data, content_type = urllib3.filepost.encode_multipart_formdata({
            'upload': (
                os.path.basename(self.path),
                package_file.read(),
                'application/x-xpinstall'
            )
        })

    request = urllib2.Request(upload_url, data=data)
    request.add_header('Content-Type', content_type)
    request.add_header('Authorization', 'JWT ' + token)
    request.get_method = lambda: 'PUT'

    try:
        urllib2.urlopen(request).close()
    except urllib2.HTTPError as e:
        try:
            logging.error(e.read())
        finally:
            e.close()
        raise
def sitekey_frame(environ, start_response):
    """Serve the sitekey frame, signing path, host and user agent."""
    template_dir, template_name = os.path.split(
        get_config().get("testpages", "sitekeyFrameTemplate"))
    template = get_template(template_name, template_path=template_dir)

    # The signed message is "<path>\0<host>\0<user-agent>".
    signed_data = "\x00".join((request_path(environ),
                               environ["HTTP_HOST"],
                               environ["HTTP_USER_AGENT"]))
    key = M2Crypto.EVP.load_key(get_config().get("testpages", "sitekeyPath"))
    key.sign_init()
    key.sign_update(signed_data)
    public_key = base64.b64encode(key.as_der())
    signature = base64.b64encode(key.final())

    start_response("200 OK", [
        ("Content-Type", "text/html; charset=utf-8"),
        ("X-Adblock-Key", "%s_%s" % (public_key, signature)),
    ])
    return [template.render({
        "public_key": public_key,
        "signature": signature,
    }).encode("utf-8")]
def parse_source((mirror_name, server_type, log_file)):
    """Parse a single log file into stats data.

    Accepts a (mirror_name, server_type, log_file) tuple so the function
    can be mapped over a task list. Returns (server_type, log_file, data,
    ignored) on success or (None, None, None, None) if parsing failed.
    """
    try:
        geo = pygeoip.GeoIP(get_config().get("stats", "geoip_db"), pygeoip.MEMORY_CACHE)
        geov6 = pygeoip.GeoIP(get_config().get("stats", "geoipv6_db"), pygeoip.MEMORY_CACHE)

        # Collects entries that could not be attributed; filled by the parser.
        ignored = set()
        fileobj = open_stats_file(log_file)
        try:
            data = parse_fileobj(mirror_name, fileobj, geo, geov6, ignored)
        finally:
            fileobj.close()
        return server_type, log_file, data, ignored
    except:
        # Deliberate best effort: one broken log file must not abort the
        # whole run; report it and let the caller skip the None result.
        print >>sys.stderr, "Unable to process log file '%s'" % log_file
        traceback.print_exc()
        return None, None, None, None
def load_notifications():
    """Load notification definitions from the configured Mercurial repository.

    Archives the `default` branch to an in-memory tar stream, parses every
    regular file, and marks notifications whose start/end window does not
    include the current time as inactive. Returns the parsed list.
    """
    repo = get_config().get('notifications', 'repository')
    command = ['hg', '-R', repo, 'archive', '-r', 'default', '-t', 'tar',
               '-p', '.', '-X', os.path.join(repo, '.hg_archival.txt'), '-']
    data = subprocess.check_output(command)

    notifications = []
    with tarfile.open(mode='r:', fileobj=StringIO(data)) as archive:
        for fileinfo in archive:
            name = fileinfo.name
            if name.startswith('./'):
                name = name[2:]

            if fileinfo.type == tarfile.REGTYPE:
                data = codecs.getreader('utf8')(archive.extractfile(fileinfo))
                try:
                    notification = _parse_notification(data, name)
                    # Idiom fix: `'inactive' not in` instead of `not ... in`.
                    if 'inactive' not in notification:
                        current_time = datetime.datetime.now()
                        start = notification.pop('start', current_time)
                        end = notification.pop('end', current_time)
                        if not start <= current_time <= end:
                            notification['inactive'] = True
                    notifications.append(notification)
                except:
                    # Best effort: one malformed notification must not break
                    # the rest; log the traceback and continue.
                    traceback.print_exc()
    return notifications
def authenticate(f, environ, start_response, config_section):
    """Guard the WSGI handler `f` with HTTP basic authentication.

    Invokes `f` only when the request carries the username and password
    configured in `config_section`; otherwise answers 401 with the
    configured realm.
    """
    auth_header = environ.get('HTTP_AUTHORIZATION')
    if auth_header:
        parts = auth_header.split()
        if len(parts) == 2 and parts[0].lower() == 'basic':
            username, password = base64.b64decode(parts[1]).split(':')
            config = get_config()
            if (username == config.get(config_section, 'basic_auth_username') and
                    password == config.get(config_section, 'basic_auth_password')):
                return f(environ, start_response)

    realm = get_config().get('DEFAULT', 'basic_auth_realm')
    start_response('401 UNAUTHORIZED',
                   [('WWW-Authenticate', 'Basic realm="%s"' % realm)])
    return ''
def submit_email(environ, start_response, data):
    """Send a signed verification email for a submitted address."""
    email = data.get('email', '').strip()
    try:
        email = encode_email_address(email)
    except ValueError:
        return send_simple_response(
            start_response, 400,
            'Please enter a valid email address.'
        )

    config = get_config()
    # The signature lets the verification endpoint validate the address later.
    params = [('email', email), ('signature', sign(config, email))]
    lang = data.get('lang')
    if lang:
        params.append(('lang', lang))

    verification_url = '%s?%s' % (
        urljoin(wsgiref.util.application_uri(environ), VERIFICATION_PATH),
        urlencode(params)
    )
    sendMail(
        config.get('submit_email', 'verification_email_template'),
        {'recipient': email, 'verification_url': verification_url}
    )

    return send_simple_response(
        start_response, 200,
        'A confirmation email has been sent. Please check '
        'your email and click the confirmation link.'
    )
def executeQuery(cursor, query, args=None):
    """Execute a query after substituting the configured table prefix.

    The literal `#PFX#` marker in the query text is replaced with the
    configured database table prefix before execution.
    """
    tablePrefix = get_config().get('reports', 'dbprefix')
    # `#PFX#` is a plain marker, not a pattern: str.replace avoids re.sub
    # interpreting backslashes in the prefix as replacement escapes.
    query = query.replace('#PFX#', tablePrefix)
    # Force utf8mb4 so 4-byte characters (e.g. emoji) survive the round trip.
    cursor.execute('SET NAMES utf8mb4')
    cursor.execute('SET CHARACTER SET utf8mb4')
    cursor.execute('SET character_set_connection=utf8mb4')
    cursor.execute(query, args)
def authenticate(f, environ, start_response, config_section):
    """Run the WSGI handler `f` only for requests with valid basic credentials."""
    try:
        scheme, encoded = environ["HTTP_AUTHORIZATION"].split()
    except (KeyError, ValueError):
        # Header absent or not exactly two tokens - treat as unauthenticated.
        scheme = encoded = None

    if scheme is not None and scheme.lower() == "basic":
        username, password = base64.b64decode(encoded).split(":")
        config = get_config()
        expected_username = config.get(config_section, "basic_auth_username")
        expected_password = config.get(config_section, "basic_auth_password")
        if (username, password) == (expected_username, expected_password):
            return f(environ, start_response)

    realm = get_config().get("DEFAULT", "basic_auth_realm")
    start_response("401 UNAUTHORIZED",
                   [("WWW-Authenticate", 'Basic realm="%s"' % realm)])
    return ""
def handleRequest(environ, start_response):
    """Accept a crash report submission and store it as an XML file."""
    setupStderr(environ['wsgi.errors'])

    if not environ.get('HTTP_X_ADBLOCK_PLUS'):
        return showError('Please use Adblock Plus to submit crashes', start_response)

    if environ['REQUEST_METHOD'].upper() != 'POST' or not environ.get('CONTENT_TYPE', '').startswith('text/xml'):
        return showError('Unsupported request method', start_response)

    params = parse_qs(environ.get('QUERY_STRING', ''))

    requestVersion = params.get('version', ['0'])[0]
    if requestVersion != '1':
        return showError('Unsupported request version', start_response)

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        return showError('No content', start_response)

    # Renamed from `dir`, which shadowed the builtin.
    data_dir = get_config().get('crashes', 'dataPath')
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)

    filename = None
    try:
        # Write to a .tmp name first and rename on success so readers never
        # see a partially written crash file.
        fd, filename = mkstemp('.xml.tmp', 'crash_', data_dir)
        with os.fdopen(fd, 'wb') as crash_file:
            crash_file.write(environ['wsgi.input'].read(request_body_size))
        os.rename(filename, os.path.splitext(filename)[0])
    except Exception:
        if filename is not None and os.path.isfile(filename):
            os.remove(filename)
        # Bare raise preserves the original traceback (was `raise e`).
        raise
def writeUpdateManifest(self):
    """
    Writes update.rdf file for the current build
    """
    baseDir = os.path.join(self.config.nightliesDirectory, self.basename)

    # Only Safari and Android builds carry an update manifest.
    if self.config.type == 'safari':
        manifestPath = os.path.join(baseDir, 'updates.plist')
        templateName = 'safariUpdateManifest'
    elif self.config.type == 'android':
        manifestPath = os.path.join(baseDir, 'updates.xml')
        templateName = 'androidUpdateManifest'
    else:
        return

    if not os.path.exists(baseDir):
        os.makedirs(baseDir)

    if self.config.type == 'android':
        # ABP for Android used to have its own update manifest format. We need to
        # generate both that and the new one in the libadblockplus format as long
        # as a significant amount of users is on an old version.
        writeAndroidUpdateManifest(os.path.join(baseDir, 'update.json'), [{
            'basename': self.basename,
            'version': self.version,
            'updateURL': self.updateURL,
        }])

    template = get_template(get_config().get('extensions', templateName))
    template.stream({'extensions': [self]}).dump(manifestPath)
def handleRequest(environ, start_response):
    """Handle a contact form submission and send it via email.

    Always responds 200 with a plain-text message describing either
    success or the first validation problem found.
    """
    setupStderr(environ['wsgi.errors'])

    start_response('200 OK', [('Content-Type', 'text/plain; charset=utf-8')])
    if environ['REQUEST_METHOD'].upper() != 'POST' or not environ.get('CONTENT_TYPE', '').startswith('application/x-www-form-urlencoded'):
        return 'Unsupported request method'

    try:
        request_body_length = int(environ['CONTENT_LENGTH'])
    # Narrowed from a bare `except:`: only a missing or non-numeric header.
    except (KeyError, ValueError):
        return 'Invalid or missing Content-Length header'

    request_body = environ['wsgi.input'].read(request_body_length)
    params = {}
    for key, value in parse_qsl(request_body):
        params[key] = value.decode('utf-8').strip()

    # All fields are mandatory; keep the original per-field messages.
    for field, error in (('name', 'No name entered'),
                         ('email', 'No email address entered'),
                         ('subject', 'No subject entered'),
                         ('message', 'No message entered')):
        if params.get(field, '') == '':
            return error

    if not re.match(r'^\w[\w.+!-]+@\w[\w.-]+\.[a-zA-Z]{2,6}$', params['email']):
        return 'Invalid email address'

    sendMail(get_config().get('formmail', 'template'), params)
    return 'Message sent'
def adblockbrowser_updates(environ, start_response):
    """WSGI entry point for Adblock Browser update checks."""
    config = get_config()
    # Strip a trailing slash so URLs can be joined with plain concatenation.
    return _handle_request(
        environ, start_response,
        config.get('extensions', 'downloadsDirectory'),
        config.get('extensions', 'downloadsURL').rstrip('/'),
    )
def updateIndex(self, versions):
    """
    Updates index page listing all existing versions
    """
    baseDir = os.path.join(self.config.nightliesDirectory, self.basename)
    if not os.path.exists(baseDir):
        os.makedirs(baseDir)
    outputPath = os.path.join(baseDir, 'index.html')

    links = []
    for version in versions:
        packageFile = self.basename + '-' + version + self.config.packageSuffix
        changelogFile = self.basename + '-' + version + '.changelog.xhtml'
        packagePath = os.path.join(baseDir, packageFile)
        if not os.path.exists(packagePath):
            # Oops
            continue
        entry = {
            'version': version,
            'download': packageFile,
            'mtime': os.path.getmtime(packagePath),
            'size': os.path.getsize(packagePath),
        }
        if os.path.exists(os.path.join(baseDir, changelogFile)):
            entry['changelog'] = changelogFile
        links.append(entry)

    template = get_template(get_config().get('extensions', 'nightlyIndexPage'))
    template.stream({'config': self.config, 'links': links}).dump(outputPath)
def verify_email(environ, start_response):
    """Confirm a signed verification link and record the email address.

    Validates the signature from the query string, appends the address to
    the product's subscriber file under an exclusive lock and redirects to
    the configured success page.
    """
    config = get_config()
    params = dict(parse_qsl(environ.get('QUERY_STRING', '')))
    try:
        filename = config.get('submit_email', params['product'] + '_filename')
    except (KeyError, ConfigParser.NoOptionError):
        return send_simple_response(start_response, 400, 'Unknown product')
    email = params.get('email', '')
    signature = params.get('signature', '')
    if sign(config, email) != signature:
        return send_simple_response(
            start_response, 403,
            'Invalid signature in verification request.',
        )
    # Unbuffered append; the lock serializes concurrent verifications.
    with open(filename, 'ab', 0) as file:
        fcntl.lockf(file, fcntl.LOCK_EX)
        try:
            print >>file, email
        finally:
            fcntl.lockf(file, fcntl.LOCK_UN)
    location = config.get('submit_email', 'successful_verification_redirect_location')
    # The redirect target embeds the language, defaulting to English.
    location = location.format(lang=quote(params.get('lang') or 'en', ''))
    start_response('303 See Other', [('Location', location)])
    return []
def saveScreenshot(guid, screenshot):
    """Decode a base64 PNG data URL and store it as <guid>.png.

    Raises TypeError when the value is not a PNG data URL. Runs the
    configured PNG optimizer on the result when one is set.
    """
    prefix = 'data:image/png;base64,'
    if not screenshot.startswith(prefix):
        raise TypeError('Screenshot is not a PNG image')
    data = base64.b64decode(screenshot[len(prefix):])
    # Screenshots are sharded by the first four GUID characters.
    path = os.path.join(get_config().get('reports', 'dataPath'),
                        guid[0], guid[1], guid[2], guid[3], guid + '.png')
    directory = os.path.dirname(path)
    if not os.path.exists(directory):
        os.makedirs(directory)
    # `with` guarantees the handle is closed even if the write fails (the
    # original leaked it on error); also avoids shadowing `file`/`dir`.
    with open(path, 'wb') as screenshot_file:
        screenshot_file.write(data)
    if get_config().has_option('reports', 'pngOptimizerPath'):
        command = get_config().get('reports', 'pngOptimizerPath').split()
        command.append(path)
        subprocess.call(command)
def handleRequest(environ, start_response):
    """Accept a new issue report upload and respond with its public URL."""
    if not environ.get('HTTP_X_ADBLOCK_PLUS'):
        return showError('Please use Adblock Plus to submit reports', start_response)

    if environ['REQUEST_METHOD'].upper() != 'POST' or not environ.get('CONTENT_TYPE', '').startswith('text/xml'):
        return showError('Unsupported request method', start_response)

    params = parse_qs(environ.get('QUERY_STRING', ''))

    requestVersion = params.get('version', ['0'])[0]
    if requestVersion != '1':
        return showError('Unsupported request version', start_response)

    guid = params.get('guid', [''])[0].lower()
    if not re.match(r'^[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$', guid):
        return showError('Invalid or missing GUID', start_response)

    path = os.path.join(get_config().get('reports', 'dataPath'), guid + '.xml')
    if os.path.exists(path) or os.path.exists(path + '.tmp'):
        return showError('Duplicate GUID', start_response)

    try:
        request_size = int(environ['CONTENT_LENGTH'])
    except (KeyError, ValueError):
        return showError('Invalid or missing Content-Length header',
                         start_response, '411 Length Required')

    # Renamed from `dir`, which shadowed the builtin.
    directory = os.path.dirname(path)
    if not os.path.exists(directory):
        os.makedirs(directory)

    try:
        # Write to a .tmp file first and rename only after known-issue
        # matching succeeded, so a broken report never gets its final name.
        data = environ['wsgi.input'].read(request_size)
        with open(path + '.tmp', 'wb') as report_file:
            report_file.write(data)
        knownIssues = knownIssuesParser.findMatches(data.splitlines(), params.get('lang', ['en-US'])[0])
        os.rename(path + '.tmp', path)
    except Exception:
        if os.path.isfile(path + '.tmp'):
            os.remove(path + '.tmp')
        # Bare raise preserves the original traceback (was `raise e`).
        raise

    template = get_template(get_config().get('reports', 'submitResponseTemplate'))
    start_response('200 OK', [('Content-Type', 'application/xhtml+xml; charset=utf-8')])
    return [template.render({
        'url': get_config().get('reports', 'urlRoot') + guid,
        'knownIssues': knownIssues,
    }).encode('utf-8')]
def build(self):
    """
    run the build command in the tempdir
    """
    baseDir = os.path.join(self.config.nightliesDirectory, self.basename)
    if not os.path.exists(baseDir):
        os.makedirs(baseDir)
    outputFile = "%s-%s%s" % (self.basename, self.version, self.config.packageSuffix)
    outputPath = os.path.join(baseDir, outputFile)
    self.updateURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + outputFile + '?update')

    if self.config.type == 'android':
        # Android builds run on a dedicated remote host; the package is
        # streamed back over SSH directly into the output file.
        apkFile = open(outputPath, 'wb')
        try:
            try:
                port = get_config().get('extensions', 'androidBuildPort')
            except ConfigParser.NoOptionError:
                # Fall back to the default SSH port when none is configured.
                port = '22'
            buildCommand = ['ssh', '-p', port, get_config().get('extensions', 'androidBuildHost')]
            buildCommand.extend(map(pipes.quote, ['/home/android/bin/makedebugbuild.py', '--revision', self.revision, '--version', self.version, '--stdout']))
            subprocess.check_call(buildCommand, stdout=apkFile, close_fds=True)
        except:
            # clear broken output if any
            if os.path.exists(outputPath):
                os.remove(outputPath)
            raise
    elif self.config.type == 'chrome' or self.config.type == 'opera':
        # Chrome and Opera share the same packager.
        import buildtools.packagerChrome as packager
        packager.createBuild(self.tempdir, type=self.config.type, outFile=outputPath, buildNum=self.revision, keyFile=self.config.keyFile, experimentalAPI=self.config.experimental)
    elif self.config.type == 'safari':
        import buildtools.packagerSafari as packager
        packager.createBuild(self.tempdir, type=self.config.type, outFile=outputPath, buildNum=self.revision, keyFile=self.config.keyFile)
    else:
        # Everything else is packaged as a Gecko extension.
        import buildtools.packagerGecko as packager
        packager.createBuild(self.tempdir, outFile=outputPath, buildNum=self.revision, keyFile=self.config.keyFile)

    if not os.path.exists(outputPath):
        raise Exception("Build failed, output file hasn't been created")

    # Point 00latest at the freshest build; copy where symlinks are unavailable.
    linkPath = os.path.join(baseDir, '00latest%s' % self.config.packageSuffix)
    if hasattr(os, 'symlink'):
        if os.path.exists(linkPath):
            os.remove(linkPath)
        os.symlink(os.path.basename(outputPath), linkPath)
    else:
        shutil.copyfile(outputPath, linkPath)
def config(hg_dir, nightlydir):
    """Set and return config obj for NightlyBuild"""
    cfg = get_config()
    # Configure a Safari nightly build against the test repository.
    cfg.type = 'safari'
    cfg.revision = 'safari'
    cfg.repositoryName = 'adblockplusnightly'
    cfg.repository = nightlydir.strpath
    return cfg
merge_objects(existing, file_data, factor) dir = os.path.dirname(path) try: os.makedirs(dir) except OSError as e: if e.errno != errno.EEXIST: raise with codecs.open(path, 'wb', encoding='utf-8') as fileobj: json.dump(existing, fileobj, indent=2, sort_keys=True) def parse_source(factor, lock, (mirror_name, server_type, log_file)): try: geo = pygeoip.GeoIP(get_config().get('stats', 'geoip_db'), pygeoip.MEMORY_CACHE) geov6 = pygeoip.GeoIP(get_config().get('stats', 'geoipv6_db'), pygeoip.MEMORY_CACHE) ignored = set() fileobj = StatsFile(log_file) try: data = parse_fileobj(mirror_name, fileobj, geo, geov6, ignored) finally: fileobj.close() lock.acquire() try: save_stats(server_type, data, factor) finally:
def get_main_page_template():
    """Return the template configured for the stats main page."""
    template_name = get_config().get('stats', 'mainPageTemplate')
    return get_template_environment().get_template(template_name)
def get_file_overview_template():
    """Return the template configured for the per-file overview page."""
    template_name = get_config().get('stats', 'fileOverviewTemplate')
    return get_template_environment().get_template(template_name)
def changegroup_hook(ui, repo, node, **kwargs):
    """Mercurial changegroup hook: post comments for pushed commits.

    Scans every changeset from the first pushed revision (`node`) to tip
    for issue references and posts the collected comments.
    """
    config = get_config()
    pushed_commits = repo[repo[node].rev():]
    refs = _collect_references(ui, pushed_commits)
    _post_comments(ui, repo, config, refs)
merge_objects(existing, file_data, factor) dir = os.path.dirname(path) try: os.makedirs(dir) except OSError, e: if e.errno != errno.EEXIST: raise with codecs.open(path, "wb", encoding="utf-8") as fileobj: json.dump(existing, fileobj, indent=2, sort_keys=True) def parse_source(factor, lock, (mirror_name, server_type, log_file)): try: geo = pygeoip.GeoIP(get_config().get("stats", "geoip_db"), pygeoip.MEMORY_CACHE) geov6 = pygeoip.GeoIP(get_config().get("stats", "geoipv6_db"), pygeoip.MEMORY_CACHE) ignored = set() fileobj = StatsFile(log_file) try: data = parse_fileobj(mirror_name, fileobj, geo, geov6, ignored) finally: fileobj.close() lock.acquire() try: save_stats(server_type, data, factor) finally:
def get_config_items():
    """Yield (name, value) for each formmail2 option not inherited from DEFAULT."""
    config = get_config()
    defaults = config.defaults()
    for option in config.items('formmail2'):
        if option[0] not in defaults:
            yield option
def saveReport(guid, reportData, isNew=False):
    """Persist a report to the database and regenerate its static HTML page.

    guid       -- report identifier, also used to shard the HTML output path
    reportData -- dict with the report fields; mutated in place (screenshot
                  removed, guid/uid added, email temporarily anonymized)
    isNew      -- when True and the reporter is known, bump their report count
    """
    cursor = get_db().cursor()
    screenshot = reportData.get('screenshot', None)
    if screenshot != None:
        # hasscreenshot: 0 = none, 1 = screenshot present, 2 = edited by user
        reportData['hasscreenshot'] = 2 if reportData.get(
            'screenshotEdited', False) else 1
        try:
            saveScreenshot(guid, screenshot)
        except (TypeError, UnicodeEncodeError):
            # Screenshot data was malformed; record the report without it.
            reportData['hasscreenshot'] = 0
        # The raw screenshot must not end up in the marshalled dump below.
        del reportData['screenshot']
    knownIssues = len(reportData.get('knownIssues', []))
    # Anonymous reporter id derived from the email address, if one was given.
    contact = getUserId(reportData.get('email', None)) if reportData.get(
        'email', None) else None
    dumpstr = marshal.dumps(reportData)

    if contact != None and isNew:
        # Track per-user report counts for known reporters.
        executeQuery(
            cursor,
            'INSERT INTO #PFX#users (id, reports) VALUES (%s, 1) ON DUPLICATE KEY UPDATE reports = reports + 1',
            contact)
    # Upsert the report row; the full report dict is stored as a binary dump.
    executeQuery(
        cursor,
        '''INSERT INTO #PFX#reports (guid, type, ctime, site, comment, status, contact, hasscreenshot, knownissues, dump) VALUES (%(guid)s, %(type)s, FROM_UNIXTIME(%(ctime)s), %(site)s, %(comment)s, %(status)s, %(contact)s, %(hasscreenshot)s, %(knownissues)s, _binary %(dump)s) ON DUPLICATE KEY UPDATE type = %(type)s, site = %(site)s, comment = %(comment)s, status = %(status)s, hasscreenshot = %(hasscreenshot)s, knownissues = %(knownissues)s, dump = _binary %(dump)s''',
        {
            'guid': guid,
            'type': reportData.get('type', None),
            'ctime': reportData['time'],
            'site': reportData.get('siteName', None),
            'comment': reportData.get('comment', None),
            'status': reportData.get('status', None),
            'contact': contact,
            'hasscreenshot': reportData.get('hasscreenshot', 0),
            'knownissues': knownIssues,
            'dump': dumpstr
        })
    if len(reportData['subscriptions']) > 0:
        for sn in reportData['subscriptions']:
            # Link the report to each known subscription, remembering whether
            # any of the report's filters came from that subscription.
            executeQuery(cursor,
                         'SELECT id FROM #PFX#subscriptions WHERE url = %s',
                         sn['id'])
            id = cursor.fetchone()
            if id != None:
                def filterMatch(f):
                    return any(u == sn['id'] for u in f.get('subscriptions', []))
                hasMatches = any(
                    filterMatch(f) for f in reportData.get('filters', []))
                executeQuery(
                    cursor,
                    'INSERT IGNORE INTO #PFX#sublists (report, list, hasmatches) VALUES (%s, %s, %s)',
                    (guid, id[0], hasMatches))

    get_db().commit()

    reportData['guid'] = guid
    if contact:
        # TODO: The mail anonymization should happen in the template, not here
        origEmail = reportData['email']
        email = reportData['email']
        # De-obfuscate " at " / " dot " before anonymizing for display.
        email = re.sub(r' at ', r'@', email)
        email = re.sub(r' dot ', r'.', email)
        reportData['email'] = anonymizeMail(email)
        reportData['uid'] = contact

    # Static HTML lives under dataPath, sharded by the first four guid chars.
    file = os.path.join(get_config().get('reports', 'dataPath'),
                        guid[0], guid[1], guid[2], guid[3], guid + '.html')
    dir = os.path.dirname(file)
    if not os.path.exists(dir):
        os.makedirs(dir)
    template = get_template(get_config().get('reports', 'webTemplate'))
    template.stream(reportData).dump(file, encoding='utf-8')
    if contact:
        # Restore the caller-visible dict to its original email address.
        reportData['email'] = origEmail
def getUserId(email):
    """Return a stable anonymous identifier for an email address.

    The id is the hex HMAC (default MD5 digest) of the UTF-8 encoded address,
    keyed with the secret from the [reports] config section.
    """
    secret = get_config().get('reports', 'secret')
    return hmac.new(secret, email.encode('utf-8')).hexdigest()
def calculateReportSecret_compat(guid):
    """Legacy report secret: plain MD5 over the configured secret plus guid.

    Kept only so links generated before the switch to HMAC keep working.
    """
    digest = hashlib.md5()
    digest.update(get_config().get('reports', 'secret'))
    digest.update(guid)
    return digest.hexdigest()
def calculateReportSecret(guid):
    """Return the HMAC secret that authorizes status updates for a report."""
    secret = get_config().get('reports', 'secret')
    return hmac.new(secret, guid).hexdigest()
def sendUpdateNotification(templateData):
    """Mail the report-updated notification rendered from templateData."""
    template_name = get_config().get('reports', 'notificationTemplate')
    sendMail(template_name, templateData)
def mailDigest(templateData):
    """Mail a report digest rendered from templateData."""
    template_name = get_config().get('reports', 'mailDigestTemplate')
    sendMail(template_name, templateData)
request_size= int(environ['CONTENT_LENGTH']) except (KeyError, ValueError): return showError('Invalid or missing Content-Length header', start_response, '411 Length Required') dir = os.path.dirname(path) if not os.path.exists(dir): os.makedirs(dir) try: file = open(path + '.tmp', 'wb') data = environ['wsgi.input'].read(request_size) file.write(data) file.close() knownIssues = knownIssuesParser.findMatches(data.splitlines(), params.get('lang', ['en-US'])[0]) os.rename(path + '.tmp', path); except Exception, e: if os.path.isfile(path + '.tmp'): os.remove(path + '.tmp') raise e template = get_template(get_config().get('reports', 'submitResponseTemplate')) start_response('200 OK', [('Content-Type', 'application/xhtml+xml; charset=utf-8')]) return [template.render({'url': get_config().get('reports', 'urlRoot') + guid, 'knownIssues': knownIssues}).encode('utf-8')] def showError(message, start_response, response_code='400 Processing Error'): template = get_template(get_config().get('reports', 'errorTemplate')) start_response(response_code, [('Content-Type', 'application/xhtml+xml; charset=utf-8')]) return [template.render({'message': message}).encode('utf-8')]
def generate_pages(datadir, outputdir):
    """Render the full set of static statistics pages from JSON data files.

    Walks datadir as <server_type>/<month>/<file>.json and writes, under
    outputdir, per-file monthly pages, filtered per-field views, per-file
    overview pages and a main index page for each server type.
    """
    for server_type, server_type_dir in get_names(datadir, True):
        baseURL = get_config().get('stats', 'baseURL_' + server_type)
        filedata = {}
        current_month = None
        for month, month_dir in get_names(server_type_dir, True):
            # Months sort lexicographically; remember the newest one for the
            # main page.
            if current_month == None or month > current_month:
                current_month = month
            for filename, path in get_names(month_dir, False):
                filename = re.sub(r'\.json$', '', filename)
                with codecs.open(path, 'rb', encoding='utf-8') as file:
                    data = json.load(file)

                overview_url = '../../overview-' + common.filename_encode(filename + '.html')
                filtered_urls = {}
                for field in common.fields:
                    if field['name'] not in data:
                        continue

                    # Create filtered views for the first thirty values of a field if they
                    # have filtered data.
                    sorted_field = get_template_environment().filters['sortfield'](data[field['name']], field)
                    for name, value in sorted_field[0:get_default_count(field)]:
                        # Only values carrying more than bare hit/bandwidth
                        # counters get their own filtered page.
                        if filter(lambda k: k not in ('hits', 'bandwidth'), value.iterkeys()):
                            outputfile = os.path.join(outputdir,
                                                      common.filename_encode(server_type),
                                                      common.filename_encode(month),
                                                      common.filename_encode(filename),
                                                      'filtered-%s-%s.html' % (
                                                          common.filename_encode(field['name']),
                                                          common.filename_encode(name),
                                                      ))
                            generate_file_stats(outputfile, month, baseURL + filename,
                                                overview_url, value,
                                                filter={'field': field, 'value': name})

                            if not field['name'] in filtered_urls:
                                filtered_urls[field['name']] = {}
                            filtered_urls[field['name']][name] = os.path.basename(outputfile)

                outputfile = os.path.join(outputdir,
                                          common.filename_encode(server_type),
                                          common.filename_encode(month),
                                          common.filename_encode(filename),
                                          'index.html')
                generate_file_stats(outputfile, month, baseURL + filename,
                                    overview_url, data, filtered_urls=filtered_urls)

                if filename not in filedata:
                    filedata[filename] = {}
                month_url = '%s/%s/%s' % (common.filename_encode(month),
                                          common.filename_encode(filename),
                                          'index.html')
                filedata[filename][month] = {'url': month_url,
                                             'hits': data['hits'],
                                             'bandwidth': data['bandwidth']}

        monthdata = {}
        for filename, data in filedata.iteritems():
            # Per-file overview page spanning all months.
            outputfile = os.path.join(outputdir,
                                      common.filename_encode(server_type),
                                      'overview-' + common.filename_encode(filename + '.html'))
            generate_file_overview(outputfile, baseURL + filename, data)

            if current_month in data:
                monthdata[filename] = dict(data[current_month])

        # Main page for this server type shows the latest month only.
        outputfile = os.path.join(outputdir,
                                  common.filename_encode(server_type),
                                  'index.html')
        generate_main_page(outputfile, current_month, baseURL, monthdata)
url = getattr(subscription, key) if url != None: site = urlparse(url).netloc s['links'].append({ 'url': url, 'title': key[0].upper() + key[1:], 'result': urls[url], 'siteResult': site in sites and sites[site], }) for (title, url, complete) in subscription.variants: site = urlparse(url).netloc s['links'].append({ 'url': url, 'title': title, 'result': urls[url], 'siteResult': site in sites and sites[site], }) return result if __name__ == '__main__': setupStderr() subscriptions = checkSubscriptions() outputFile = get_config().get('subscriptions', 'statusPage') template = get_template(get_config().get('subscriptions', 'statusTemplate')) template.stream({ 'subscriptions': subscriptions }).dump(outputFile, encoding='utf-8')
def download_from_mozilla_addons(self, buildtype, version, app_id):
    """Fetch a reviewed build back from addons.mozilla.org (AMO).

    Queries the AMO v3 API for the given version's review status; when the
    build passed review it downloads the signed XPI, verifies its SHA-256
    checksum and stores it in the nightlies directory. On a failed review the
    full API response is logged. In both terminal cases the version is removed
    from the downloads lockfile.
    NOTE(review): *buildtype* is accepted but never used here — presumably
    kept for interface parity with other download_* methods; confirm.
    """
    config = get_config()
    iss = config.get('extensions', 'amo_key')
    secret = config.get('extensions', 'amo_secret')

    url = ('https://addons.mozilla.org/api/v3/addons/{}/'
           'versions/{}/').format(app_id, version)

    # JWT-authenticated status request.
    request = self.generate_mozilla_jwt_request(
        iss, secret, url, 'GET',
    )
    response = json.load(urllib2.urlopen(request))

    filename = '{}-{}.xpi'.format(self.basename, version)
    self.path = os.path.join(
        config.get('extensions', 'nightliesDirectory'),
        self.basename,
        filename,
    )

    # All of these flags must be set before the signed file is available.
    necessary = ['passed_review', 'reviewed', 'processed', 'valid']
    if all(response[x] for x in necessary):
        download_url = response['files'][0]['download_url']
        checksum = response['files'][0]['hash']

        request = self.generate_mozilla_jwt_request(
            iss, secret, download_url, 'GET',
        )
        try:
            response = urllib2.urlopen(request)
        except urllib2.HTTPError as e:
            # NOTE(review): only logs; execution continues and response.read()
            # below would fail on an unbound/old value — confirm intended.
            logging.error(e.read())

        # Verify the extension's integrity
        file_content = response.read()
        sha256 = hashlib.sha256(file_content)
        returned_checksum = '{}:{}'.format(sha256.name, sha256.hexdigest())
        if returned_checksum != checksum:
            logging.error('Checksum could not be verified: {} vs {}'
                          ''.format(checksum, returned_checksum))

        with open(self.path, 'w') as fp:
            fp.write(file_content)

        self.update_link = os.path.join(
            config.get('extensions', 'nightliesURL'),
            self.basename,
            filename,
        )

        self.remove_from_downloads_lockfile(self.config.type,
                                            'version',
                                            version)
    elif not response['passed_review'] or not response['valid']:
        # When the review failed for any reason, we want to know about it
        logging.error(json.dumps(response, indent=4))
        self.remove_from_downloads_lockfile(self.config.type,
                                            'version',
                                            version)
'index.html') generate_file_stats(outputfile, month, baseURL + filename, overview_url, data, filtered_urls=filtered_urls) if filename not in filedata: filedata[filename] = {} month_url = '%s/%s/%s' % (common.filename_encode(month), common.filename_encode(filename), 'index.html') filedata[filename][month] = {'url': month_url, 'hits': data['hits'], 'bandwidth': data['bandwidth']} monthdata = {} for filename, data in filedata.iteritems(): outputfile = os.path.join(outputdir, common.filename_encode(server_type), 'overview-' + common.filename_encode(filename + '.html')) generate_file_overview(outputfile, baseURL + filename, data) if current_month in data: monthdata[filename] = dict(data[current_month]) outputfile = os.path.join(outputdir, common.filename_encode(server_type), 'index.html') generate_main_page(outputfile, current_month, baseURL, monthdata) if __name__ == '__main__': setupStderr() datadir = get_config().get('stats', 'dataDirectory') outputdir = get_config().get('stats', 'outputDirectory') generate_pages(datadir, outputdir)
def showError(message, start_response):
    """Render *message* through the error template as a 400 response body."""
    error_template_name = get_config().get('reports', 'errorTemplate')
    template = get_template(error_template_name)
    body = template.render({'message': message}).encode('utf-8')
    start_response('400 Processing Error',
                   [('Content-Type', 'application/xhtml+xml; charset=utf-8')])
    return [body]
def build(self):
    """ run the build command in the tempdir """
    # Output goes to <nightliesDirectory>/<basename>/<basename>-<version><suffix>
    baseDir = os.path.join(self.config.nightliesDirectory, self.basename)
    if not os.path.exists(baseDir):
        os.makedirs(baseDir)
    outputFile = "%s-%s%s" % (self.basename, self.version,
                              self.config.packageSuffix)
    self.path = os.path.join(baseDir, outputFile)
    self.updateURL = urlparse.urljoin(
        self.config.nightliesURL,
        self.basename + '/' + outputFile + '?update')

    if self.config.type == 'android':
        # Android builds run remotely over SSH; the APK is streamed to stdout
        # and captured into the output file.
        apkFile = open(self.path, 'wb')
        try:
            try:
                port = get_config().get('extensions', 'androidBuildPort')
            except ConfigParser.NoOptionError:
                port = '22'  # default SSH port when none is configured
            buildCommand = [
                'ssh', '-p', port,
                get_config().get('extensions', 'androidBuildHost')
            ]
            # pipes.quote protects the remote command arguments from the shell.
            buildCommand.extend(
                map(pipes.quote, [
                    '/home/android/bin/makedebugbuild.py', '--revision',
                    self.revision, '--version', self.version, '--stdout'
                ]))
            subprocess.check_call(buildCommand, stdout=apkFile, close_fds=True)
        except:
            # clear broken output if any
            if os.path.exists(self.path):
                os.remove(self.path)
            raise
    elif self.config.type == 'chrome':
        import buildtools.packagerChrome as packager
        packager.createBuild(self.tempdir, type=self.config.type,
                             outFile=self.path, buildNum=self.revision,
                             keyFile=self.config.keyFile)
    elif self.config.type == 'safari':
        import buildtools.packagerSafari as packager
        packager.createBuild(self.tempdir, type=self.config.type,
                             outFile=self.path, buildNum=self.revision,
                             keyFile=self.config.keyFile)
    else:
        # Gecko-based extensions are the default packaging path.
        import buildtools.packagerGecko as packager
        packager.createBuild(self.tempdir, outFile=self.path,
                             buildNum=self.revision,
                             keyFile=self.config.keyFile)

    if not os.path.exists(self.path):
        raise Exception("Build failed, output file hasn't been created")

    # Maintain a "00latest" pointer to the newest build; fall back to a copy
    # on platforms without symlink support.
    linkPath = os.path.join(baseDir, '00latest%s' % self.config.packageSuffix)
    if hasattr(os, 'symlink'):
        if os.path.exists(linkPath):
            os.remove(linkPath)
        os.symlink(os.path.basename(self.path), linkPath)
    else:
        shutil.copyfile(self.path, linkPath)
def executeQuery(cursor, query, args=None):
    """Execute *query* after expanding the #PFX# table-prefix placeholder.

    The prefix comes from the [reports] dbprefix config option. The
    placeholder is substituted with str.replace() rather than re.sub(),
    because re.sub() treats the replacement as a regex template: a prefix
    containing backslashes would be mangled. For plain prefixes the result
    is identical.
    """
    tablePrefix = get_config().get('reports', 'dbprefix')
    cursor.execute(query.replace('#PFX#', tablePrefix), args)
data = json.load(f) # Keep track of the current log file in global variable in case we need to # identify it later if there's a problem. (This works because the files are # processed lazily.) _last_log_file = log_file except IOError: sys.exit("Could not read log file %s" % log_file) return data if __name__ == "__main__": if not len(sys.argv) == 2: print "Usage: python -m sitescripts.filterhits.bin.reprocess_logs /path/to/logs" sys.exit(1) interval = get_config().get("filterhitstats", "interval") def read_update(f): return geometrical_mean.update(interval, read_data(f)) if sys.argv[1].endswith(".log"): sql = read_update(sys.argv[1]) else: sql = itertools.chain.from_iterable( itertools.imap(read_update, log_files(sys.argv[1]))) db_connection = db.connect() try: db.write(db_connection, sql) except:
if __name__ == '__main__':
    setupStderr()

    # Usage: <script> all|week|day [weekday]; 'week' additionally requires a
    # numeric weekday argument.
    if len(sys.argv) < 2:
        raise Exception('No interval specified')

    interval = sys.argv[1]
    if not (interval in ['all', 'week', 'day']):
        raise Exception('Invalid interval')

    if interval == 'week' and len(sys.argv) < 3:
        raise Exception('No weekday specified')
    weekDay = int(sys.argv[2]) if interval == 'week' else -1

    currentTime = time()
    # startTime 0 means "no lower bound" (interval 'all').
    startTime = 0
    if interval == 'week':
        startTime = currentTime - 7 * 24 * 60 * 60
    elif interval == 'day':
        startTime = currentTime - 24 * 60 * 60

    # Catch-all subscription used for reports not matching any real one.
    fakeSubscription = {
        'url': 'https://fake.adblockplus.org',
        'name': get_config().get('reports', 'defaultSubscriptionName'),
        'email': get_config().get('reports', 'defaultSubscriptionRecipient')
    }
    subscriptions, subscriptionList = loadSubscriptions()
    subscriptionList.append(fakeSubscription)

    reports = scanReports()
    sendNotifications(reports)
recipients.add(defemail) emails[defemail].append(report) # Generate new digests digests = set() for email, reports in emails.iteritems(): if len(reports) == 0: continue file = getDigestPath(dir, email) template = get_template(get_config().get('reports', 'htmlDigestTemplate')) template.stream({'email': email, 'reports': reports}).dump(file, encoding='utf-8') digests.add(file) # Remove not updated digests which are more then 2 weeks old for filename in os.listdir(dir): file = os.path.join(dir, filename) if os.path.isfile(file) and file not in digests and re.match(r'^[\da-f]{32}\.html$', filename) and os.stat(file).st_mtime < currentTime - 14*24*60*60: os.remove(file) def getSubscriptionInfo(subscription): sub = { 'name': subscription.name, 'type': subscription.type } return sub if __name__ == '__main__': setupStderr() currentTime = time() updateDigests(get_config().get('reports', 'digestPath'))
def get_file_stats_template():
    """Load the Jinja2 template used to render a per-file statistics page.

    The template name comes from the [stats] filePageTemplate option.
    """
    config = get_config()
    environment = get_template_environment()
    return environment.get_template(config.get('stats', 'filePageTemplate'))
except UnicodeDecodeError: return send_simple_response(start_response, 400, 'Invalid form data encoding') return func(environ, start_response, data) return wrapper def multiplex(environ, start_response): try: path = environ['PATH_INFO'] try: handler = handlers[path] except KeyError: handler = handlers[re.sub(r'[^/]+$', '', path)] except KeyError: start_response('404 Not Found', [('Content-Type', 'text/plain')]) return ['Not Found'] return handler(environ, start_response) for module in set(get_config().options('multiplexer')) - set( get_config().defaults()): module_path = get_config().get('multiplexer', module) if module_path: imp.load_source(module, module_path) else: importlib.import_module(module)
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>.

import codecs
import json
import time

from sitescripts.notifications.parser import load_notifications
from sitescripts.utils import get_config, setupStderr


def generate_notifications(path):
    """Write the static notification JSON feed to *path*.

    path -- output file; written as UTF-8 JSON with a version stamp derived
            from the current UTC time (YYYYMMDDHHMM).
    """
    notifications = load_notifications()
    # Ignoring notifications with variants here - we can only process those in a
    # URL handler. (The filter previously kept exactly those entries; the
    # condition must exclude them.)
    notifications = [x for x in notifications if "variants" not in x]
    output = {
        "notifications": notifications,
        "version": time.strftime("%Y%m%d%H%M", time.gmtime())
    }
    with codecs.open(path, "wb", encoding="utf-8") as file:
        json.dump(output, file, ensure_ascii=False, indent=2,
                  separators=(',', ': '), sort_keys=True)


if __name__ == "__main__":
    setupStderr()
    output = get_config().get("notifications", "output")
    generate_notifications(output)
def run(self):
    """ Run the nightly build process for one extension """
    try:
        if self.config.type == 'ie':
            # We cannot build IE builds, simply list the builds already in
            # the directory. Basename has to be deduced from the repository name.
            self.basename = os.path.basename(self.config.repository)
        else:
            # copy the repository into a temporary directory
            self.copyRepository()
            self.buildNum = self.getCurrentBuild()

            # get meta data from the repository
            if self.config.type == 'android':
                self.readAndroidMetadata()
            elif self.config.type == 'chrome':
                self.readChromeMetadata()
            elif self.config.type == 'safari':
                self.readSafariMetadata()
            elif self.config.type in {'gecko', 'gecko-webext'}:
                self.readGeckoMetadata()
            elif self.config.type == 'edge':
                self.read_edge_metadata()
            else:
                # str.format, not '%': applying '%' to a '{}' template raised
                # TypeError instead of producing the intended message.
                raise Exception('Unknown build type {}'.format(self.config.type))

            # create development build
            self.build()

            # write out changelog
            self.writeChangelog(self.getChanges())

        # write update manifest
        self.writeUpdateManifest()

        # retire old builds
        versions = self.retireBuilds()

        if self.config.type == 'ie':
            self.writeIEUpdateManifest(versions)

        # update index page
        self.updateIndex(versions)

        # update nightlies config
        self.config.latestRevision = self.revision

        if (self.config.type in {'gecko', 'gecko-webext'} and
                self.config.galleryID and
                get_config().has_option('extensions', 'amo_key')):
            self.uploadToMozillaAddons()
        elif self.config.type == 'chrome' and self.config.clientID and self.config.clientSecret and self.config.refreshToken:
            self.uploadToChromeWebStore()
        elif self.config.type == 'edge' and self.config.clientID and self.config.clientSecret and self.config.refreshToken and self.config.tenantID:
            self.upload_to_windows_store()
    finally:
        # clean up
        if self.tempdir:
            shutil.rmtree(self.tempdir, ignore_errors=True)
result[name] = { 'source': None, 'target': None, 'user': None, 'group': None, 'postsync': None, 'ignore': [] } if isinstance(result[name][setting], list): result[name][setting] = get_config().get('filesync', option).split(' ') else: result[name][setting] = get_config().get('filesync', option) return result if __name__ == '__main__': setupStderr() syncState = ConfigParser.SafeConfigParser() syncStateFile = get_config().get('filesync', 'syncData') if os.path.exists(syncStateFile): syncState.read(syncStateFile) settings = readSyncSettings() for name, value in settings.iteritems(): syncFiles(name, value, syncState) file = open(syncStateFile, 'wb') syncState.write(file)
return hmac.new(get_config().get('reports', 'secret'), email.encode('utf-8')).hexdigest() def getDigestId(email): hash = hashlib.md5() hash.update(email.encode('utf-8')) return hash.hexdigest() def getDigestPath(dir, email): return os.path.join(dir, getDigestId(email) + '.html') def getDigestSecret(id, (year, week, weekday)): mac = hmac.new(get_config().get('reports', 'secret'), id) mac.update(str(year)) mac.update(str(week)) return mac.hexdigest() def getDigestSecret_compat(id, (year, week, weekday)): hash = hashlib.md5() hash.update(get_config().get('reports', 'secret')) hash.update(id) hash.update(str(year)) hash.update(str(week)) return hash.hexdigest() @cached(600)
def handleRequest(environ, start_response):
    """WSGI handler that updates a report's status via a POSTed form.

    Expects an application/x-www-form-urlencoded POST with guid, secret,
    status, usefulness and optional notify fields. On success redirects (302)
    to the report page; on any validation failure returns the error page.
    """
    setupStderr(environ['wsgi.errors'])

    # Only form-encoded POST requests are accepted.
    if environ['REQUEST_METHOD'].upper() != 'POST' or not environ.get(
            'CONTENT_TYPE', '').startswith('application/x-www-form-urlencoded'):
        return showError('Unsupported request method', start_response)

    try:
        request_body_length = int(environ['CONTENT_LENGTH'])
    except:
        # NOTE(review): bare except — should be narrowed to
        # (KeyError, ValueError) as done in the submit handler; confirm.
        return showError('Invalid or missing Content-Length header',
                         start_response)

    request_body = environ['wsgi.input'].read(request_body_length)
    params = {}
    for key, value in parse_qsl(request_body):
        params[key] = value.decode('utf-8')

    # The guid must look like a lowercase UUID.
    guid = params.get('guid', '').lower()
    if not re.match(
            r'^[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$',
            guid):
        return showError('Invalid or missing report GUID', start_response)

    reportData = getReport(guid)

    if reportData == None:
        return showError('Report does not exist', start_response)

    # Accept either the current HMAC secret or the legacy MD5-based one.
    secret = calculateReportSecret(guid)
    if params.get('secret', '') != secret and params.get(
            'secret', '') != calculateReportSecret_compat(guid):
        return showError('Wrong secret value', start_response)

    # Status text is capped at 1024 characters.
    reportData['status'] = params.get('status', '')
    if len(reportData['status']) > 1024:
        reportData['status'] = reportData['status'][:1024]

    oldusefulness = reportData.get('usefulness', '0')
    reportData['usefulness'] = params.get('usefulness', '0')

    if ('email' in reportData):
        # Keep the reporter's aggregate usefulness score in sync.
        updateUserUsefulness(getUserId(reportData['email']),
                             reportData['usefulness'], oldusefulness)

    saveReport(guid, reportData)

    if params.get('notify', '') and 'email' in reportData:
        email = reportData['email']
        # De-obfuscate " at " / " dot " before validating and mailing.
        email = re.sub(r' at ', r'@', email)
        email = re.sub(r' dot ', r'.', email)
        if re.match(r'^[\w.%+-]+@[\w.%+-]+(\.[\w.%+-]+)+', email):
            sendUpdateNotification({
                'email': email,
                'url': get_config().get('reports', 'urlRoot') + guid,
                'status': reportData['status'],
            })

    # Redirect back to the report page; the random query parameter busts
    # caches, the secret travels in the fragment so it stays client-side.
    newURL = get_config().get('reports', 'urlRoot') + guid
    newURL += '?updated=' + str(int(random.uniform(0, 10000)))
    newURL += '#secret=' + secret
    start_response('302 Found',
                   [('Location', newURL.encode('utf-8'))])
    return []