Example #1
    def respond_to_client_hello(self, message):
        """ Establishing new connection to id_b, send a 128 bit response consisting of
        8 bytes challenge, and a H_k(id_a, id_b, R_a) truncated to 8 bytes.
        """
        # Verify that incoming packet has correct length
        if not len(message) == 18:
            _logger.info('Wrong length of client hello')
            return

        # Verify incoming MAC
        expected_mac = handshake_mac(self.shared_key, message[:-8])
        if not constant_time_compare(message[-8:], expected_mac):
            _logger.info('Incorrect mac for client hello')
            return

        # Check that version is supported
        client_version = decode_version(message[1:2])
        if not client_version == self.version:
            # reply with supported version, and copy of client's message
            _logger.info('Unsupported version of client hello')
            msg = (six.int2byte(Message.version_not_supported) +
                encode_version(self.version) +
                message[2:10])
            mac = handshake_mac(self.shared_key, msg)
            self._send(msg + mac)
            return

        self.R_a = rng(8)
        self.R_b = message[2:10]

        msg = six.int2byte(Message.server_hello) + self.R_a
        mac = handshake_mac(self.shared_key, msg, self.R_b)
        self.state = ServerState.wait_for_sa_proposal
        self._send(msg + mac)
Example #2
File: models.py Project: streamr/marvin
    def verify_auth_data(self, auth_data):
        """ Verify that an auth_data is valid for this user.

        In practice this means checking that the password in the given data is still valid
        and has not expired yet.
        """
        return constant_time_compare(auth_data['p'].encode('ascii'), self.password_hash[-10:].encode('ascii'))
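Every example on this page routes a secret comparison through constant_time_compare rather than ==, so the comparison takes the same time regardless of where the first mismatching byte sits and a remote caller cannot narrow down a MAC, slug or hash by timing responses. A minimal sketch of such a helper on top of the standard library (an illustration, not the implementation any of these projects ship):

import hmac

def constant_time_compare(val1, val2):
    # hmac.compare_digest's running time depends only on the length of the
    # inputs, never on the position of the first differing byte.
    return hmac.compare_digest(val1, val2)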
Example #3
    def respond_to_client_hello(self, message):
        """ Establishing new connection to id_b, send a 128 bit response consisting of
        8 bytes challenge, and a H_k(id_a, id_b, R_a) truncated to 8 bytes.
        """
        # Verify that incoming packet has correct length
        if not len(message) == 18:
            _logger.info('Wrong length of client hello')
            return

        # Verify incoming MAC
        expected_mac = handshake_mac(self.shared_key, message[:-8])
        if not constant_time_compare(message[-8:], expected_mac):
            _logger.info('Incorrect mac for client hello')
            return

        # Check that version is supported
        client_version = decode_version(message[1:2])
        if not client_version == self.version:
            # reply with supported version, and copy of client's message
            _logger.info('Unsupported version of client hello')
            msg = (six.int2byte(Message.version_not_supported) +
                   encode_version(self.version) + message[2:10])
            mac = handshake_mac(self.shared_key, msg)
            self._send(msg + mac)
            return

        self.R_a = rng(8)
        self.R_b = message[2:10]

        msg = six.int2byte(Message.server_hello) + self.R_a
        mac = handshake_mac(self.shared_key, msg, self.R_b)
        self.state = ServerState.wait_for_sa_proposal
        self._send(msg + mac)
Example #4
File: views.py Project: Alex-Zeng/ZDHBlog
def load_token(token):
    # Parse the token sent by the browser with loads() as a first-pass validation
    key = current_app.config.get("SECRET_KEY", "The securet key by C~C!")

    try:
        s = URLSafeSerializer(key)
        id, name, password, browser_id, life_time = s.loads(token)
    except BadData:
        print("token had been modified!")
        return None

    # Check whether the browser information has changed
    bi = create_browser_id()
    if not constant_time_compare(str(bi), str(browser_id)):
        print("the user environment had changed, so token has been expired!")
        return None

    # Verify the password
    user = User.query.get(id)
    if user:
        # If id, name, etc. could be loaded, the user has logged in successfully before, so the cache should hold this token
        token_cache = simple_cache.get(token)
        if not token_cache:  # Two reasons it may be missing: 1. the cache entry expired (normal); 2. the cache mechanism failed (abnormal).
            print("the token is not found in cache.")
            return None
        if str(password) != str(user.password):
            print("the password in token is not matched!")
            simple_cache.delete(token)
            return None
        else:
            simple_cache.set(token, 1, timeout=life_time)  # refresh the expiry time
    else:
        print('the user is not found, the token is invalid!')
        return None
    return user
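For context, the token that load_token() unpacks above has to be produced by a matching dumps() call. A hypothetical issuing counterpart, reusing current_app, create_browser_id, simple_cache and the User fields from the example (the function name and field order are assumptions, not part of the original project):

from flask import current_app
from itsdangerous import URLSafeSerializer

def create_token(user, life_time=3600):
    # Serialize exactly the fields load_token() unpacks, signed with the same key.
    key = current_app.config.get("SECRET_KEY", "The securet key by C~C!")
    s = URLSafeSerializer(key)
    token = s.dumps([user.id, user.name, user.password, create_browser_id(), life_time])
    # Cache the token so load_token() will accept it on later requests.
    simple_cache.set(token, 1, timeout=life_time)
    return token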
Example #5
def download(slug_candidate):
    global filename, filesize, q, download_count
    global REQUEST_DOWNLOAD, REQUEST_PROGRESS

    if not constant_time_compare(slug, slug_candidate):
        abort(404)

    # each download has a unique id
    download_id = download_count
    download_count += 1

    # prepare some variables to use inside the generate() function below,
    # which runs outside of the request context
    shutdown_func = request.environ.get('werkzeug.server.shutdown')
    path = request.path

    # tell GUI the download started
    add_request(REQUEST_DOWNLOAD, path, { 'id':download_id })

    dirname = os.path.dirname(filename)
    basename = os.path.basename(filename)

    def generate():
        chunk_size = 102400 # 100kb

        fp = open(filename, 'rb')
        done = False
        while not done:
            chunk = fp.read(chunk_size)
            if chunk == '':
                done = True
            else:
                yield chunk

                # tell GUI the progress
                downloaded_bytes = fp.tell()
                percent = round((1.0 * downloaded_bytes / filesize) * 100, 2)
                sys.stdout.write("\r{0}, {1}%          ".format(human_readable_filesize(downloaded_bytes), percent))
                sys.stdout.flush()
                add_request(REQUEST_PROGRESS, path, { 'id':download_id, 'bytes':downloaded_bytes })

        fp.close()
        sys.stdout.write("\n")

        # download is finished, close the server
        global stay_open
        if not stay_open:
            print "Closing automatically because download finished"
            if shutdown_func is None:
                raise RuntimeError('Not running with the Werkzeug Server')
            shutdown_func()

    r = Response(generate())
    r.headers.add('Content-Length', filesize)
    r.headers.add('Content-Disposition', 'attachment', filename=basename)
    return r
Example #6
def load_token(token):
    try:
        max_age = app.config['REMEMBER_COOKIE_DURATION'].total_seconds()
        user_id, hash_a = login_serializer.loads(token, max_age=max_age)
    except BadData:
        return None
    user = User.query.get(user_id)
    if user is not None:
        hash_a = hash_a.encode('utf-8')
        hash_b = sha1(user.password).hexdigest()
        if constant_time_compare(hash_a, hash_b):
            return user
    return None
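The remember-me token checked above pairs the user id with a SHA-1 of the stored password, so changing the password invalidates every outstanding token. A sketch of the issuing side, assuming login_serializer is an itsdangerous URLSafeTimedSerializer keyed with the app secret (the setup and helper name are assumptions):

from hashlib import sha1
from itsdangerous import URLSafeTimedSerializer

login_serializer = URLSafeTimedSerializer(app.secret_key)

def make_remember_token(user):
    # load_token() expects (user_id, hex SHA-1 of the stored password).
    return login_serializer.dumps([user.id, sha1(user.password).hexdigest()])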
Example #7
File: csrf.py Project: quru/qis
    def is_csrf_token_bad(token, csrf_secret):
        try:
            jsw = JSONWebSignatureSerializer(app.secret_key)
            tobj = jsw.loads(token)

            nonce_int = bytes_to_int(b64decode(_str_to_bytes(tobj["n"])))
            key_int = bytes_to_int(b64decode(_str_to_bytes(tobj["k"])))

            user_secret = int_to_bytes(nonce_int ^ key_int)

            return not constant_time_compare(
                user_secret,
                csrf_secret
            )
        except Exception:
            return True
Example #8
File: csrf.py Project: quru/flask-csrf
    def is_csrf_token_bad(token, csrf_secret):
        try:
            jsw = JSONWebSignatureSerializer(app.secret_key)
            tobj = jsw.loads(token)

            nonce_int = bytes_to_int(b64decode(tobj["n"]))
            key_int = bytes_to_int(b64decode(tobj["k"]))

            user_secret = int_to_bytes(nonce_int ^ key_int)

            return not constant_time_compare(
                user_secret,
                csrf_secret
            )
        except Exception:
            return True
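Both CSRF checks above recover the per-session secret by XOR-ing the nonce and key carried inside a signed token. The issuing side would mask the secret with a fresh random nonce, so the token changes on every render while still XOR-ing back to the same secret. A rough sketch reusing the project's bytes_to_int/int_to_bytes helpers (the generator function itself is hypothetical):

import os
from base64 import b64encode
from itsdangerous import JSONWebSignatureSerializer

def make_csrf_token(csrf_secret):
    nonce = os.urandom(len(csrf_secret))
    # Choose key so that nonce XOR key reconstructs csrf_secret on the verify side.
    key_int = bytes_to_int(nonce) ^ bytes_to_int(csrf_secret)
    jsw = JSONWebSignatureSerializer(app.secret_key)
    return jsw.dumps({
        'n': b64encode(nonce).decode('ascii'),
        'k': b64encode(int_to_bytes(key_int)).decode('ascii'),
    })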
Example #9
    def respond_to_rekey_completed(self, data):
        # Verify length
        if len(data) != 1 + self._mac_length:
            _logger.info('Invalid length of rekey completed, was %d', len(data))
            return

        # Verify MAC
        expected_mac = self.get_mac(data[:-self._mac_length], key=self.new_master_key)
        if not constant_time_compare(data[-self._mac_length:], expected_mac):
            _logger.info('Invalid MAC on rekey completed')
            return

        self.channel.shared_key = self.new_master_key
        self.state = ClientState.terminated
        del self.new_master_key
        del self.pkey
Example #10
def index(slug_candidate):
    global filename, filesize, filehash, slug, strings, REQUEST_LOAD, onionshare_dir

    if not constant_time_compare(slug, slug_candidate):
        abort(404)

    add_request(REQUEST_LOAD, request.path)
    return render_template_string(
        open('{0}/index.html'.format(onionshare_dir)).read(),
        slug=slug,
        filename=os.path.basename(filename),
        filehash=filehash,
        filesize=filesize,
        filesize_human=human_readable_filesize(filesize),
        strings=strings
    )
Example #11
    def respond_to_server_hello(self, data):
        # Verify length
        if not len(data) == 17:
            _logger.info('Invalid length of SERVER_HELLO')
            return

        # Verify MAC
        expected_mac = handshake_mac(self.shared_key, data[:-8], self.R_b)
        if not constant_time_compare(data[-8:], expected_mac):
            _logger.info('Invalid mac on SERVER_HELLO')
            return

        self.R_a = data[1:-8]
        sa_msg = six.int2byte(Message.sa_proposal)
        self.generate_and_set_session_key()
        sa_mac = handshake_mac(self.shared_key, sa_msg, self.R_a)
        self._send(sa_msg + sa_mac)
        self.state = ClientState.wait_for_sa
Example #12
    def respond_to_server_hello(self, data):
        # Verify length
        if not len(data) == 17:
            _logger.info('Invalid length of SERVER_HELLO')
            return

        # Verify MAC
        expected_mac = handshake_mac(self.shared_key, data[:-8], self.R_b)
        if not constant_time_compare(data[-8:], expected_mac):
            _logger.info('Invalid mac on SERVER_HELLO')
            return

        self.R_a = data[1:-8]
        sa_msg = six.int2byte(Message.sa_proposal)
        self.generate_and_set_session_key()
        sa_mac = handshake_mac(self.shared_key, sa_msg, self.R_a)
        self._send(sa_msg + sa_mac)
        self.state = ClientState.wait_for_sa
Example #13
    def respond_to_rekey_completed(self, data):
        # Verify length
        if len(data) != 1 + self._mac_length:
            _logger.info('Invalid length of rekey completed, was %d',
                         len(data))
            return

        # Verify MAC
        expected_mac = self.get_mac(data[:-self._mac_length],
                                    key=self.new_master_key)
        if not constant_time_compare(data[-self._mac_length:], expected_mac):
            _logger.info('Invalid MAC on rekey completed')
            return

        self.channel.shared_key = self.new_master_key
        self.state = ClientState.terminated
        del self.new_master_key
        del self.pkey
Example #14
    def respond_to_rekey_confirm(self, message):
        # Verify length
        if not len(message) == self._mac_length + 1:
            _logger.info('Invalid length of rekey confirm')
            return

        # Verify MAC
        msg, sent_mac = message[:-self._mac_length], message[-self._mac_length:]
        expected_mac = self.get_mac(msg, key=self.new_master_key)
        if not constant_time_compare(sent_mac, expected_mac):
            _logger.info('Invalid MAC of rekey confirm')
            return

        # Update shared_key
        self.shared_key = self.new_master_key
        msg = six.int2byte(Message.rekey_completed)
        self.state = ServerState.rekey_confirmed
        full_msg = msg + self.get_mac(msg, key=self.shared_key)
        self._send(full_msg)
        del self.new_master_key
        del self.pkey
Example #15
File: security.py Project: streamr/marvin
def is_correct_pw(password, password_hash):
    """ Check that the given password hashes to the given password_hash.

    :param password: The password to test
    :param password_hash: A 'method$salt$hash' string from which we can extract the method and the
        params used in the original hashing.
    """
    # pylint: disable=invalid-name
    method, salt, hashed_pw = password_hash.split('$')
    # For now we only support scrypt hashing, so we can ignore the algorithm,
    # and we know the number of params. This should be rewritten if we want to
    # support other algorithms or variable length params
    (_, N, p, r) = method.split(':')
    password_bytes = password.encode('utf-8')
    salt_bytes = salt.encode('utf-8')
    (N, p, r) = (int(N), int(p), int(r))
    hashed_bytes = scrypt.hash(password_bytes, salt_bytes, N, p, r)
    b64hash = base64.b64encode(hashed_bytes)
    b64hash_bytes = force_bytes(b64hash)
    hashed_pw_bytes = force_bytes(hashed_pw)
    return constant_time_compare(hashed_pw_bytes, b64hash_bytes)
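is_correct_pw() expects a 'method$salt$hash' string whose method part carries the scrypt parameters. A hypothetical hashing counterpart that produces that format, passing the parameters positionally in the same order is_correct_pw() does so the two stay consistent:

import base64
import os
import scrypt

def hash_pw(password, N=16384, p=1, r=8):
    # A base64 salt keeps '$' out of the stored string, so the split in is_correct_pw() still works.
    salt = base64.b64encode(os.urandom(16)).decode('ascii')
    hashed = scrypt.hash(password.encode('utf-8'), salt.encode('utf-8'), N, p, r)
    b64hash = base64.b64encode(hashed).decode('ascii')
    return 'scrypt:{}:{}:{}${}${}'.format(N, p, r, salt, b64hash)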
Example #16
    def handle_webhook(self):
        """
        https://clearbit.com/docs?python#webhooks
        """

        request_signature = request.headers.get('x-request-signature')
        if request_signature is None:
            abort(BAD_REQUEST)

        algorithm, signature = request_signature.split('=')
        if not all((algorithm == 'sha1', signature)):
            abort(BAD_REQUEST)

        digest = hmac.new(self.api_key.encode(), request.data,
                          hashlib.sha1).hexdigest()
        if not itsdangerous.constant_time_compare(digest, signature):
            abort(BAD_REQUEST)

        clearbit_result.send(self, result=request.get_json())

        return self.app.response_class(status=OK)
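The handler above recomputes an HMAC-SHA1 of the raw request body with the API key and compares it against the 'sha1=...' header in constant time. For tests, the sending side of that scheme can be reproduced with a small helper (the function name is ours, not part of the Clearbit API):

import hashlib
import hmac

def sign_webhook_body(api_key, body):
    # handle_webhook() expects 'sha1=' followed by the hex HMAC-SHA1 of the raw body.
    digest = hmac.new(api_key.encode(), body, hashlib.sha1).hexdigest()
    return 'sha1=' + digest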
Example #17
    def respond_to_rekey_confirm(self, message):
        # Verify length
        if not len(message) == self._mac_length + 1:
            _logger.info('Invalid length of rekey confirm')
            return

        # Verify MAC
        msg, sent_mac = (message[:-self._mac_length],
                         message[-self._mac_length:])
        expected_mac = self.get_mac(msg, key=self.new_master_key)
        if not constant_time_compare(sent_mac, expected_mac):
            _logger.info('Invalid MAC of rekey confirm')
            return

        # Update shared_key
        self.shared_key = self.new_master_key
        msg = six.int2byte(Message.rekey_completed)
        self.state = ServerState.rekey_confirmed
        full_msg = msg + self.get_mac(msg, key=self.shared_key)
        self._send(full_msg)
        del self.new_master_key
        del self.pkey
Example #18
    def handle_request(self, path):

        # Verify the signature.
        query = dict(request.args.iteritems())
        old_sig = str(query.pop('s', None))
        if not old_sig:
            abort(404)
        signer = Signer(current_app.secret_key)
        new_sig = signer.get_signature('%s?%s' % (path, urlencode(sorted(query.iteritems()), True)))
        if not constant_time_compare(old_sig, new_sig):
            abort(404)
        
        # Expand kwargs.
        query = dict((SHORT_TO_LONG.get(k, k), v) for k, v in query.iteritems())

        remote_url = query.get('url')
        if remote_url:

            # This is redundant for newly built URLs, but not for those which
            # have already been generated and cached.
            parsed = urlparse(remote_url)
            if parsed.scheme not in ALLOWED_SCHEMES:
                abort(404)

            # Download the remote file.
            makedirs(current_app.config['IMAGES_CACHE'])
            path = os.path.join(
                current_app.config['IMAGES_CACHE'],
                hashlib.md5(remote_url).hexdigest() + os.path.splitext(parsed.path)[1]
            )

            if not os.path.exists(path):
                log.info('downloading %s' % remote_url)
                tmp_path = path + '.tmp-' + str(os.getpid())
                fh = open(tmp_path, 'wb')
                fh.write(urlopen(remote_url).read())
                fh.close()
                call(['mv', tmp_path, path])
        else:
            path = self.find_img(path)
            if not path:
                abort(404) # Not found.

        raw_mtime = os.path.getmtime(path)
        mtime = datetime.datetime.utcfromtimestamp(raw_mtime)
        # log.debug('last_modified: %r' % mtime)
        # log.debug('if_modified_since: %r' % request.if_modified_since)
        if request.if_modified_since and request.if_modified_since >= mtime:
            return '', 304
        
        mode = query.get('mode')

        transform = query.get('transform')
        transform = re.split(r'[;,_/ ]', transform) if transform else None

        background = query.get('background')
        width = query.get('width')
        width = int(width) if width else None
        height = query.get('height')
        height = int(height) if height else None
        quality = query.get('quality')
        quality = int(quality) if quality else 75
        format = (query.get('format', '') or os.path.splitext(path)[1][1:] or 'jpeg').lower()
        format = {'jpg' : 'jpeg'}.get(format, format)
        has_version = 'version' in query
        use_cache = query.get('cache', True)

        if use_cache:
            cache_key_parts = [path, mode, width, height, quality, format, background]
            if transform:
                cache_key_parts.append(transform)
            cache_key = hashlib.md5(repr(tuple(cache_key_parts))).hexdigest()
            cache_dir = os.path.join(current_app.config['IMAGES_CACHE'], cache_key[:2])
            cache_path = os.path.join(cache_dir, cache_key + '.' + format)
            cache_mtime = os.path.getmtime(cache_path) if os.path.exists(cache_path) else None
        
        mimetype = 'image/%s' % format
        cache_timeout = 31536000 if has_version else current_app.config['IMAGES_MAX_AGE']

        if not use_cache or not cache_mtime or cache_mtime < raw_mtime:
            
            log.info('resizing %r for %s' % (path, query))
            img = image.open(path)
            img = self.resize(img,
                width=width,
                height=height,
                mode=mode,
                background=background,
                transform=transform,
            )

            if not use_cache:
                fh = StringIO()
                img.save(fh, format, quality=quality)
                return fh.getvalue(), 200, [
                    ('Content-Type', mimetype),
                    ('Cache-Control', cache_timeout),
                ]
            
            makedirs(cache_dir)
            cache_file = open(cache_path, 'wb')
            img.save(cache_file, format, quality=quality)
            cache_file.close()
        
        return send_file(cache_path, mimetype=mimetype, cache_timeout=cache_timeout)
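The signature check at the top of handle_request() covers the path plus the remaining query string, sorted and urlencoded, and expects the result in the 's' parameter. A sketch of the URL-building side under the same conventions (Python 2, matching the iteritems/urlencode usage above; the helper name is an assumption):

from urllib import urlencode
from flask import current_app
from itsdangerous import Signer

def build_signed_url(path, params):
    query = urlencode(sorted(params.iteritems()), True)
    # Same value handle_request() recomputes and compares in constant time.
    sig = Signer(current_app.secret_key).get_signature('%s?%s' % (path, query))
    return '%s?%s&s=%s' % (path, query, sig)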
Example #19
    def handle_request(self, path):

        # Verify the signature.
        query = dict(iteritems(request.args))
        old_sig = str(query.pop('s', None))
        if not old_sig:
            abort(404)
        signer = Signer(current_app.secret_key)
        new_sig = signer.get_signature(
            '%s?%s' % (path, urlencode(sorted(iteritems(query)), True)))
        if not constant_time_compare(str(old_sig), str(new_sig)):
            log.warning("Signature mismatch: url's {} != expected {}".format(
                old_sig, new_sig))
            abort(404)

        # Expand kwargs.

        query = dict((SHORT_TO_LONG.get(k, k), v) for k, v in iteritems(query))
        remote_url = query.get('url')
        if remote_url:

            # This is redundant for newly built URLs, but not for those which
            # have already been generated and cached.
            parsed = urlparse(remote_url)
            if parsed.scheme not in ALLOWED_SCHEMES:
                abort(404)

            # Download the remote file.
            makedirs(current_app.config['IMAGES_CACHE'])
            path = os.path.join(
                current_app.config['IMAGES_CACHE'],
                hashlib.md5(encode_str(remote_url)).hexdigest() +
                os.path.splitext(parsed.path)[1])

            if not os.path.exists(path):
                log.info('downloading %s' % remote_url)
                tmp_path = path + '.tmp-' + str(os.getpid())
                try:
                    remote_file = urlopen(remote_url).read()
                except HTTPError as e:
                    # abort with remote error code (403 or 404 most times)
                    # log.debug('HTTP Error: %r' % e)
                    abort(e.code)
                else:
                    fh = open(tmp_path, 'wb')
                    fh.write(remote_file)
                    fh.close()
                call(['mv', tmp_path, path])
        else:
            path = self.find_img(path)
            if not path:
                abort(404)  # Not found.

        raw_mtime = os.path.getmtime(path)
        mtime = datetime.datetime.utcfromtimestamp(raw_mtime).replace(
            microsecond=0)
        # log.debug('last_modified: %r' % mtime)
        # log.debug('if_modified_since: %r' % request.if_modified_since)
        if request.if_modified_since and request.if_modified_since >= mtime:
            return '', 304

        mode = query.get('mode')

        transform = query.get('transform')
        transform = re.split(r'[;,_/ ]', transform) if transform else None

        background = query.get('background')
        width = query.get('width')
        width = int(width) if width else None
        height = query.get('height')
        height = int(height) if height else None
        quality = query.get('quality')
        quality = int(quality) if quality else 75
        format = (query.get('format', '') or os.path.splitext(path)[1][1:]
                  or 'jpeg').lower()
        format = {'jpg': 'jpeg'}.get(format, format)
        has_version = 'version' in query
        use_cache = query.get('cache', True)
        enlarge = query.get('enlarge', False)

        sharpen = query.get('sharpen')
        sharpen = re.split(r'[+:;,_/ ]', sharpen) if sharpen else None

        if use_cache:

            # The parts in this initial list were parameters cached in version 1.
            # In order to avoid regenerating all images when a new feature is
            # added, we append (feature_name, value) tuples to the end.
            cache_key_parts = [
                path, mode, width, height, quality, format, background
            ]
            if transform:
                cache_key_parts.append(('transform', transform))
            if sharpen:
                cache_key_parts.append(('sharpen', sharpen))
            if enlarge:
                cache_key_parts.append(('enlarge', enlarge))

            cache_key = hashlib.md5(
                repr(tuple(cache_key_parts)).encode('utf-8')).hexdigest()
            cache_dir = os.path.join(current_app.config['IMAGES_CACHE'],
                                     cache_key[:2])
            cache_path = os.path.join(cache_dir, cache_key + '.' + format)
            cache_mtime = os.path.getmtime(cache_path) if os.path.exists(
                cache_path) else None

        mimetype = 'image/%s' % format
        cache_timeout = 31536000 if has_version else current_app.config[
            'IMAGES_MAX_AGE']

        if not use_cache or not cache_mtime or cache_mtime < raw_mtime:

            log.info('resizing %r for %s' % (path, query))
            image = Image.open(path)
            image = self.resize(
                image,
                background=background,
                enlarge=enlarge,
                height=height,
                mode=mode,
                transform=transform,
                width=width,
            )
            image = self.post_process(
                image,
                sharpen=sharpen,
            )

            if not use_cache:
                fh = StringIO()
                image.save(fh, format, quality=quality)
                return fh.getvalue(), 200, [
                    ('Content-Type', mimetype),
                    ('Cache-Control', str(cache_timeout)),
                ]

            makedirs(cache_dir)
            cache_file = open(cache_path, 'wb')
            image.save(cache_file, format, quality=quality)
            cache_file.close()

        return send_file(cache_path,
                         mimetype=mimetype,
                         cache_timeout=cache_timeout)
Example #20
    def handle_request(self, path):

        query = dict(request.args.iteritems())
        if current_app.secret_key != 'nokey' :
            # Verify the signature.
            old_sig = str(query.pop('s', None))
            if not old_sig:
                abort(404)
            signer = Signer(current_app.secret_key)
            new_sig = signer.get_signature('%s?%s' % (path, urlencode(sorted(query.iteritems()), True)))
            if not constant_time_compare(old_sig, new_sig):
                abort(404)
        
        remote_url = query.get('u')
        if remote_url:

            # This is redundant for newly built URLs, but not for those which
            # have already been generated and cached.
            parsed = urlparse(remote_url)
            if parsed.scheme not in ALLOWED_SCHEMES:
                abort(404)

            # Download the remote file.
            makedirs(current_app.config['IMAGES_CACHE'])
            path = os.path.join(
                current_app.config['IMAGES_CACHE'],
                hashlib.md5(remote_url).hexdigest() + os.path.splitext(remote_url)[1]
            )

            if not os.path.exists(path):
                log.info('downloading %s' % remote_url)
                tmp_path = path + '.tmp-' + str(os.getpid())
                fh = open(tmp_path, 'wb')
                fh.write(urlopen(remote_url).read())
                fh.close()
                call(['mv', tmp_path, path])
        else:
            path = self.find_img(path)
            if not path:
                abort(404) # Not found.

        raw_mtime = os.path.getmtime(path)
        mtime = datetime.datetime.utcfromtimestamp(raw_mtime)
        # log.debug('last_modified: %r' % mtime)
        # log.debug('if_modified_since: %r' % request.if_modified_since)
        if request.if_modified_since and request.if_modified_since >= mtime:
            return '', 304
        
        
        mode = query.get('m')
        background = query.get('b')
        width = query.get('w')
        width = int(width) if width else None
        height = query.get('h')
        height = int(height) if height else None
        quality = query.get('q')
        quality = int(quality) if quality else 75
        format = query.get('f', '').lower() or os.path.splitext(path)[1][1:] or 'jpeg'
        format = {'jpg' : 'jpeg'}.get(format, format)
        has_version = 'v' in query
                
        cache_key = hashlib.md5(repr((
            path, mode, width, height, quality, format, background
        ))).hexdigest()

        cache_dir = os.path.join(current_app.config['IMAGES_CACHE'], cache_key[:2])
        cache_path = os.path.join(cache_dir, cache_key + '.' + format)

        cache_mtime = os.path.getmtime(cache_path) if os.path.exists(cache_path) else None
        
        if not cache_mtime or cache_mtime < raw_mtime:
            
            log.info('resizing %r for %s' % (path, query))
            
            img = image.open(path)
            img = self.resize(img, width=width, height=height, mode=mode, background=background)
            
            makedirs(cache_dir)
            cache_file = open(cache_path, 'wb')
            img.save(cache_file, format, quality=quality)
            cache_file.close()
        
        return send_file(cache_path,
            mimetype='image/%s' % format,
            cache_timeout=31536000 if has_version else current_app.config['IMAGES_MAX_AGE'],
        )
Example #21
    def handle_request(self, path):

        # Verify the signature.
        query = dict(request.args.iteritems())
        old_sig = str(query.pop('s', None))
        if not old_sig:
            abort(404)
        signer = Signer(current_app.secret_key)
        new_sig = signer.get_signature(
            '%s?%s' % (path, urlencode(sorted(query.iteritems()), True)))
        if not constant_time_compare(old_sig, new_sig):
            abort(404)

        remote_url = query.get('u')
        if remote_url:

            # This is redundant for newly built URLs, but not for those which
            # have already been generated and cached.
            parsed = urlparse(remote_url)
            if parsed.scheme not in ALLOWED_SCHEMES:
                abort(404)

            # Download the remote file.
            makedirs(current_app.config['IMAGES_CACHE'])
            path = os.path.join(
                current_app.config['IMAGES_CACHE'],
                hashlib.md5(remote_url).hexdigest() +
                os.path.splitext(remote_url)[1])

            if not os.path.exists(path):
                log.info('downloading %s' % remote_url)
                tmp_path = path + '.tmp-' + str(os.getpid())
                fh = open(tmp_path, 'wb')
                fh.write(urlopen(remote_url).read())
                fh.close()
                call(['mv', tmp_path, path])
        else:
            path = self.find_img(path)
            if not path:
                abort(404)  # Not found.

        raw_mtime = os.path.getmtime(path)
        mtime = datetime.datetime.utcfromtimestamp(raw_mtime)
        # log.debug('last_modified: %r' % mtime)
        # log.debug('if_modified_since: %r' % request.if_modified_since)
        if request.if_modified_since and request.if_modified_since >= mtime:
            return '', 304

        mode = query.get('m')
        background = query.get('b')
        width = query.get('w')
        width = int(width) if width else None
        height = query.get('h')
        height = int(height) if height else None
        quality = query.get('q')
        quality = int(quality) if quality else 75
        format = query.get(
            'f', '').lower() or os.path.splitext(path)[1][1:] or 'jpeg'
        format = {'jpg': 'jpeg'}.get(format, format)
        has_version = 'v' in query

        cache_key = hashlib.md5(
            repr((path, mode, width, height, quality, format,
                  background))).hexdigest()

        cache_dir = os.path.join(current_app.config['IMAGES_CACHE'],
                                 cache_key[:2])
        cache_path = os.path.join(cache_dir, cache_key + '.' + format)

        cache_mtime = os.path.getmtime(cache_path) if os.path.exists(
            cache_path) else None

        if not cache_mtime or cache_mtime < raw_mtime:

            log.info('resizing %r for %s' % (path, query))

            img = image.open(path)
            img = self.resize(img,
                              width=width,
                              height=height,
                              mode=mode,
                              background=background)

            makedirs(cache_dir)
            cache_file = open(cache_path, 'wb')
            img.save(cache_file, format, quality=quality)
            cache_file.close()

        return send_file(
            cache_path,
            mimetype='image/%s' % format,
            cache_timeout=31536000
            if has_version else current_app.config['IMAGES_MAX_AGE'],
        )
Example #22
    def handle_request(self, path):

        # Verify the signature.
        query = dict(iteritems(request.args))
        old_sig = str(query.pop('s', None))
        if not old_sig:
            abort(404)
        signer = Signer(current_app.secret_key)
        new_sig = signer.get_signature('%s?%s' % (path, urlencode(sorted(iteritems(query)), True)))
        if not constant_time_compare(str(old_sig), str(new_sig)):
            log.warning("Signature mismatch: url's {} != expected {}".format(old_sig, new_sig))
            abort(404)
        
        # Expand kwargs.

        query = dict((SHORT_TO_LONG.get(k, k), v) for k, v in iteritems(query))
        remote_url = query.get('url')
        if remote_url:

            # This is redundant for newly built URLs, but not for those which
            # have already been generated and cached.
            parsed = urlparse(remote_url)
            if parsed.scheme not in ALLOWED_SCHEMES:
                abort(404)

            # Download the remote file.
            makedirs(current_app.config['IMAGES_CACHE'])
            path = os.path.join(
                current_app.config['IMAGES_CACHE'],
                hashlib.md5(encode_str(remote_url)).hexdigest() + os.path.splitext(parsed.path)[1]
            )

            if not os.path.exists(path):
                log.info('downloading %s' % remote_url)
                tmp_path = path + '.tmp-' + str(os.getpid())
                try:
                    remote_file = urlopen(remote_url).read()
                except HTTPError as e:
                    # abort with remote error code (403 or 404 most times)
                    # log.debug('HTTP Error: %r' % e)
                    abort(e.code)
                else:
                    fh = open(tmp_path, 'wb')
                    fh.write(remote_file)
                    fh.close()
                call(['mv', tmp_path, path])
        else:
            path = self.find_img(path)
            if not path:
                abort(404) # Not found.

        raw_mtime = os.path.getmtime(path)
        mtime = datetime.datetime.utcfromtimestamp(raw_mtime).replace(microsecond=0)
        # log.debug('last_modified: %r' % mtime)
        # log.debug('if_modified_since: %r' % request.if_modified_since)
        if request.if_modified_since and request.if_modified_since >= mtime:
            return '', 304
        
        mode = query.get('mode')

        transform = query.get('transform')
        transform = re.split(r'[;,_/ ]', transform) if transform else None

        background = query.get('background')
        width = query.get('width')
        width = int(width) if width else None
        height = query.get('height')
        height = int(height) if height else None
        quality = query.get('quality')
        quality = int(quality) if quality else 75
        format = (query.get('format', '') or os.path.splitext(path)[1][1:] or 'jpeg').lower()
        format = {'jpg' : 'jpeg'}.get(format, format)
        has_version = 'version' in query
        use_cache = query.get('cache', True)
        enlarge = query.get('enlarge', False)

        sharpen = query.get('sharpen')
        sharpen = re.split(r'[+:;,_/ ]', sharpen) if sharpen else None

        if use_cache:

            # The parts in this initial list were parameters cached in version 1.
            # In order to avoid regenerating all images when a new feature is
            # added, we append (feature_name, value) tuples to the end.
            cache_key_parts = [path, mode, width, height, quality, format, background]
            if transform:
                cache_key_parts.append(('transform', transform))
            if sharpen:
                cache_key_parts.append(('sharpen', sharpen))
            if enlarge:
                cache_key_parts.append(('enlarge', enlarge))


            cache_key = hashlib.md5(repr(tuple(cache_key_parts)).encode('utf-8')).hexdigest()
            cache_dir = os.path.join(current_app.config['IMAGES_CACHE'], cache_key[:2])
            cache_path = os.path.join(cache_dir, cache_key + '.' + format)
            cache_mtime = os.path.getmtime(cache_path) if os.path.exists(cache_path) else None
        
        mimetype = 'image/%s' % format
        cache_timeout = 31536000 if has_version else current_app.config['IMAGES_MAX_AGE']

        if not use_cache or not cache_mtime or cache_mtime < raw_mtime:
            
            log.info('resizing %r for %s' % (path, query))
            image = Image.open(path)
            image = self.resize(image,
                background=background,
                enlarge=enlarge,
                height=height,
                mode=mode,
                transform=transform,
                width=width,
            )
            image = self.post_process(image,
                sharpen=sharpen,
            )

            if not use_cache:
                fh = StringIO()
                image.save(fh, format, quality=quality)
                return fh.getvalue(), 200, [
                    ('Content-Type', mimetype),
                    ('Cache-Control', str(cache_timeout)),
                ]
            
            makedirs(cache_dir)
            cache_file = open(cache_path, 'wb')
            image.save(cache_file, format, quality=quality)
            cache_file.close()
        
        return send_file(cache_path, mimetype=mimetype, cache_timeout=cache_timeout)
Example #23
    def respond_to_sa_proposal(self, message):
        # Verify length
        if not 9 <= len(message) <= 255:
            _logger.info('Invalid length %d', len(message))
            return

        # Verify MAC
        msg, sig = message[:-8], message[-8:]
        expected_mac = handshake_mac(self.shared_key, msg, self.R_a)
        if not constant_time_compare(sig, expected_mac):
            _logger.info('Invalid mac on sa proposal')
            return

        msg_data = {}

        # Verify cbor data is valid (has 'macs' which is a list)
        if msg[1:]:
            try:
                msg_data = cbor.loads(msg[1:])
            except:
                _logger.info('Invalid cbor data given in sa proposal')
                return

        # Verify that the data loaded is a dict
        if not isinstance(msg_data, dict):
            _logger.info('SA proposal not a dict')
            return

        # Verify that key 'macs' is a list
        if not isinstance(msg_data.get('macs', []), list):
            _logger.info('macs given was not a list')
            return

        # Merge client parameters with defaults
        suggested_macs = set(['sha3_256'] + msg_data.get('macs', []))

        # Pick the first MAC from supported_macs that's supported by both parties
        selected_mac = 'sha3_256'
        for supported_mac in self.channel.supported_macs:
            if supported_mac in suggested_macs:
                selected_mac = supported_mac
                break

        # Verify that suggested MAC length is valid int
        suggested_mac_len = msg_data.get('mac_len', 8)
        if not isinstance(suggested_mac_len, six.integer_types):
            _logger.info('mac_len not int: %s', type(suggested_mac_len))
            return
        if not 4 <= suggested_mac_len <= 32:
            _logger.info(
                "suggested mac_len outside permitted range of 4-32 bytes")
            return

        # All jolly good, notify client of chosen MAC and signature length

        # Expand session key
        self.generate_and_set_session_key()

        sa = {
            'mac': selected_mac,
            'mac_len': suggested_mac_len,
        }
        msg = six.int2byte(Message.sa) + cbor.dumps(sa)
        mac = handshake_mac(self.session_key, msg)
        self.init_session_mac(key=self.session_key,
                              func_name=sa['mac'],
                              length=sa['mac_len'])
        self._send(msg + mac)

        # Initialize sequence numbers
        self.other_seq = self.my_seq = 0

        self.state = ServerState.established
Example #24
    def respond_to_sa_proposal(self, message):
        # Verify length
        if not 9 <= len(message) <= 255:
            _logger.info('Invalid length %d', len(message))
            return

        # Verify MAC
        msg, sig = message[:-8], message[-8:]
        expected_mac = handshake_mac(self.shared_key, msg, self.R_a)
        if not constant_time_compare(sig, expected_mac):
            _logger.info('Invalid mac on sa proposal')
            return

        msg_data = {}

        # Verify cbor data is valid (has 'macs' which is a list)
        if msg[1:]:
            try:
                msg_data = cbor.loads(msg[1:])
            except:
                _logger.info('Invalid cbor data given in sa proposal')
                return

        # Verify that the data loaded is a dict
        if not isinstance(msg_data, dict):
            _logger.info('SA proposal not a dict')
            return

        # Verify that key 'macs' is a list
        if not isinstance(msg_data.get('macs', []), list):
            _logger.info('macs given was not a list')
            return

        # Merge client parameters with defaults
        suggested_macs = set(['sha3_256'] + msg_data.get('macs', []))

        # Pick the first MAC from supported_macs that's supported by both parties
        selected_mac = 'sha3_256'
        for supported_mac in self.channel.supported_macs:
            if supported_mac in suggested_macs:
                selected_mac = supported_mac
                break

        # Verify that suggested MAC length is valid int
        suggested_mac_len = msg_data.get('mac_len', 8)
        if not isinstance(suggested_mac_len, six.integer_types):
            _logger.info('mac_len not int: %s', type(suggested_mac_len))
            return
        if not 4 <= suggested_mac_len <= 32:
            _logger.info("suggested mac_len outside permitted range of 4-32 bytes")
            return

        # All jolly good, notify client of chosen MAC and signature length

        # Expand session key
        self.generate_and_set_session_key()

        sa = {
            'mac': selected_mac,
            'mac_len': suggested_mac_len,
        }
        msg = six.int2byte(Message.sa) + cbor.dumps(sa)
        mac = handshake_mac(self.session_key, msg)
        self.init_session_mac(key=self.session_key, func_name=sa['mac'], length=sa['mac_len'])
        self._send(msg + mac)

        # Initialize sequence numbers
        self.other_seq = self.my_seq = 0

        self.state = ServerState.established
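Both copies of respond_to_sa_proposal() parse a message-type byte, an optional cbor dict carrying 'macs' and 'mac_len', and an 8-byte handshake MAC keyed with the shared key and R_a. A sketch of the client method that would produce such a proposal, reusing Message, handshake_mac and ClientState from the surrounding examples (the method itself is an assumption):

    def send_sa_proposal(self, macs=None, mac_len=8):
        # cbor, six, Message, handshake_mac and ClientState come from the same
        # module as the client-side examples above.
        proposal = {'macs': macs or ['sha3_256'], 'mac_len': mac_len}
        msg = six.int2byte(Message.sa_proposal) + cbor.dumps(proposal)
        mac = handshake_mac(self.shared_key, msg, self.R_a)
        self._send(msg + mac)
        self.state = ClientState.wait_for_sa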
Example #25
    def verify_mac(self, message):
        expected_mac = self.get_mac(message[:-self._mac_length])
        return constant_time_compare(message[-self._mac_length:], expected_mac)
Example #26
    def verify_mac(self, message):
        expected_mac = self.get_mac(message[:-self._mac_length])
        return constant_time_compare(message[-self._mac_length:], expected_mac)