Example #1
    def test_upload_form(self):
        # make a correct one
        nonce = 'foobar'
        set_course = 'course_foo'
        username = '******'
        redirect_to = "http://www.example.com/"
        hmc = hmac.new(smart_bytes(settings.MEDIATHREAD_SECRET),
                       smart_bytes(
                           '%s:%s:%s' % (username, redirect_to, nonce)),
                       hashlib.sha1
                       ).hexdigest()

        response = self.c.get(
            "/mediathread/",
            {
                'nonce': nonce,
                'as': username,
                'redirect_url': redirect_to,
                'set_course': set_course,
                'hmac': hmc
            }
        )
        self.assertNotContains(
            response,
            "invalid authentication token")
Example #2
def set_email(username, email):
    """
    Set the email attribute in LDAP. Used for Officer email forwarding.
    Although LDAP allows multiple email attributes, we only allow one.
    """
    ldap_handle = initialize()
    if ldap_handle is None:
        return False

    username = smart_bytes(username)
    email = smart_bytes(email)

    udn = 'uid=%s,%s' % (username, settings.LDAP_BASE['PEOPLE'])
    try:
        email_results = ldap_handle.search_s(
            udn, settings.LDAP['SCOPE'], '(mail=*)')
    except ldap.LDAPError:
        return False

    if len(email_results) == 0:
        attr = [(ldap.MOD_ADD, 'mail', email)]
    else:
        attr = [(ldap.MOD_REPLACE, 'mail', email)]

    try:
        ldap_handle.modify_s(udn, attr)
    except ldap.LDAPError:
        return False
    return True
Example #3
    def get_signature_hmac(self, data, message):
        signature_hmac = hmac.new(
            key=smart_bytes(settings.CMSCLOUD_SYNC_KEY),
            msg=smart_bytes(message),
            digestmod=hashlib.sha1,
        )
        return signature_hmac
Example #4
def generate_sha1(string, salt=None):
    """
    Generates a sha1 hash for supplied string. Doesn't need to be very secure
    because it's not used for password checking. We got Django for that.

    :param string:
        The string that needs to be encrypted.

    :param salt:
        Optionally define your own salt. If none is supplied, will use a random
        string of 5 characters.

    :return: Tuple containing the salt and hash.

    """
    if not isinstance(string, (str, text_type)):
        string = str(string)

    if not salt:
        salt = sha1(str(random.random()).encode('utf-8')).hexdigest()[:5]

    salted_bytes = (smart_bytes(salt) + smart_bytes(string))
    hash_ = sha1(salted_bytes).hexdigest()

    return salt, hash_
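
A short usage sketch of the same salted-SHA1 pattern, kept standalone so it runs without the surrounding module (the helper name here is illustrative):

import random
from hashlib import sha1

from django.utils.encoding import smart_bytes


def salted_sha1(value, salt=None):
    # Same idea as generate_sha1 above: a 5-character salt derived from random(),
    # then sha1(salt + value). Not meant for password storage.
    if salt is None:
        salt = sha1(str(random.random()).encode('utf-8')).hexdigest()[:5]
    return salt, sha1(smart_bytes(salt) + smart_bytes(value)).hexdigest()


salt, digest = salted_sha1('user@example.com')
# Deterministic once the salt is fixed:
assert salted_sha1('user@example.com', salt) == (salt, digest)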
Example #5
def mod_user_group(username, group, action=ldap.MOD_ADD):
    """
    Adds or removes a user to/from an LDAP group. Default action is to add.
    Returns True on success, False otherwise (including errors)
    """
    if action not in [ldap.MOD_ADD, ldap.MOD_DELETE]:
        return False

    ldap_handle = initialize()
    if ldap_handle is None:
        return False

    username = smart_bytes(username)
    group = smart_bytes(group)

    gdn = 'cn=%s,%s' % (group, settings.LDAP_BASE['GROUP'])
    if get_group_member_attr(group) == 'memberUid':
        attr = [(action, 'memberUid', username)]
    else:
        udn = 'uid=%s,%s' % (username, settings.LDAP_BASE['PEOPLE'])
        attr = [(action, 'member', udn)]

    try:
        ldap_handle.modify_s(gdn, attr)
    except ldap.LDAPError:
        return False
    return True
Example #6
    def delete(self):
        if settings.DEFAULT_FILE_STORAGE == "utils.backends.s3_boto.S3BotoStorage":
            bucket, key = open_s3()
            bucket.delete_key(smart_bytes(str(self.original_file)))
            bucket.delete_key(smart_bytes(str(self.profile_thumbnail)))
            bucket.delete_key(smart_bytes(str(self.search_thumbnail)))
        # TODO needs work
        """
        else:
            #Prevent trying to access undefined variable error if the files truly do not exist on the server
            absolute_original_path = None
            absolute_thumbnail_path = None

            #If we are going to delete the file from the database, we need to remove the file from the file system.
            #We need to remove both the thumbnail and the original photo
            if self.thumbnail_file:
                absolute_thumbnail_path = os.path.join(settings.MEDIA_ROOT, str(self.thumbnail_file))
                #Make sure all of the separators (the forward and backward slashes) are aligned correctly
                absolute_thumbnail_path = os.path.normpath(absolute_thumbnail_path)
            if self.original_file:
                absolute_original_path = os.path.join(settings.MEDIA_ROOT, str(self.original_file))
                #Make sure all of the separators (the forward and backward slashes) are aligned correctly
                absolute_original_path = os.path.normpath(absolute_original_path)

            if absolute_thumbnail_path and os.path.isfile(absolute_thumbnail_path):
                os.remove(absolute_thumbnail_path)

            if absolute_original_path and os.path.isfile(absolute_original_path):
                os.remove(absolute_original_path)
        """

        # Actually delete the Photo object from the DB
        super(Photo, self).delete()
Example #7
def obfuscate(password):
    """
    Hashes a password using (salted) SHA1 algorithm.
    Also converts password to byte string before processing.
    A None or '' password is considered unusable.
    Unusable passwords will be corrupted with a UNUSABLE_PASSWORD_PREFIX and
    instead hash a random password.
    OpenLDAP 2.4.x only supports SHA1. Would be nice if they supported
    an algorithm that is more secure.
    """
    hash_prefix = LDAP_HASH_PREFIX
    # Set unusable password if None or empty string
    if not password:
        hash_prefix += UNUSABLE_PASSWORD_PREFIX
        password = get_random_string(
            length=UNUSABLE_PASSWORD_SUFFIX_LENGTH,
            allowed_chars=string.printable)
    try:
        salt = os.urandom(LDAP_SALT_LENGTH)
    except NotImplementedError:
        salt = ''.join(
            [chr(random.randint(0, 255)) for _ in range(LDAP_SALT_LENGTH)])
    hmsg = hashlib.new('sha1')
    # openldap SSHA uses this format:
    # '{SSHA}' + b64encode(sha1_digest('secret' + 'salt') + 'salt')
    hmsg.update(smart_bytes(password))
    hmsg.update(salt)
    pw_hash = base64.b64encode(hmsg.digest() + salt)
    return smart_bytes(hash_prefix + pw_hash)
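
For reference, a minimal standalone check of the `{SSHA}` layout described in the comment above; the 4-byte salt length here is arbitrary, and `LDAP_HASH_PREFIX` plus the unusable-password constants are project-specific and omitted:

import base64
import hashlib
import os


def make_ssha(password, salt=None):
    # OpenLDAP SSHA: '{SSHA}' + b64encode(sha1(password + salt) + salt)
    salt = os.urandom(4) if salt is None else salt
    digest = hashlib.sha1(password.encode('utf-8') + salt).digest()
    return '{SSHA}' + base64.b64encode(digest + salt).decode('ascii')


def check_ssha(password, ssha_value):
    raw = base64.b64decode(ssha_value[len('{SSHA}'):])
    digest, salt = raw[:20], raw[20:]  # SHA-1 digests are 20 bytes long
    return hashlib.sha1(password.encode('utf-8') + salt).digest() == digest


ssha = make_ssha('s3cret')
assert check_ssha('s3cret', ssha) and not check_ssha('wrong', ssha)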
Example #8
    def urlencode(self, safe=None):
        """
        Returns an encoded string of all query string arguments.

        :arg safe: Used to specify characters which do not require quoting, for
            example::

                >>> q = QueryDict('', mutable=True)
                >>> q['next'] = '/a&b/'
                >>> q.urlencode()
                'next=%2Fa%26b%2F'
                >>> q.urlencode(safe='/')
                'next=/a%26b/'

        """
        output = []
        if safe:
            safe = smart_bytes(safe, self.encoding)
            encode = lambda k, v: '%s=%s' % ((quote(k, safe), quote(v, safe)))
        else:
            encode = lambda k, v: urlencode({k: v})
        for k, list_ in self.lists():
            k = smart_bytes(k, self.encoding)
            output.extend([encode(k, smart_bytes(v, self.encoding))
                           for v in list_])
        return '&'.join(output)
Example #9
def generate_cache_key(prefix, *args, **kwargs):
    arg_str = ":".join(smart_bytes(a) for a in args)
    kwarg_str = ":".join("{}={}".format(smart_bytes(k), smart_bytes(v)) for k, v in kwargs.items())
    key_str = "{}::{}".format(arg_str, kwarg_str)
    argkwarg_str = md5(smart_bytes(key_str)).hexdigest()
    if not isinstance(prefix, basestring):
        prefix = "_".join(str(a) for a in prefix)
    return "{}__{}".format(prefix, argkwarg_str)
Example #10
    def is_valid(self):
        verify = hmac.new(
            smart_bytes(settings.MEDIATHREAD_SECRET),
            smart_bytes(
                '%s:%s:%s' % (self.username, self.redirect_to, self.nonce)),
            hashlib.sha1
        ).hexdigest()
        return verify == self.hmc
Example #11
def make_key(k, with_locale=True):
    """Generate the full key for ``k``, with a prefix."""
    key = encoding.smart_bytes('%s:%s' % (config.CACHE_PREFIX, k))
    if with_locale:
        key += encoding.smart_bytes(translation.get_language())
    # memcached keys must be < 250 bytes and w/o whitespace, but it's nice
    # to see the keys when using locmem.
    return hashlib.md5(key).hexdigest()
Example #12
def make_key(key, with_locale=True, normalize=False):
    """Generate the full key for ``k``, with a prefix."""
    if with_locale:
        key = u'{key}:{lang}'.format(
            key=key, lang=translation.get_language())

    if normalize:
        return hashlib.md5(encoding.smart_bytes(key)).hexdigest()
    return encoding.smart_bytes(key)
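
Spelled out, the two return shapes of this variant, with the locale suffix hard-coded ('en-us' stands in for `translation.get_language()` so the snippet runs without configured settings):

import hashlib

from django.utils import encoding

key = u'{key}:{lang}'.format(key='addon:1234', lang='en-us')
raw_key = encoding.smart_bytes(key)                               # normalize=False
hashed_key = hashlib.md5(encoding.smart_bytes(key)).hexdigest()   # normalize=True
assert raw_key == b'addon:1234:en-us'
assert hashed_key == hashlib.md5(raw_key).hexdigest() and len(hashed_key) == 32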
Example #13
    def get_topic_hash(self):
        topic_hash = self.cleaned_data.get('topic_hash', None)

        if topic_hash:
            return topic_hash

        return utils.get_hash((
            smart_bytes(self.cleaned_data['title']),
            smart_bytes('category-{}'.format(self.cleaned_data['category'].pk))))
Example #14
    def _test_update(self, base_form, update_view=TransactionalUpdateView):
        self.assertEqual(0, TestModel.objects.count())
        tm1 = TestModel.objects.create(unique=1)
        tm2 = TestModel.objects.create(unique=2)

        def first(semaphore):
            try:
                first.as_expected = False
                view = get_acquiring_view(base_form, update_view, semaphore).as_view()
                request = self.factory.post("/", { 'id': tm1.pk, 'unique': '3' })
                first.response = view(request, pk=tm1.pk)
                first.response.render()
                first.as_expected = True
            finally:
                connection.close()

        def second(semaphore):
            try:
                second.as_expected = False
                view = get_releasing_view(base_form, update_view, semaphore).as_view()
                request = self.factory.post("/", { 'id': tm2.pk, 'unique': '3' })
                second.response = view(request, pk=tm2.pk)
                second.as_expected = True
            finally:
                connection.close()

        semaphore = threading.Semaphore()
        semaphore.acquire()
        first_thread = threading.Thread(target=first, args=[semaphore])
        second_thread = threading.Thread(target=second, args=[semaphore])
        first_thread.daemon = True
        second_thread.daemon = True

        first_thread.start()
        second_thread.start()
        second_thread.join()
        first_thread.join()

        # okay, so the validation checks should both have passed
        # before either thread got to saving, so we should get the
        # releasing view (ie the second thread) having succeeded and
        # the first displaying a validation error on its form.
        self.assertEqual(True, first.as_expected)
        self.assertEqual(True, second.as_expected)
        self.assertEqual(2, TestModel.objects.count())
        tm1 = TestModel.objects.get(pk=tm1.pk)
        self.assertEqual(1, tm1.unique)
        tm2 = TestModel.objects.get(pk=tm2.pk)
        self.assertEqual(3, tm2.unique)

        self.assertEqual(200, first.response.status_code)
        self.assertTrue(smart_bytes('errorlist') in first.response.content)
        self.assertTrue(smart_bytes('errorlist') not in second.response.content)
        self.assertEqual(302, second.response.status_code)
        self.assertEqual('/', second.response['Location'])
        return first, second
Example #15
    def test_smart_bytes(self):
        class Test:
            def __str__(self):
                return 'ŠĐĆŽćžšđ'

        lazy_func = gettext_lazy('x')
        self.assertIs(smart_bytes(lazy_func), lazy_func)
        self.assertEqual(smart_bytes(Test()), b'\xc5\xa0\xc4\x90\xc4\x86\xc5\xbd\xc4\x87\xc5\xbe\xc5\xa1\xc4\x91')
        self.assertEqual(smart_bytes(1), b'1')
        self.assertEqual(smart_bytes('foo'), b'foo')
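
For contrast, a small sketch of the lazy-object behaviour this test asserts, built on `django.utils.functional.lazy` so it runs without translations configured; unlike `smart_bytes`, `force_bytes` evaluates the lazy object:

from django.utils.encoding import force_bytes, smart_bytes
from django.utils.functional import lazy

# lazy() wraps a callable the same way gettext_lazy() does, minus the i18n machinery.
lazy_upper = lazy(lambda: 'šđ'.upper(), str)()

assert smart_bytes(lazy_upper) is lazy_upper             # lazy objects pass through untouched
assert force_bytes(lazy_upper) == 'ŠĐ'.encode('utf-8')   # force_bytes resolves and encodes them
assert smart_bytes('šđ') == b'\xc5\xa1\xc4\x91'          # plain strings are UTF-8 encoded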
Example #16
    def get_filtered_queryset(self, qs):
        filters = {}
        query_string = self.GET.get("query_string", None)

        if query_string:
            for item in query_string.split("&"):
                k, v = item.split("=")
                if k != "t":
                    filters[smart_bytes(k)] = prepare_lookup_value(smart_bytes(k), smart_bytes(v))
        return qs.filter(**filters)
Example #17
def generate_signature(policy, secret_key):
    """
    Returns a Base64-encoded signature value that authorizes
    the form and proves that only you could have created it.
    This value is calculated by signing the Base64-encoded
    policy document with your AWS Secret Key.
    """
    hmac_signature = hmac.new(
        smart_bytes(secret_key), smart_bytes(policy), hashlib.sha1)

    return base64.b64encode(hmac_signature.digest())
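
A usage sketch for the browser-based S3 POST upload flow this signs: the `policy` argument is the Base64-encoded JSON policy document, and the secret key below is a throwaway placeholder:

import base64
import hashlib
import hmac
import json

from django.utils.encoding import smart_bytes

policy_document = {
    "expiration": "2030-01-01T00:00:00Z",
    "conditions": [{"bucket": "example-bucket"}, ["starts-with", "$key", "uploads/"]],
}
encoded_policy = base64.b64encode(json.dumps(policy_document).encode('utf-8'))
# Equivalent to generate_signature(encoded_policy, 'FAKE/AWS/SECRET+KEY'):
signature = base64.b64encode(
    hmac.new(smart_bytes('FAKE/AWS/SECRET+KEY'), smart_bytes(encoded_policy),
             hashlib.sha1).digest())
assert len(base64.b64decode(signature)) == 20  # a SHA-1 digest is 20 bytes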
Example #18
def rename_user(username, new_username):
    """
    Renames a user. Only updates LDAP. Does not save model.
    Both arguments are mandatory.
    Returns a (bool, string) tuple for success and reason.
    """
    if not (username and new_username):
        return (False, 'Invalid username argument(s)')

    ldap_handle = initialize()
    if ldap_handle is None:
        return (False, 'LDAP connection failed')

    username = smart_bytes(username)
    new_username = smart_bytes(new_username)

    # Validate new username
    if USERNAME_REGEX.match(new_username) is None:
        return (False, 'Invalid username. ' + settings.USERNAME_HELPTEXT)

    # Make sure new username not yet taken
    if username_exists(new_username):
        return (False, 'Requested username is already taken')

    old_dn = 'uid=%s,%s' % (username, settings.LDAP_BASE['PEOPLE'])
    new_rdn = 'uid=%s' % (new_username)

    try:
        ldap_handle.rename_s(old_dn, new_rdn)
    except ldap.LDAPError:
        return (False, 'LDAP error while renaming user')

    # Search in posixGroups (i.e. cn=web) for old username and replace
    # with new username. groupOfNames are automatically changed by rename_s
    search_str = '(memberUid=%s)' % (username)
    try:
        group_results = ldap_handle.search_s(
            settings.LDAP_BASE['GROUP'], settings.LDAP['SCOPE'], search_str)
    except ldap.LDAPError:
        return (False, 'LDAP error while searching for group memberships')

    for (gdn, gattr) in group_results:
        new_g = copy.copy(gattr)
        new_g['memberUid'] = [member if member != username else new_username
                              for member in gattr['memberUid']]
        mod_g = ldap.modlist.modifyModlist(gattr, new_g)
        try:
            ldap_handle.modify_s(gdn, mod_g)
        except ldap.LDAPError:
            return (False, 'LDAP error while migrating groups')

    return (True, 'User %s renamed to %s' % (username, new_username))
Example #19
    def to_python(self, value):
        """Overrides ``models.Field`` method. This is used to convert
        bytes (from serialization etc) to an instance of this class"""
        if value is None:
            return None
        elif isinstance(value, oauth2client.client.Credentials):
            return value
        else:
            try:
                return jsonpickle.decode(
                    base64.b64decode(encoding.smart_bytes(value)).decode())
            except ValueError:
                return pickle.loads(
                    base64.b64decode(encoding.smart_bytes(value)))
Example #20
    def process_response(self, request, response):
        html_types = ('text/html', 'application/xhtml+xml')
        content_type = response.get('Content-Type', '').split(';')[0]
        content_encoding = response.get('Content-Encoding', '')
        if any((getattr(response, 'streaming', False),
                'gzip' in content_encoding,
                content_type not in html_types)):
            return response

        if settings.RESPONSIVE_COOKIE_NAME not in request.COOKIES \
                or getattr(request, 'INVALID_RESPONSIVE_COOKIE', False):
            expires = datetime.datetime.utcnow() + \
                datetime.timedelta(days=settings.RESPONSIVE_COOKIE_AGE)
            snippet = render_to_string('responsive/snippet.html', {
                'cookie_name': settings.RESPONSIVE_COOKIE_NAME,
                'cookie_age': 60 * 60 * 24 * settings.RESPONSIVE_COOKIE_AGE,  # convert to secs
                'cookie_expires': expires.strftime('%a, %d %b %Y %H:%M:%S GMT')
            })
            pattern = re.compile(b'<head>', re.IGNORECASE)
            response.content = pattern.sub(b'<head>' + smart_bytes(snippet), response.content)

            if response.get('Content-Length', None):
                response['Content-Length'] = len(response.content)

        patch_vary_headers(response, ('Cookie', ))
        return response
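
The byte-level substitution in isolation, with a made-up snippet: `response.content` is bytes, so the rendered template has to go through `smart_bytes` before concatenation:

import re

from django.utils.encoding import smart_bytes

snippet = '<script>document.cookie = "rwd=1; path=/";</script>'  # placeholder snippet
content = b'<html><HEAD><title>x</title></head><body></body></html>'
pattern = re.compile(b'<head>', re.IGNORECASE)
patched = pattern.sub(b'<head>' + smart_bytes(snippet), content)
assert smart_bytes(snippet) in patched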
Example #21
def default_username_algo(email):
    # store the username as a base64 encoded sha1 of the email address
    # this protects against data leakage because usernames are often
    # treated as public identifiers (so we can't use the email address).
    return base64.urlsafe_b64encode(
        hashlib.sha1(smart_bytes(email)).digest()
    ).rstrip(b'=')
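
What this produces for a given address, as a standalone check: a fixed-length, URL-safe token rather than the raw email:

import base64
import hashlib

from django.utils.encoding import smart_bytes

email = 'someone@example.com'
username = base64.urlsafe_b64encode(hashlib.sha1(smart_bytes(email)).digest()).rstrip(b'=')
# A 20-byte SHA-1 digest encodes to 28 base64 characters, one of which is '=' padding.
assert len(username) == 27 and username != smart_bytes(email)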
Example #22
    def test_large_upload(self):
        tdir = tempfile.gettempdir()

        file1 = tempfile.NamedTemporaryFile(suffix=".file1", dir=tdir)
        file1.write(b'a' * (2 ** 21))
        file1.seek(0)

        file2 = tempfile.NamedTemporaryFile(suffix=".file2", dir=tdir)
        file2.write(b'a' * (10 * 2 ** 20))
        file2.seek(0)

        post_data = {
            'name': 'Ringo',
            'file_field1': file1,
            'file_field2': file2,
            }

        for key in list(post_data):
            try:
                post_data[key + '_hash'] = hashlib.sha1(post_data[key].read()).hexdigest()
                post_data[key].seek(0)
            except AttributeError:
                post_data[key + '_hash'] = hashlib.sha1(smart_bytes(post_data[key])).hexdigest()

        response = self.client.post('/file_uploads/verify/', post_data)

        self.assertEqual(response.status_code, 200)
Example #23
    def to_python(self, value):
        """
        :param value:
        :return:
        """
        if not self.deserialize:

            return super(JsonField, self).to_python(value)

        if value == "":
            return None

        try:
            if isinstance(value, six.text_type):
                value = json.loads(value, encoding=settings.DEFAULT_CHARSET)

            elif isinstance(value, six.string_types):
                value = json.loads(smart_text(value), encoding=settings.DEFAULT_CHARSET)

            elif type(value) == UnicodeType:
                # Fallback Unicode problem
                value = json.loads(smart_bytes(value), encoding=settings.DEFAULT_CHARSET)

        except ValueError:
            try:
                # Fallback: django serializer may cast dict or list value as string
                value = ast.literal_eval(value)
            except ValueError:
                raise
        return value
Example #24
    def execute_command(self, command, content):
        pipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                                stdin=subprocess.PIPE, stderr=subprocess.PIPE)

        try:
            pipe.stdin.write(smart_bytes(content))
        except IOError as e:
            message = "Unable to pipe content to command: %s" % command
            raise CompressorError(message, e)
        pipe.stdin.close()

        compressed_content = pipe.stdout.read()
        pipe.stdout.close()

        error = pipe.stderr.read()
        pipe.stderr.close()

        if pipe.wait() != 0:
            if not error:
                error = "Unable to apply %s compressor" % self.__class__.__name__
            raise CompressorError(error)

        if self.verbose:
            print(error)
        return compressed_content
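
The same piping idea with `subprocess.run`, sketched with `cat` standing in for a real compressor binary (assumes a POSIX environment where `cat` exists); stdin of a piped command must receive bytes, hence `smart_bytes`:

import subprocess

from django.utils.encoding import smart_bytes

content = 'body { color: black; }\n'
result = subprocess.run(['cat'], input=smart_bytes(content),
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
assert result.stdout == smart_bytes(content)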
Example #25
    def get_comment_hash(self):
        assert self.topic

        # This gets saved into
        # User.last_post_hash,
        # it does not matter whether
        # is a safe string or not
        comment_hash = self.cleaned_data.get('comment_hash', None)

        if comment_hash:
            return comment_hash

        return utils.get_hash((
            smart_bytes(self.cleaned_data['comment']),
            smart_bytes('thread-{}'.format(self.topic.pk))
        ))
Example #26
def get_group_members(group):
    """
    Return a list of the members in the group.
    """
    ldap_handle = initialize()
    if ldap_handle is None:
        return False

    group = smart_bytes(group)
    searchstr = '(cn=%s)' % group
    try:
        entry = ldap_handle.search_s(settings.LDAP_BASE['GROUP'],
                                     settings.LDAP['SCOPE'],
                                     searchstr)
    except ldap.LDAPError:
        return False

    # Should only return one successful entry (since there should only be one
    # group that matches the group parameter used with this function)
    if len(entry) != 1 or len(entry[0]) != 2:
        return False

    member_attribute = get_group_member_attr(group)
    entry_properties = entry[0][1]
    if member_attribute in entry_properties:
        return entry_properties[member_attribute]
    else:
        # It is possible for a posixGroup to have no members, in which case
        # 'memberUid' will not be listed in the entry's properties, indicating
        # that there are no members (so return an empty list):
        return []
Example #27
def has_usable_password(username):
    """
    Gets the user's password entry from LDAP.
    Returns False if it's an unusable password, or encounters LDAP errors
    """
    ldap_handle = initialize()
    if ldap_handle is None:
        return False

    username = smart_bytes(username)
    try:
        entry = ldap_handle.search_s(
            settings.LDAP_BASE['PEOPLE'],
            settings.LDAP['SCOPE'],
            '(uid=%s)' % username,
            ['userPassword'])
    except ldap.LDAPError:
        return False
    entry_len = len(entry)
    if entry_len != 1:
        mail_admins('LDAP Anomaly Detected',
                    'Found %d userPassword attributes for %s ' % (
                        entry_len,
                        username))
    # Invalid search result. Each search result must be a 2-tuple
    if len(entry[0]) != 2:
        return False
    pw_hash = get_property(entry[0][1], 'userPassword')
    return pw_hash and UNUSABLE_PASSWORD_PREFIX not in pw_hash
Example #28
def get_group_member_attr(group):
    """
    Returns the group member attribute type for this group: "member" if this
    group is of the objectClass groupOfNames, or "memberUid" if it is of the
    objectClass posixGroup.
    """
    ldap_handle = initialize()
    if ldap_handle is None:
        return False

    group = smart_bytes(group)
    searchstr = '(cn=%s)' % group
    try:
        entry = ldap_handle.search_s(settings.LDAP_BASE['GROUP'],
                                     settings.LDAP['SCOPE'],
                                     searchstr)
    except ldap.LDAPError:
        return False

    # Should only return one successful entry (since there should only be one
    # group that matches the group parameter used with this function)
    if len(entry) != 1 or len(entry[0]) != 2:
        return False

    entry_properties = entry[0][1]
    group_classes = entry_properties['objectClass']
    if 'posixGroup' in group_classes:
        member_attribute = 'memberUid'
    elif 'groupOfNames' in group_classes:
        member_attribute = 'member'
    else:
        return False
    return member_attribute
Example #29
def get_email(username):
    """
    Gets the user's email attribute from LDAP
    Returns the string (currently bytestring) or False if an error occurred.
    """
    ldap_handle = initialize()
    if ldap_handle is None:
        return False

    username = smart_bytes(username)
    udn = 'uid=%s,%s' % (username, settings.LDAP_BASE['PEOPLE'])
    try:
        entry = ldap_handle.search_s(
            udn, settings.LDAP['SCOPE'], '(mail=*)', ['mail'])
    except ldap.LDAPError:
        return False
    entry_len = len(entry)
    if entry_len == 0:
        mail_admins('LDAP Anomaly Detected',
                    'No search results found for %s in "get_email"' % username)
    elif entry_len > 1:
        entries = []
        for e in entry:
            entries.append(e[0])
        mail_admins('LDAP Anomaly Detected',
                    'Multiple search results for %s in "get_email":\n%s' % (
                        username, '\n'.join(entries)))
        return False
    # Invalid search result. Each search result must be a 2-tuple
    if len(entry[0]) != 2:
        return False
    return get_property(entry[0][1], 'mail') or False
Example #30
File: auth.py Project: cloudera/hue
    def _verify_jws(self, payload, key):
        """Verify the given JWS payload with the given key and return the payload"""
        jws = JWS.from_compact(payload)

        try:
            alg = jws.signature.combined.alg.name
        except KeyError:
            msg = 'No alg value found in header'
            raise SuspiciousOperation(msg)

        if alg != self.OIDC_RP_SIGN_ALGO:
            msg = "The provider algorithm {!r} does not match the client's " \
                  "OIDC_RP_SIGN_ALGO.".format(alg)
            raise SuspiciousOperation(msg)

        if isinstance(key, six.string_types):
            # Use smart_bytes here since the key string comes from settings.
            jwk = JWK.load(smart_bytes(key))
        else:
            # The key is a json returned from the IDP JWKS endpoint.
            jwk = JWK.from_json(key)

        if not jws.verify(jwk):
            msg = 'JWS token verification failed.'
            raise SuspiciousOperation(msg)

        return jws.payload
Example #31
def obj_2_hexa(obj):
    job = json.dumps(obj)
    dev = binascii.hexlify(smart_bytes(job))
    return smart_str(dev)
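
A round trip for this helper, inlined so it stays self-contained: JSON text to a hex string and back to the original object:

import binascii
import json

from django.utils.encoding import smart_bytes, smart_str

payload = {'id': 7, 'tags': ['a', 'b']}
hexa = smart_str(binascii.hexlify(smart_bytes(json.dumps(payload))))  # what obj_2_hexa returns
assert json.loads(binascii.unhexlify(smart_bytes(hexa))) == payload   # and back again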
Example #32
    def save_file(self, path, content):
        return self.storage.save(path, ContentFile(smart_bytes(content)))
Example #33
    def log(self, **kwargs):
        """
        Simple Example:
            from tendenci.apps.event_logs.models import EventLog
            EventLog.objects.log()
        
        If you have a Tendenci Base Object, then use the following
        
            EventLog.objects.log(instance=obj_local_var)

        """
        request, user, instance = None, None, None

        stack = inspect.stack()

        # If the request is not present in the kwargs, we try to find it
        # by inspecting the stack. We dive 3 levels if necessary. - JMO 2012-05-14
        if 'request' in kwargs:
            request = kwargs['request']
        else:
            if 'request' in inspect.getargvalues(stack[1][0]).locals:
                request = inspect.getargvalues(stack[1][0]).locals['request']
            elif 'request' in inspect.getargvalues(stack[2][0]).locals:
                request = inspect.getargvalues(stack[2][0]).locals['request']
            elif 'request' in inspect.getargvalues(stack[3][0]).locals:
                request = inspect.getargvalues(stack[3][0]).locals['request']

        # If this eventlog is being triggered by something without a request, we
        # do not want to log it. This is usually some other form of logging
        # like Contributions or perhaps Versions in the future. - JMO 2012-05-14
        if not request:
            return None

        # skip if pingdom
        if 'pingdom.com' in request.META.get('HTTP_USER_AGENT', ''):
            return None

        event_log = self.model()

        # Set the following fields to blank
        event_log.guid = ""
        event_log.source = ""
        event_log.event_id = 0
        event_log.event_name = ""
        event_log.event_type = ""
        event_log.event_data = ""
        event_log.category = ""

        if 'instance' in kwargs:
            instance = kwargs['instance']
            ct = ContentType.objects.get_for_model(instance)
            event_log.content_type = ct
            event_log.object_id = instance.pk
            event_log.headline = unicode(instance)[:50]
            event_log.model_name = ct.name
            event_log.application = instance.__module__
            if hasattr(instance, 'guid'):
                event_log.uuid = instance.guid

        event_log.entity = None
        if 'entity' in kwargs:
            event_log.entity = kwargs['entity']

        # Allow a description to be added in special cases like impersonation
        event_log.description = ""
        if 'description' in kwargs:
            event_log.description = kwargs['description']

        # Application is the name of the app that the event is coming from
        #
        # We get the app name via inspecting. Due to our update_perms_and_save util
        # we must filter out perms as an actual source. This is ok since there are
        # no views within perms. - JMO 2012-05-14
        if 'application' in kwargs:
            event_log.application = kwargs['application']

        if not event_log.application:
            event_log.application = inspect.getmodule(stack[1][0]).__name__
            if "perms" in event_log.application.split('.'):
                event_log.application = inspect.getmodule(stack[2][0]).__name__
                if "perms" in event_log.application.split('.'):
                    event_log.application = inspect.getmodule(
                        stack[3][0]).__name__

        event_log.application = event_log.application.split('.')
        remove_list = [
            'tendenci', 'models', 'views', 'addons', 'core', 'apps', 'contrib'
        ]

        for item in remove_list:
            if item in event_log.application:
                event_log.application.remove(item)

        # Join on the chance that we are left with more than one item
        # in the list that we created
        event_log.application = ".".join(event_log.application)

        # Action is the name of the view that is being called
        #
        # We get it via the stack, but we filter out stacks that are named
        # 'save' or 'update_perms_and_save' to avoid getting the incorrect
        # view. We don't want to miss on a save method override or our own
        # updating. - JMO 2012-05-14
        if 'action' in kwargs:
            event_log.action = kwargs['action']
        else:
            event_log.action = stack[1][3]
            if stack[1][3] == "save":
                if stack[2][3] == "save" or stack[2][
                        3] == "update_perms_and_save":
                    if stack[3][3] == "update_perms_and_save":
                        event_log.action = stack[4][3]
                    else:
                        event_log.action = stack[3][3]
                else:
                    event_log.action = stack[2][3]

        if event_log.application == "base":
            event_log.application = "homepage"

        if 'user' in kwargs:
            user = kwargs['user']
        else:
            user = request.user

        # set up the user information
        if user:
            # check for impersonation and set the correct user, descriptions, etc
            impersonated_user = getattr(user, 'impersonated_user', None)
            if impersonated_user:
                if event_log.description:
                    event_log.description = '%s (impersonating %s)' % (
                        event_log.description,
                        impersonated_user.username,
                    )
                else:
                    event_log.description = '(impersonating %s)' % (
                        impersonated_user.username, )

            if isinstance(user, AnonymousUser):
                event_log.username = '******'
            else:
                event_log.user = user
                event_log.username = user.username
                event_log.email = user.email

        # setup request meta information
        if request:
            if hasattr(request, 'COOKIES'):
                event_log.session_id = request.COOKIES.get('sessionid', '')

            if hasattr(request, 'META'):
                # Check for HTTP_X_REAL_IP first in case we are
                # behind a load balancer
                event_log.user_ip_address = request.META.get(
                    'HTTP_X_FORWARDED_FOR',
                    request.META.get('REMOTE_ADDR', ''))
                if "," in event_log.user_ip_address:
                    event_log.user_ip_address = event_log.user_ip_address.split(
                        ",")[-1].replace(" ", "")

                event_log.user_ip_address = event_log.user_ip_address[-15:]
                event_log.http_referrer = smart_bytes(request.META.get(
                    'HTTP_REFERER', '')[:255],
                                                      errors='replace')
                event_log.http_user_agent = smart_bytes(request.META.get(
                    'HTTP_USER_AGENT', ''),
                                                        errors='replace')
                event_log.request_method = request.META.get(
                    'REQUEST_METHOD', '')
                event_log.query_string = request.META.get('QUERY_STRING', '')

                # take care of robots
                robot = Robot.objects.get_by_agent(event_log.http_user_agent)
                if robot:
                    event_log.robot = robot

            try:
                event_log.server_ip_address = gethostbyname(gethostname())
            except:
                try:
                    event_log.server_ip_address = settings.INTERNAL_IPS[0]
                except:
                    event_log.server_ip_address = '0.0.0.0'
            if hasattr(request, 'path'):
                event_log.url = request.path or ''

        # If we have an IP address, save the event_log
        if "." in event_log.user_ip_address:
            event_log.save()
            return event_log
        else:
            return None
Example #34
def get_hexdigest(plaintext, length=None):
    digest = md5(smart_bytes(plaintext)).hexdigest()
    if length:
        return digest[:length]
    return digest
Example #35
    def __str__(self):
        return smart_bytes(self._key)
Example #36
    def __repr__(self):
        return smart_bytes(
            "<%s: %s (%s)>" %
            (self.__class__.__name__, self.name, self.content_type))
Example #37
    def update_backend(self, label, using):
        backend = haystack_connections[using].get_backend()
        unified_index = haystack_connections[using].get_unified_index()

        for model in haystack_get_models(label):
            try:
                index = unified_index.get_index(model)
            except NotHandled:
                if self.verbosity >= 2:
                    self.stdout.write("Skipping '%s' - no index." % model)
                continue

            if self.workers > 0:
                # workers resetting connections leads to references to models / connections getting
                # stale and having their connection disconnected from under them. Resetting before
                # the loop continues and it accesses the ORM makes it better.
                close_old_connections()

            qs = index.build_queryset(using=using,
                                      start_date=self.start_date,
                                      end_date=self.end_date)

            total = qs.count()

            if self.verbosity >= 1:
                self.stdout.write(
                    u"Indexing %d %s" %
                    (total, force_text(model._meta.verbose_name_plural)))

            batch_size = self.batchsize or backend.batch_size

            if self.workers > 0:
                ghetto_queue = []

            for start in range(0, total, batch_size):
                end = min(start + batch_size, total)

                if self.workers == 0:
                    do_update(backend,
                              index,
                              qs,
                              start,
                              end,
                              total,
                              verbosity=self.verbosity,
                              commit=self.commit,
                              max_retries=self.max_retries)
                else:
                    ghetto_queue.append(
                        (model, start, end, total, using, self.start_date,
                         self.end_date, self.verbosity, self.commit,
                         self.max_retries))

            if self.workers > 0:
                pool = multiprocessing.Pool(self.workers)

                successful_tasks = pool.map(update_worker, ghetto_queue)

                if len(ghetto_queue) != len(successful_tasks):
                    self.stderr.write(
                        'Queued %d tasks but only %d completed' %
                        (len(ghetto_queue), len(successful_tasks)))
                    for i in ghetto_queue:
                        if i not in successful_tasks:
                            self.stderr.write('Incomplete task: %s' % repr(i))

                pool.close()
                pool.join()

            if self.remove:
                if self.start_date or self.end_date or total <= 0:
                    # They're using a reduced set, which may not incorporate
                    # all pks. Rebuild the list with everything.
                    qs = index.index_queryset().values_list('pk', flat=True)
                    database_pks = set(smart_bytes(pk) for pk in qs)

                    total = len(database_pks)
                else:
                    database_pks = set(
                        smart_bytes(pk)
                        for pk in qs.values_list('pk', flat=True))

                # Since records may still be in the search index but not the local database
                # we'll use that to create batches for processing.
                # See https://github.com/django-haystack/django-haystack/issues/1186
                index_total = SearchQuerySet(
                    using=backend.connection_alias).models(model).count()

                # Retrieve PKs from the index. Note that this cannot be a numeric range query because although
                # pks are normally numeric they can be non-numeric UUIDs or other custom values. To reduce
                # load on the search engine, we only retrieve the pk field, which will be checked against the
                # full list obtained from the database, and the id field, which will be used to delete the
                # record should it be found to be stale.
                index_pks = SearchQuerySet(
                    using=backend.connection_alias).models(model)
                index_pks = index_pks.values_list('pk', 'id')

                # We'll collect all of the record IDs which are no longer present in the database and delete
                # them after walking the entire index. This uses more memory than the incremental approach but
                # avoids needing the pagination logic below to account for both commit modes:
                stale_records = set()

                for start in range(0, index_total, batch_size):
                    upper_bound = start + batch_size

                    # If the database pk is no longer present, queue the index key for removal:
                    for pk, rec_id in index_pks[start:upper_bound]:
                        if smart_bytes(pk) not in database_pks:
                            stale_records.add(rec_id)

                if stale_records:
                    if self.verbosity >= 1:
                        self.stdout.write("  removing %d stale records." %
                                          len(stale_records))

                    for rec_id in stale_records:
                        # Since the PK was not in the database list, we'll delete the record from the search
                        # index:
                        if self.verbosity >= 2:
                            self.stdout.write("  removing %s." % rec_id)

                        backend.remove(rec_id, commit=self.commit)
Example #38
    def login(self, request):
        email = request.data.get("email")
        password = request.data.get("password")
        if email is None or password is None:
            return Response(
                {'error': 'Please provide both email and password'},
                status=HTTP_400_BAD_REQUEST)
        ui = BannedUser.objects.filter(email=email)
        if ui:
            return Response(
                {'error': 'You are currently banned, please check your email'},
                status=HTTP_400_BAD_REQUEST)
        user = authenticate(request, username=email, password=password)

        if not user:
            u = User.objects.filter(email=email)
            if (u):
                try:
                    fail_infos = LoginFailInfos.objects.get(email=email)
                    if fail_infos.fail_times == 2:
                        usr_to_ban = User.objects.get(email=email)
                        b = BannedUser(email=email, user=usr_to_ban)
                        b.save()
                        user = User.objects.get(email=email)
                        uidb64 = urlsafe_base64_encode(smart_bytes(user.id))
                        token = PasswordResetTokenGenerator().make_token(user)
                        token_parameters = "?uidb64=" + uidb64 + ";token=" + token
                        link = "http://100.25.223.242:3000/forgot" + "/" + token_parameters
                        template = render_to_string(
                            'email_password_reset_template.html', {
                                'name': user.username,
                                'link': link
                            })
                        send_mail("Change your password", template,
                                  "*****@*****.**", [email])
                        fail_infos.delete()
                        return Response(
                            {'error': 'you are banned check your email'},
                            status=HTTP_404_NOT_FOUND)
                    fail_infos.fail_times = fail_infos.fail_times + 1
                    fail_infos.save()
                except:
                    usr_to_update = User.objects.get(email=email)
                    fi = LoginFailInfos(email=email,
                                        fail_times=1,
                                        user=usr_to_update)
                    fi.save()
            return Response({'error': 'Invalid Credentials'},
                            status=HTTP_404_NOT_FOUND)

        now = datetime.datetime.now().strftime(date_format)
        last_change_time = ""
        try:
            password_info = PasswordChangedDate.objects.get(email=email)
            last_change_time = password_info.last_change
        except:
            l = user.date_joined.strftime(date_format)
            last_change_time = l
            p = PasswordChangedDate(email=email, last_change=l, user=user)
            p.save()
        date_1 = datetime.datetime.strptime(last_change_time, date_format)
        date_2 = datetime.datetime.strptime(now, date_format)
        time_delta = (date_2 - date_1)
        total_seconds = time_delta.total_seconds()
        minutes = total_seconds // 60
        if minutes > 43200:
            template = render_to_string('password_change_warning.html',
                                        {'name': user.username})
            send_mail("Change your password", template, "*****@*****.**",
                      [str(user.email)])
        data = AuthUserSerializer(user).data
        return Response(data=data, status=status.HTTP_200_OK)
Example #39
def _get_fernet():
    """Create a Fernet instance."""
    secret_key = base64.urlsafe_b64encode(smart_bytes(
        settings.SECRET_KEY[:32]))
    return Fernet(secret_key)
Example #40
def encrypt(clear_value):
    """Encrypt a value using secret_key."""
    return smart_text(_get_fernet().encrypt(smart_bytes(clear_value)))
Example #41
def decrypt(encrypted_value):
    """Decrypt a value using secret_key."""
    return smart_text(_get_fernet().decrypt(smart_bytes(encrypted_value)))
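
A round trip through the three helpers above, assuming the `cryptography` package is installed and using a throwaway 32-character string in place of `settings.SECRET_KEY` (Fernet wants exactly 32 url-safe-base64-encoded bytes):

import base64

from cryptography.fernet import Fernet
from django.utils.encoding import smart_bytes, smart_text

secret_key = 'x' * 32  # placeholder for settings.SECRET_KEY[:32]
fernet = Fernet(base64.urlsafe_b64encode(smart_bytes(secret_key)))
token = smart_text(fernet.encrypt(smart_bytes('s3cret value')))        # what encrypt() returns
assert smart_text(fernet.decrypt(smart_bytes(token))) == 's3cret value'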
Example #42
def ticket_from_message(message, queue, logger):
    # 'message' must be an RFC822 formatted message.
    message = email.message_from_string(message)
    subject = message.get('subject', _('Created from e-mail'))
    subject = decode_mail_headers(decodeUnknown(message.get_charset(),
                                                subject))
    for affix in STRIPPED_SUBJECT_STRINGS:
        subject = subject.replace(affix, "")
    subject = subject.strip()

    sender = message.get('from', _('Unknown Sender'))
    sender = decode_mail_headers(decodeUnknown(message.get_charset(), sender))
    sender_email = email.utils.parseaddr(sender)[1]

    for ignore in IgnoreEmail.objects.filter(
            Q(queues=queue) | Q(queues__isnull=True)):
        if ignore.test(sender_email):
            if ignore.keep_in_mailbox:
                # By returning 'False' the message will be kept in the mailbox,
                # and the 'True' will cause the message to be deleted.
                return False
            return True

    matchobj = re.match(r".*\[" + queue.slug + "-(?P<id>\d+)\]", subject)
    if matchobj:
        # This is a reply or forward.
        ticket = matchobj.group('id')
        logger.info("Matched tracking ID %s-%s" % (queue.slug, ticket))
    else:
        logger.info("No tracking ID matched.")
        ticket = None

    body = None
    counter = 0
    files = []

    for part in message.walk():
        if part.get_content_maintype() == 'multipart':
            continue

        name = part.get_param("name")
        if name:
            name = email.utils.collapse_rfc2231_value(name)

        if part.get_content_maintype() == 'text' and name is None:
            if part.get_content_subtype() == 'plain':
                body = EmailReplyParser.parse_reply(
                    decodeUnknown(part.get_content_charset(),
                                  part.get_payload(decode=True)))
                logger.debug("Discovered plain text MIME part")
            else:
                files.append(
                    SimpleUploadedFile(
                        _("email_html_body.html"),
                        encoding.smart_bytes(part.get_payload()), 'text/html'))
                logger.debug("Discovered HTML MIME part")
        else:
            if not name:
                ext = mimetypes.guess_extension(part.get_content_type())
                name = "part-%i%s" % (counter, ext)
            files.append(
                SimpleUploadedFile(name,
                                   encoding.smart_bytes(part.get_payload()),
                                   part.get_content_type()))
            logger.debug("Found MIME attachment %s" % name)

        counter += 1

    if not body:
        body = _(
            'No plain-text email body available. Please see attachment "email_html_body.html".'
        )

    if ticket:
        try:
            t = Ticket.objects.get(id=ticket)
        except Ticket.DoesNotExist:
            logger.info(
                "Tracking ID %s-%s not associated with existing ticket. Creating new ticket."
                % (queue.slug, ticket))
            ticket = None
        else:
            logger.info("Found existing ticket with Tracking ID %s-%s" %
                        (t.queue.slug, t.id))
            if t.status == Ticket.CLOSED_STATUS:
                t.status = Ticket.REOPENED_STATUS
                t.save()
            new = False

    smtp_priority = message.get('priority', '')
    smtp_importance = message.get('importance', '')
    high_priority_types = {'high', 'important', '1', 'urgent'}
    priority = 2 if high_priority_types & {smtp_priority, smtp_importance
                                           } else 3

    if ticket is None:
        new = True
        t = Ticket.objects.create(
            title=subject,
            queue=queue,
            submitter_email=sender_email,
            created=timezone.now(),
            description=body,
            priority=priority,
        )
        logger.debug("Created new ticket %s-%s" % (t.queue.slug, t.id))

    f = FollowUp(
        ticket=t,
        title=_('E-Mail Received from %(sender_email)s' %
                {'sender_email': sender_email}),
        date=timezone.now(),
        public=True,
        comment=body,
    )

    if t.status == Ticket.REOPENED_STATUS:
        f.new_status = Ticket.REOPENED_STATUS
        f.title = _(
            'Ticket Re-Opened by E-Mail Received from %(sender_email)s' %
            {'sender_email': sender_email})

    f.save()
    logger.debug("Created new FollowUp for Ticket")

    if six.PY2:
        logger.info(("[%s-%s] %s" % (
            t.queue.slug,
            t.id,
            t.title,
        )).encode('ascii', 'replace'))
    elif six.PY3:
        logger.info("[%s-%s] %s" % (
            t.queue.slug,
            t.id,
            t.title,
        ))

    attached = process_attachments(f, files)
    for att_file in attached:
        logger.info(
            "Attachment '%s' successfully added to ticket from email." %
            att_file[0])

    context = safe_template_context(t)

    if new:
        if sender_email:
            send_templated_mail(
                'newticket_submitter',
                context,
                recipients=sender_email,
                sender=queue.from_address,
                fail_silently=True,
            )
        if queue.new_ticket_cc:
            send_templated_mail(
                'newticket_cc',
                context,
                recipients=queue.new_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )
        if queue.updated_ticket_cc and queue.updated_ticket_cc != queue.new_ticket_cc:
            send_templated_mail(
                'newticket_cc',
                context,
                recipients=queue.updated_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )
    else:
        context.update(comment=f.comment)
        if t.assigned_to:
            send_templated_mail(
                'updated_owner',
                context,
                recipients=t.assigned_to.email,
                sender=queue.from_address,
                fail_silently=True,
            )
        if queue.updated_ticket_cc:
            send_templated_mail(
                'updated_cc',
                context,
                recipients=queue.updated_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )

    return t
Example #43
def ticket_from_message(message, queue, logger):
    # 'message' must be an RFC822 formatted message.
    message = email.message_from_string(
        message) if six.PY3 else email.message_from_string(
            message.encode('utf-8'))
    subject = message.get('subject', _('Comment from e-mail'))
    subject = decode_mail_headers(decodeUnknown(message.get_charset(),
                                                subject))
    for affix in STRIPPED_SUBJECT_STRINGS:
        subject = subject.replace(affix, "")
    subject = subject.strip()

    sender = message.get('from', _('Unknown Sender'))
    sender = decode_mail_headers(decodeUnknown(message.get_charset(), sender))
    sender_email = email.utils.parseaddr(sender)[1]

    cc = message.get_all('cc', None)
    if cc:
        # first, fixup the encoding if necessary
        cc = [
            decode_mail_headers(decodeUnknown(message.get_charset(), x))
            for x in cc
        ]
        # get_all checks if multiple CC headers, but individual emails may be comma separated too
        tempcc = []
        for hdr in cc:
            tempcc.extend(hdr.split(','))
        # use a set to ensure no duplicates
        cc = set([x.strip() for x in tempcc])

    for ignore in IgnoreEmail.objects.filter(
            Q(queues=queue) | Q(queues__isnull=True)):
        if ignore.test(sender_email):
            if ignore.keep_in_mailbox:
                # By returning 'False' the message will be kept in the mailbox,
                # and the 'True' will cause the message to be deleted.
                return False
            return True

    matchobj = re.match(r".*\[" + queue.slug + "-(?P<id>\d+)\]", subject)
    if matchobj:
        # This is a reply or forward.
        ticket = matchobj.group('id')
        logger.info("Matched tracking ID %s-%s" % (queue.slug, ticket))
    else:
        logger.info("No tracking ID matched.")
        ticket = None

    body = None
    counter = 0
    files = []

    for part in message.walk():
        if part.get_content_maintype() == 'multipart':
            continue

        name = part.get_param("name")
        if name:
            name = email.utils.collapse_rfc2231_value(name)

        if part.get_content_maintype() == 'text' and name is None:
            if part.get_content_subtype() == 'plain':
                body = EmailReplyParser.parse_reply(
                    decodeUnknown(part.get_content_charset(),
                                  part.get_payload(decode=True)))
                # workaround to get unicode text out rather than escaped text
                try:
                    body = body.encode('ascii').decode('unicode_escape')
                except UnicodeEncodeError:
                    body.encode('utf-8')
                logger.debug("Discovered plain text MIME part")
            else:
                files.append(
                    SimpleUploadedFile(
                        _("email_html_body.html"),
                        encoding.smart_bytes(part.get_payload()), 'text/html'))
                logger.debug("Discovered HTML MIME part")
        else:
            if not name:
                ext = mimetypes.guess_extension(part.get_content_type())
                name = "part-%i%s" % (counter, ext)
            payload = part.get_payload()
            if isinstance(payload, list):
                payload = payload.pop().as_string()
            payloadToWrite = payload
            # check version of python to ensure use of only the correct error type
            if six.PY2:
                non_b64_err = binascii.Error
            else:
                non_b64_err = TypeError
            try:
                logger.debug("Try to base64 decode the attachment payload")
                if six.PY2:
                    payloadToWrite = base64.decodestring(payload)
                else:
                    payloadToWrite = base64.decodebytes(payload)
            except non_b64_err:
                logger.debug("Payload was not base64 encoded, using raw bytes")
                payloadToWrite = payload
            files.append(
                SimpleUploadedFile(name, part.get_payload(decode=True),
                                   mimetypes.guess_type(name)[0]))
            logger.debug("Found MIME attachment %s" % name)

        counter += 1

    if not body:
        mail = BeautifulSoup(part.get_payload(), "lxml")
        if ">" in mail.text:
            message_body = mail.text.split(">")[1]
            body = message_body.encode('ascii', errors='ignore')
        else:
            body = mail.text

    if ticket:
        try:
            t = Ticket.objects.get(id=ticket)
        except Ticket.DoesNotExist:
            logger.info(
                "Tracking ID %s-%s not associated with existing ticket. Creating new ticket."
                % (queue.slug, ticket))
            ticket = None
        else:
            logger.info("Found existing ticket with Tracking ID %s-%s" %
                        (t.queue.slug, t.id))
            if t.status == Ticket.CLOSED_STATUS:
                t.status = Ticket.REOPENED_STATUS
                t.save()
            new = False

    smtp_priority = message.get('priority', '')
    smtp_importance = message.get('importance', '')
    high_priority_types = {'high', 'important', '1', 'urgent'}
    priority = 2 if high_priority_types & {smtp_priority, smtp_importance
                                           } else 3

    if ticket is None:
        if settings.QUEUE_EMAIL_BOX_UPDATE_ONLY:
            return None
        new = True
        t = Ticket.objects.create(
            title=subject,
            queue=queue,
            submitter_email=sender_email,
            created=timezone.now(),
            description=body,
            priority=priority,
        )
        logger.debug("Created new ticket %s-%s" % (t.queue.slug, t.id))

    if cc:
        # get list of currently CC'd emails
        current_cc = TicketCC.objects.filter(ticket=ticket)
        current_cc_emails = [x.email for x in current_cc if x.email]
        # get emails of any Users CC'd to email, if defined
        # (some Users may not have an associated email, e.g, when using LDAP)
        current_cc_users = [
            x.user.email for x in current_cc if x.user and x.user.email
        ]
        # ensure submitter, assigned user, queue email not added
        other_emails = [queue.email_address]
        if t.submitter_email:
            other_emails.append(t.submitter_email)
        if t.assigned_to:
            other_emails.append(t.assigned_to.email)
        current_cc = set(current_cc_emails + current_cc_users + other_emails)
        # first, add any User not previously CC'd (as identified by User's email)
        all_users = User.objects.all()
        all_user_emails = set([x.email for x in all_users])
        users_not_currently_ccd = all_user_emails.difference(set(current_cc))
        users_to_cc = cc.intersection(users_not_currently_ccd)
        for user in users_to_cc:
            tcc = TicketCC.objects.create(ticket=t,
                                          user=User.objects.get(email=user),
                                          can_view=True,
                                          can_update=False)
            tcc.save()
        # then add remaining emails alphabetically, makes testing easy
        new_cc = cc.difference(current_cc).difference(all_user_emails)
        new_cc = sorted(list(new_cc))
        for ccemail in new_cc:
            tcc = TicketCC.objects.create(
                ticket=t,
                email=ccemail.replace('\n', ' ').replace('\r', ' '),
                can_view=True,
                can_update=False)
            tcc.save()

    f = FollowUp(
        ticket=t,
        title=_('E-Mail Received from %(sender_email)s') % {
            'sender_email': sender_email},
        date=timezone.now(),
        public=True,
        comment=body,
    )

    if t.status == Ticket.REOPENED_STATUS:
        f.new_status = Ticket.REOPENED_STATUS
        f.title = _(
            'Ticket Re-Opened by E-Mail Received from %(sender_email)s'
        ) % {'sender_email': sender_email}

    f.save()
    logger.debug("Created new FollowUp for Ticket")

    if six.PY2:
        logger.info(("[%s-%s] %s" % (
            t.queue.slug,
            t.id,
            t.title,
        )).encode('ascii', 'replace'))
    elif six.PY3:
        logger.info("[%s-%s] %s" % (
            t.queue.slug,
            t.id,
            t.title,
        ))

    attached = process_attachments(f, files)
    for att_file in attached:
        logger.info(
            "Attachment '%s' (with size %s) successfully added to ticket from email."
            % (att_file[0], att_file[1].size))

    context = safe_template_context(t)

    if new:
        if sender_email:
            send_templated_mail(
                'newticket_submitter',
                context,
                recipients=sender_email,
                sender=queue.from_address,
                fail_silently=True,
            )
        if queue.new_ticket_cc:
            send_templated_mail(
                'newticket_cc',
                context,
                recipients=queue.new_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )
        if queue.updated_ticket_cc and queue.updated_ticket_cc != queue.new_ticket_cc:
            send_templated_mail(
                'newticket_cc',
                context,
                recipients=queue.updated_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )
    else:
        context.update(comment=f.comment)
        if t.assigned_to:
            send_templated_mail(
                'updated_owner',
                context,
                recipients=t.assigned_to.email,
                sender=queue.from_address,
                fail_silently=True,
            )
        if queue.updated_ticket_cc:
            send_templated_mail(
                'updated_cc',
                context,
                recipients=queue.updated_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )

    return t
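
The priority assignment near the end of this example is a set intersection between the mail's Priority/Importance headers and a set of "urgent" markers. Pulled out into a standalone helper (the name `priority_from_headers` is mine, not the example's), the same logic reads:

def priority_from_headers(message):
    """Return 2 (high) if either header flags the mail as urgent, otherwise 3 (normal)."""
    high_priority_types = {'high', 'important', '1', 'urgent'}
    header_values = {message.get('priority', ''), message.get('importance', '')}
    return 2 if high_priority_types & header_values else 3
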
Ejemplo n.º 44
0
    def _get_cache_attribute(self, view, obj):
        h = md5(
            smart_bytes('{0}.{1}.{2}'.format(self.opts.app_label,
                                             self.opts.object_name.lower(),
                                             hash(obj))))
        return '_{0}_permission_{1}'.format(view, h.hexdigest())
Ejemplo n.º 45
0
import os

import pandas as pd
from django.utils.encoding import smart_bytes

# marcFilename and giftFilename are assumed to be defined earlier in the original script
oDir = "./Output"
if not os.path.isdir(oDir):
    os.makedirs(oDir)

marcDF = pd.read_csv(marcFilename, sep="~", encoding='utf-8', index_col=False)
fundDF = pd.read_csv(giftFilename, encoding='utf-8')

print(marcDF)
print(fundDF)

######################################################################################################
######################################################################################################
#######     make sure all columns are encoded as UTF-8 in dataframes, and merge
for col in marcDF.columns:
    marcDF[col] = marcDF[col].apply(lambda x: smart_bytes(x).decode('utf-8'))

for col in fundDF.columns:
    fundDF[col] = fundDF[col].apply(lambda x: smart_bytes(x).decode('utf-8'))

gf = pd.merge(marcDF, fundDF, on='MMS Id')

print(gf)

for col in gf.columns:
    # escape ampersands; a raw string keeps the backslash literal
    # (a bare "\&" is an invalid escape sequence and triggers a DeprecationWarning)
    gf[col] = gf[col].apply(lambda x: x.replace("&", r"\&"))

gf = gf.replace('nan', '', regex=True)

gf = gf.drop(gf[gf['Title'].str.isupper()].index)
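
The per-column `smart_bytes(x).decode('utf-8')` pass coerces every cell, including numbers and NaN, into a unicode string, which is why the script then replaces the literal 'nan' with an empty string. A toy illustration (the column names here are made up, not taken from the original CSVs):

import pandas as pd
from django.utils.encoding import smart_bytes

df = pd.DataFrame({'MMS Id': [991234, 995678], 'Title': ['Café culture', float('nan')]})
for col in df.columns:
    df[col] = df[col].apply(lambda x: smart_bytes(x).decode('utf-8'))
# every cell is now a str: 991234 -> '991234', NaN -> 'nan'
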
Ejemplo n.º 46
0
    def get(self, request):
        """Callback handler for OIDC authorization code flow.

        This is based on the mozilla-django-oidc library.
        This callback is used to verify the identity added by the user.
        Users are already logged in and we do not care about authentication.
        The JWT token is used to prove the identity of the user.
        """

        profile = request.user.userprofile
        # This is a different nonce from the one used to log in!
        nonce = request.session.get('oidc_verify_nonce')
        if nonce:
            # Make sure that nonce is not used twice
            del request.session['oidc_verify_nonce']

        # Check for all possible errors and display a message to the user.
        errors = [
            'code' not in request.GET,
            'state' not in request.GET,
            'oidc_verify_state' not in request.session,
            not request.GET.get('state')
            or request.GET['state'] != request.session.get('oidc_verify_state'),
        ]
        if any(errors):
            msg = 'Something went wrong, account verification failed.'
            messages.error(request, msg)
            return redirect('phonebook:profile_edit')

        token_payload = {
            'client_id': self.OIDC_RP_VERIFICATION_CLIENT_ID,
            'client_secret': self.OIDC_RP_VERIFICATION_CLIENT_SECRET,
            'grant_type': 'authorization_code',
            'code': request.GET['code'],
            'redirect_uri': absolutify(
                self.request,
                nonprefixed_url('phonebook:verify_identity_callback')),
        }
        response = requests.post(self.OIDC_OP_TOKEN_ENDPOINT,
                                 data=token_payload,
                                 verify=import_from_settings(
                                     'OIDC_VERIFY_SSL', True))
        response.raise_for_status()
        token_response = response.json()
        id_token = token_response.get('id_token')

        # Verify JWT
        jws = JWS.from_compact(force_bytes(id_token))
        jwk = JWK.load(smart_bytes(self.OIDC_RP_VERIFICATION_CLIENT_SECRET))
        verified_token = None
        if jws.verify(jwk):
            verified_token = jws.payload

        # Create the new Identity Profile.
        if verified_token:
            user_info = json.loads(verified_token)
            email = user_info['email']
            verification_user_id = user_info.get(ORIGINAL_CONNECTION_USER_ID)
            msg = ''

            if not user_info.get('email_verified'):
                msg = 'Account verification failed: Email is not verified.'

            if not verification_user_id:
                msg = 'Account verification failed: Could not get original user id'

            if msg:
                messages.error(request, msg)
                return redirect('phonebook:profile_edit')

            user_q = {'auth0_user_id': verification_user_id, 'email': email}

            # If we are linking GitHub we need to save
            # the username too.
            if 'github|' in verification_user_id:
                user_q['username'] = user_info['nickname']

            # Check that the identity doesn't exist in another Identity profile
            # or in another mozillians profile
            error_msg = ''
            if IdpProfile.objects.filter(**user_q).exists():
                error_msg = 'Account verification failed: Identity already exists.'
            elif User.objects.filter(email__iexact=email).exclude(
                    pk=profile.user.pk).exists():
                error_msg = 'The email in this identity is used by another user.'
            if error_msg:
                messages.error(request, error_msg)
                next_url = self.request.session.get('oidc_login_next', None)
                return HttpResponseRedirect(
                    next_url or reverse('phonebook:profile_edit'))

            # Save the new identity to the IdpProfile
            user_q['profile'] = profile
            idp, created = IdpProfile.objects.get_or_create(**user_q)

            current_idp = get_object_or_none(IdpProfile,
                                             profile=profile,
                                             primary=True)
            # The new identity is stronger than the one currently used. Let's swap
            append_msg = ''
            # We need to check for equality too in the case a user updates the primary email in
            # the same identity (matching auth0_user_id). If there is an addition of the same type
            # we are not swapping login identities
            if ((current_idp and current_idp.type < idp.type)
                    or (current_idp
                        and current_idp.auth0_user_id == idp.auth0_user_id)
                    or (not current_idp and created
                        and idp.type >= IdpProfile.PROVIDER_GITHUB)):
                IdpProfile.objects.filter(profile=profile).exclude(
                    pk=idp.pk).update(primary=False)
                idp.primary = True
                idp.save()
                # Also update the primary email of the user
                update_email_in_basket(profile.user.email, idp.email)
                User.objects.filter(pk=profile.user.id).update(email=idp.email)
                append_msg = ' You need to use this identity the next time you will login.'

            send_userprofile_to_cis.delay(profile.pk)
            if created:
                msg = 'Account successfully verified.'
                if append_msg:
                    msg += append_msg
                messages.success(request, msg)
            else:
                msg = 'Account verification failed: Identity already exists.'
                messages.error(request, msg)

        next_url = self.request.session.get('oidc_login_next', None)
        return HttpResponseRedirect(next_url
                                    or reverse('phonebook:profile_edit'))
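
The JWT check in this callback relies on a symmetric (HS256) signature derived from the verification client secret. Isolated from the view, and assuming the `JWS`/`JWK` classes come from josepy (as in mozilla-django-oidc, which the docstring says the example is based on), the verification step looks roughly like this:

import json

from django.utils.encoding import force_bytes, smart_bytes
from josepy.jwk import JWK
from josepy.jws import JWS


def verify_id_token(id_token, client_secret):
    """Return the decoded JWT claims if the signature matches the secret, else None."""
    jws = JWS.from_compact(force_bytes(id_token))
    jwk = JWK.load(smart_bytes(client_secret))  # falls back to a symmetric key
    if jws.verify(jwk):
        return json.loads(jws.payload)
    return None
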
Ejemplo n.º 47
0
    def generate_version(self, key, url, content):
        # return sha1(smart_str(content)).hexdigest()
        return sha1(smart_bytes(content)).hexdigest()
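
Only `content` feeds the hash, so the key and url arguments never influence the result: identical file contents always yield the same 40-character version. A standalone equivalent of the method body:

from hashlib import sha1
from django.utils.encoding import smart_bytes

version = sha1(smart_bytes('body { color: red }')).hexdigest()  # 40 hex characters
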
Ejemplo n.º 48
0
def auth_decode(value):
    return jsonpickle.decode(base64.b64decode(encoding.smart_bytes(value)))
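
A matching encoder would plausibly run the same pipeline in reverse, jsonpickle first and then base64; this is a hypothetical sketch, since the original example only shows the decoder:

import base64

import jsonpickle
from django.utils import encoding


def auth_encode(value):
    # jsonpickle.encode returns text; base64 needs bytes, hence smart_bytes
    return base64.b64encode(encoding.smart_bytes(jsonpickle.encode(value)))
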
Ejemplo n.º 49
0
    def coerce(self, value):
        return smart_bytes(value)
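
Since `coerce` just delegates to `smart_bytes`, its behaviour is whatever `smart_bytes` does: text is UTF-8 encoded and anything else goes through `str()` first. A quick illustration:

from django.utils.encoding import smart_bytes

assert smart_bytes('héllo') == b'h\xc3\xa9llo'  # text is UTF-8 encoded
assert smart_bytes(42) == b'42'                 # non-strings go through str() first
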
Ejemplo n.º 50
0
    def save_story(self, html):
        self.story.original_page_z = zlib.compress(smart_bytes(html))
        try:
            self.story.save()
        except NotUniqueError:
            pass
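
Reading the page back is the mirror operation: decompress and decode. A minimal sketch (the accessor name `load_story_html` is mine; `original_page_z` is the field used above, and the HTML is assumed to be UTF-8, which is the `smart_bytes` default):

import zlib


def load_story_html(story):
    """Inverse of save_story: decompress the stored page back into unicode HTML."""
    return zlib.decompress(story.original_page_z).decode('utf-8')
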
Ejemplo n.º 51
0
    def to_python(self, value):
        if value is None:
            return None
        if isinstance(value, oauth2client.client.Credentials):
            return value
        return pickle.loads(base64.b64decode(smart_bytes(value)))
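
The field's serializing counterpart would presumably do the reverse, pickling the Credentials and base64-encoding the result; a hypothetical sketch (the original example only shows `to_python`):

import base64
import pickle


def get_prep_value(self, value):
    """Hypothetical inverse of to_python: store the Credentials pickled and base64-encoded."""
    if value is None:
        return None
    return base64.b64encode(pickle.dumps(value)).decode('ascii')
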
Ejemplo n.º 52
0
def get_hexdigest(plaintext, length=None):
    digest = hashlib.md5(encoding.smart_bytes(plaintext)).hexdigest()
    if length:
        return digest[:length]
    return digest
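
An MD5 hexdigest is always 32 hexadecimal characters, so `length` simply truncates that string. Usage sketch:

token = get_hexdigest('example-plaintext')            # 32 hex characters
short = get_hexdigest('example-plaintext', length=8)  # first 8 of the same digest
assert short == token[:8] and len(short) == 8
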
Ejemplo n.º 53
0
def default_username(email):
    # Store the username as a base64 encoded sha1 of the email address
    # this protects against data leakage because usernames are often
    # treated as public identifiers (so we can't use the email address).
    return base64.urlsafe_b64encode(hashlib.sha1(
        smart_bytes(email)).digest()).rstrip(b'=')
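
A SHA-1 digest is 20 bytes, so its urlsafe base64 form is 28 characters ending in a single '=' of padding; after the rstrip the username is always a 27-character bytes value. Usage sketch:

username = default_username('someone@example.com')
assert isinstance(username, bytes) and len(username) == 27
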