Ejemplo n.º 1
0
def import_uploads_s3(bucket_name, import_dir, avatar_bucket=False):
    # type: (str, Path, bool) -> None
    """Upload previously exported files into an S3 bucket.

    Reads import_dir/records.json (written by the export code) and, for each
    record, uploads the corresponding local file to `bucket_name`, restoring
    user/realm metadata on the key.

    With avatar_bucket=True, keys are renamed: avatars are addressed by a
    hash of (email, avatar salt), and the importing server's salt differs
    from the exporting server's, so the hash must be recomputed here.
    """
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)

    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    for record in records:
        key = Key(bucket)

        if avatar_bucket:
            # For avatars, we need to rehash the user's email with the
            # new server's avatar salt
            avatar_hash = user_avatar_hash(record['user_profile_email'])
            key.key = avatar_hash
            if record['s3_path'].endswith('.original'):
                key.key += '.original'
        else:
            key.key = record['s3_path']

        user_profile_id = int(record['user_profile_id'])
        # Support email gateway bot and other cross-realm messages
        if user_profile_id in id_maps["user_profile"]:
            logging.info("Uploaded by ID mapped user: %s!" % (user_profile_id,))
            user_profile_id = id_maps["user_profile"][user_profile_id]
        user_profile = get_user_profile_by_id(user_profile_id)
        key.set_metadata("user_profile_id", str(user_profile.id))
        key.set_metadata("realm_id", str(user_profile.realm.id))
        key.set_metadata("orig_last_modified", record['last_modified'])

        # Bug fix: the content type was recorded at export time in the
        # record dict; a boto Key object does not support item access
        # (key['content_type'] raises TypeError).
        headers = {'Content-Type': record['content_type']}

        key.set_contents_from_filename(os.path.join(import_dir, record['path']), headers=headers)
Ejemplo n.º 2
0
def upload_avatar_image_local(user_file, user_profile, email):
    """Store an uploaded avatar locally: the raw original plus a resized PNG."""
    hashed_name = user_avatar_hash(email)

    original_bytes = user_file.read()
    write_local_file('avatars', hashed_name + '.original', original_bytes)

    png_bytes = resize_avatar(original_bytes)
    write_local_file('avatars', hashed_name + '.png', png_bytes)
Ejemplo n.º 3
0
def upload_avatar_image_local(user_file, user_profile, email):
    """Write the uploaded avatar (original and resized PNG) to local storage."""
    base = user_avatar_hash(email)
    raw = user_file.read()
    write_local_file('avatars', base + '.original', raw)
    # Resize only after the original has been safely written.
    write_local_file('avatars', base + '.png', resize_avatar(raw))
Ejemplo n.º 4
0
    def upload_avatar_image(self, user_file, user_profile, email):
        # type: (File, UserProfile, text_type) -> None
        """Persist an avatar upload locally as both the original and a PNG."""
        name_hash = user_avatar_hash(email)
        data = user_file.read()
        write_local_file('avatars', name_hash + '.original', data)
        write_local_file('avatars', name_hash + '.png', resize_avatar(data))
Ejemplo n.º 5
0
def export_avatars_local_helper(realm, output_dir, local_dir):
    # type: (Realm, Path, Path) -> None
    """Copy locally-stored avatar files for a realm into output_dir.

    Scans `local_dir` for files matching each user's avatar hash (all
    extensions, e.g. the '.original' and resized variants), copies them
    into `output_dir`, and writes an index of every copied file to
    output_dir/records.json for the matching import code to consume.
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    count = 0
    records = []

    # Cross-realm system bots can also have avatars, so include them
    # alongside the realm's own users.
    users = list(UserProfile.objects.filter(realm=realm))
    users += [
        get_user_profile_by_email(settings.NOTIFICATION_BOT),
        get_user_profile_by_email(settings.EMAIL_GATEWAY_BOT),
        get_user_profile_by_email(settings.WELCOME_BOT),
    ]
    for user in users:
        # Gravatar-hosted avatars have no local file to export.
        if user.avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
            continue
        # NOTE: There is an avatar source called AVATAR_FROM_SYSTEM,
        #       but I'm not sure we support it any more.  If we
        #       have system-generated avatars, then arguably we
        #       don't need to export them, but it's probably
        #       expedient to just copy them over.  The more
        #       common case is AVATAR_FROM_USER, which is handled
        #       here as well.  AVATAR_FROM_GRAVATAR refers to
        #       avatars hosted by gravatar.com, and for them,
        #       we have no files to worry about exporting

        # Avatar files are named by a hash of the user's email; match
        # every variant (any extension) with a glob.
        avatar_hash = user_avatar_hash(user.email)
        wildcard = os.path.join(local_dir, avatar_hash + '.*')

        for local_path in glob.glob(wildcard):
            logging.info('Copying avatar file for user %s from %s' % (
                user.email, local_path))
            fn = os.path.basename(local_path)
            output_path = os.path.join(output_dir, fn)
            mkdir_p(str(os.path.dirname(output_path)))
            # `cp -a` preserves timestamps/permissions on the copy.
            subprocess.check_call(["cp", "-a", str(local_path), str(output_path)])
            stat = os.stat(local_path)
            # One record per copied file; consumed by the import code,
            # which rehashes user_profile_email with the new server's salt.
            record = dict(realm_id=realm.id,
                          user_profile_id=user.id,
                          user_profile_email=user.email,
                          s3_path=fn,
                          path=fn,
                          size=stat.st_size,
                          last_modified=stat.st_mtime,
                          content_type=None)
            records.append(record)

            count += 1

            # Periodic progress logging for large realms.
            if (count % 100 == 0):
                logging.info("Finished %s" % (count,))

    with open(os.path.join(output_dir, "records.json"), "w") as records_file:
        ujson.dump(records, records_file, indent=4)
Ejemplo n.º 6
0
def upload_avatar_image_s3(user_file, user_profile, email):
    """Upload an avatar to the S3 avatar bucket: original plus resized PNG."""
    mime_type = guess_type(user_file.name)[0]
    bucket = settings.S3_AVATAR_BUCKET
    base_key = user_avatar_hash(email)

    raw_bytes = user_file.read()
    # The untouched upload keeps its guessed content type under ".original".
    upload_image_to_s3(bucket, base_key + ".original", mime_type,
                       user_profile, raw_bytes)

    # The served avatar is always the resized PNG at the bare hash key.
    thumbnail_bytes = resize_avatar(raw_bytes)
    upload_image_to_s3(bucket, base_key, 'image/png',
                       user_profile, thumbnail_bytes)
Ejemplo n.º 7
0
def upload_avatar_image_s3(user_file, user_profile, email):
    """Store both the raw avatar upload and a resized PNG copy in S3."""
    detected_type = guess_type(user_file.name)[0]
    avatar_key = user_avatar_hash(email)

    data = user_file.read()
    upload_image_to_s3(
        settings.S3_AVATAR_BUCKET,
        avatar_key + ".original",
        detected_type,
        user_profile,
        data,
    )

    upload_image_to_s3(
        settings.S3_AVATAR_BUCKET,
        avatar_key,
        'image/png',
        user_profile,
        resize_avatar(data),
    )
Ejemplo n.º 8
0
def import_uploads_local(import_dir, avatar_bucket=False):
    """Copy exported files from import_dir into local upload storage.

    Reads import_dir/records.json and copies each recorded file to its
    destination under settings.LOCAL_UPLOADS_DIR.  With avatar_bucket=True,
    destination names are recomputed by rehashing the user's email with
    this server's avatar salt (salts differ between servers).
    """
    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    for record in records:
        if avatar_bucket:
            # For avatars, we need to rehash the user's email with the
            # new server's avatar salt
            avatar_hash = user_avatar_hash(record['user_profile_email'])
            file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", avatar_hash)
            if record['s3_path'].endswith('.original'):
                file_path += '.original'
            else:
                file_path += '.png'
        else:
            file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "files", record['s3_path'])

        orig_file_path = os.path.join(import_dir, record['path'])
        # Use os.makedirs (consistent with the export helpers) rather than
        # spawning an external `mkdir -p` process per record.
        file_dir = os.path.dirname(file_path)
        if not os.path.exists(file_dir):
            os.makedirs(file_dir)
        shutil.copy(orig_file_path, file_path)
Ejemplo n.º 9
0
def export_bucket(realm, bucket_name, output_dir, avatar_bucket=False):
    # type: (Realm, str, Path, bool) -> None
    """Download a realm's files from an S3 bucket into output_dir.

    Fetches each relevant key, validates its user/realm metadata, writes
    the file under output_dir, and records per-file metadata in
    output_dir/records.json for the matching import code.

    With avatar_bucket=True, keys are avatar-hash names and are filtered
    to this realm's users; otherwise keys are listed by the "realm_id/"
    prefix and expected to look like "realm_id/random/filename".
    """
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)
    records = []

    logging.info("Downloading uploaded files from %s" % (bucket_name))

    # Avatar keys carry no realm prefix, so precompute the set of hash
    # names (and user ids) belonging to this realm to filter against.
    avatar_hash_values = set()
    user_ids = set()
    if avatar_bucket:
        bucket_list = bucket.list()
        for user_profile in UserProfile.objects.filter(realm=realm):
            avatar_hash = user_avatar_hash(user_profile.email)
            avatar_hash_values.add(avatar_hash)
            avatar_hash_values.add(avatar_hash + ".original")
            user_ids.add(user_profile.id)
    else:
        bucket_list = bucket.list(prefix="%s/" % (realm.id,))

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    if settings.EMAIL_GATEWAY_BOT is not None:
        email_gateway_bot = get_user_profile_by_email(settings.EMAIL_GATEWAY_BOT)
    else:
        email_gateway_bot = None

    count = 0
    for bkey in bucket_list:
        # Skip avatars that don't belong to this realm's users.
        if avatar_bucket and bkey.name not in avatar_hash_values:
            continue
        # Re-fetch the key to get its metadata (list results omit it).
        key = bucket.get_key(bkey.name)

        # This can happen if an email address has moved realms
        if 'realm_id' in key.metadata and key.metadata['realm_id'] != str(realm.id):
            if email_gateway_bot is None or key.metadata['user_profile_id'] != str(email_gateway_bot.id):
                raise Exception("Key metadata problem: %s %s / %s" % (key.name, key.metadata, realm.id))
            # Email gateway bot sends messages, potentially including attachments, cross-realm.
            print("File uploaded by email gateway bot: %s / %s" % (key.name, key.metadata))
        elif avatar_bucket:
            if 'user_profile_id' not in key.metadata:
                raise Exception("Missing user_profile_id in key metadata: %s" % (key.metadata,))
            if int(key.metadata['user_profile_id']) not in user_ids:
                raise Exception("Wrong user_profile_id in key metadata: %s" % (key.metadata,))
        elif 'realm_id' not in key.metadata:
            raise Exception("Missing realm_id in key metadata: %s" % (key.metadata,))

        record = dict(s3_path=key.name, bucket=bucket_name,
                      size=key.size, last_modified=key.last_modified,
                      content_type=key.content_type, md5=key.md5)
        # Merge the S3 object's own metadata (user_profile_id, realm_id, ...)
        # into the record.
        record.update(key.metadata)

        # A few early avatars don't have 'realm_id' on the object; fix their metadata
        user_profile = get_user_profile_by_id(record['user_profile_id'])
        if 'realm_id' not in record:
            record['realm_id'] = user_profile.realm_id
        record['user_profile_email'] = user_profile.email

        if avatar_bucket:
            # Avatar keys are flat hash names; write them directly.
            dirname = output_dir
            filename = os.path.join(dirname, key.name)
            record['path'] = key.name
        else:
            # Upload keys look like "realm_id/random/filename"; preserve
            # the last two components as the relative path.
            fields = key.name.split('/')
            if len(fields) != 3:
                raise Exception("Suspicious key %s" % (key.name))
            dirname = os.path.join(output_dir, fields[1])
            filename = os.path.join(dirname, fields[2])
            record['path'] = os.path.join(fields[1], fields[2])

        if not os.path.exists(dirname):
            os.makedirs(dirname)
        key.get_contents_to_filename(filename)

        records.append(record)
        count += 1

        # Periodic progress logging for large buckets.
        if (count % 100 == 0):
            logging.info("Finished %s" % (count,))

    with open(os.path.join(output_dir, "records.json"), "w") as records_file:
        ujson.dump(records, records_file, indent=4)