def replace_countdown(text):
    regex = r"\#countdown\((\d*)\)"
    lines = text.split("\n")

    find_count = 1
    lines_finds = IterFinds(lines, regex)
    for line_index, i in lines_finds:
        for match_index, each_find in enumerate(
                re.finditer(regex, lines[line_index])):
            if find_count > 3:  # Process a max of 3 per message
                return "\n".join(lines)
            elif match_index == i:
                try:
                    match_epoch = int(each_find.groups()[0])
                except ValueError:  # group can be empty, e.g. "#countdown()"
                    continue
                start_string = lines[line_index][:each_find.start()]
                end_string = lines[line_index][each_find.end():]
                rand_str = get_random_alphanumeric_string(
                    12, with_punctuation=False, with_spaces=False)

                middle_string = """<span class="replace-funcs">#countdown(</span><span class="replace-funcs" id="countdown_{rand_str}"></span><span class="replace-funcs">)</span><script type="text/javascript">
var countDownDate_{rand_str} = {epoch_end} * 1000;
var x_{rand_str} = setInterval(function() {{
    var now = new Date().getTime();
    var distance = countDownDate_{rand_str} - now;

    var days = Math.floor(distance / (1000 * 60 * 60 * 24));
    var hours = Math.floor((distance % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60));
    var minutes = Math.floor((distance % (1000 * 60 * 60)) / (1000 * 60));
    var seconds = Math.floor((distance % (1000 * 60)) / 1000);

    var str_return = "";
    if (days) {{
        str_return += days;
        if (days > 1)  str_return += " Days, ";
        else str_return += " Day, ";
    }};
    if (hours || days) {{
        str_return += hours;
        if (hours > 1) str_return += " Hours, ";
        else str_return += " Hour, ";
    }};
    if (minutes || hours || days) str_return += minutes + " Min, ";
    if (seconds || minutes || hours || days) str_return += seconds + " Sec";

    if (distance < 0) {{
        clearInterval(x_{rand_str});
        document.getElementById("countdown_{rand_str}").innerHTML = "Expired";
    }} else {{
        document.getElementById("countdown_{rand_str}").innerHTML = str_return;
    }};
}}, 1000);
</script>""".format(rand_str=rand_str,
                    epoch_end=match_epoch).replace("\n", " ")
                find_count += 1
                lines[line_index] = start_string + middle_string + end_string

    return "\n".join(lines)
Example 2
def replace_ascii_xsmall(text):
    list_replacements = []
    for each_find in re.finditer(r"(?i)\[aa\-xs](.*?)\[\/aa\-xs]",
                                 text,
                                 flags=re.DOTALL):
        list_replacements.append({
            "ID": get_random_alphanumeric_string(30,
                                                 with_punctuation=False,
                                                 with_spaces=False),
            "string_with_tags": each_find.group(),
            "string_wo_tags": each_find.groups()[0]
        })
    return list_replacements
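
A hedged usage sketch of the tag scanner above (the sample text is a
placeholder): each [aa-xs]...[/aa-xs] region is returned with a random ID, the
full tagged string, and the inner text, which a caller could use to swap the
region out for a placeholder before further processing.

text = "before [aa-xs]ascii art here[/aa-xs] after"
for rep in replace_ascii_xsmall(text):
    # rep["string_with_tags"] == "[aa-xs]ascii art here[/aa-xs]"
    # rep["string_wo_tags"] == "ascii art here"
    text = text.replace(rep["string_with_tags"], rep["ID"], 1)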
Example 3
def decrypt_safe_size(message, passphrase, max_size):
    """
    Ensure a decrypted message is of a safe size.
    Since incredibly large PGP messages (e.g. several GB of a repeating character) can
    be encrypted to a very small message, we must monitor the size of the decrypted
    data, and if it grows beyond a size threshold, halt the decryption process.
    """
    gpg = gnupg.GPG()
    tmp_decrypted = "/tmp/decrypted_msg_{}".format(
        get_random_alphanumeric_string(16,
                                       with_punctuation=False,
                                       with_spaces=False))

    delete_file(tmp_decrypted)

    def decrypt_message(message):
        gpg.decrypt_file(message, passphrase=passphrase, output=tmp_decrypted)

    proc = multiprocessing.Process(target=decrypt_message,
                                   args=(BytesIO(message.encode()), ))
    proc.start()

    size_too_large = False
    while proc.is_alive():
        if (os.path.exists(tmp_decrypted)
                and os.path.getsize(tmp_decrypted) > max_size):
            proc.terminate()
            size_too_large = True
        time.sleep(0.1)  # poll periodically instead of busy-waiting
    try:
        if os.path.exists(tmp_decrypted) and not size_too_large:
            with open(tmp_decrypted, 'r') as file:
                decrypted_str = file.read()
            return decrypted_str
        else:
            return None
    finally:
        delete_file(tmp_decrypted)
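
A hedged usage sketch (the message variable, passphrase, and limit are
placeholders): decrypt an ASCII-armored PGP message, but abort and return None
if the plaintext grows past 10 MiB, guarding against decompression bombs.

plaintext = decrypt_safe_size(armored_message, "passphrase", 10 * 1024 * 1024)
if plaintext is None:
    print("Decryption failed or the plaintext exceeded the size limit")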
Example 4
def send_message(errors, form_post, form_steg, dict_send):
    """Conduct the file upload and sending of a message"""
    zip_file = "/tmp/{}".format(
        get_random_alphanumeric_string(15,
                                       with_punctuation=False,
                                       with_spaces=False))

    if dict_send["save_dir"]:
        try:
            dict_send[
                "file_enc_cipher"] = form_post.upload_cipher_and_key.data.split(
                    ",")[0]
            dict_send["file_enc_key_bytes"] = int(
                form_post.upload_cipher_and_key.data.split(",")[1])
        except:
            msg = "Unknown cannot parse cipher and key length: {}".format(
                form_post.upload_cipher_and_key.data)
            errors.append(msg)
            logger.error("{}: {}".format(dict_send["post_id"], msg))
            return "Error", errors

        steg_inserted = False
        for i, f in enumerate(dict_send["file_order"], start=1):
            if not f:
                continue

            fp = os.path.join(dict_send["save_dir"], f)
            file_extension = html.escape(
                os.path.splitext(f)[1].split(".")[-1].lower())
            try:
                if form_post.strip_exif.data and file_extension in [
                        "png", "jpeg", "jpg"
                ]:
                    PIL.Image.MAX_IMAGE_PIXELS = 500000000
                    im = Image.open(fp)
                    logger.info(
                        "{}: Stripping image metadata/exif from {}".format(
                            dict_send["post_id"], fp))
                    im.save(fp)
            except Exception as e:
                msg = "{}: Error opening image/stripping exif: {}".format(
                    dict_send["post_id"], e)
                errors.append(msg)
                logger.exception(msg)

            # encrypt steg message into image
            # Get first image that steg can be inserted into
            if (form_steg and i == form_steg.image_steg_insert.data
                    and file_extension in ["jpg", "jpeg"]
                    and not steg_inserted):
                logger.info("{}: Adding steg message to image {}".format(
                    dict_send["post_id"], fp))

                pgp_passphrase_steg = config.PGP_PASSPHRASE_STEG
                with session_scope(DB_PATH) as new_session:
                    chan = new_session.query(Chan).filter(
                        Chan.address == form_post.board_id.data).first()
                    if chan and chan.pgp_passphrase_steg:
                        pgp_passphrase_steg = chan.pgp_passphrase_steg

                steg_status = steg_encrypt(fp, fp, form_steg.steg_message.data,
                                           pgp_passphrase_steg)

                if steg_status != "success":
                    errors.append(steg_status)
                    logger.exception(steg_status)
                else:
                    steg_inserted = True

        # Create zip archive of files
        def zipdir(path, ziph):
            # ziph is zipfile handle
            for root, dirs, files in os.walk(path):
                for file in files:
                    ziph.write(os.path.join(root, file), file)

        try:
            with zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_LZMA) as zipf:
                zipdir(dict_send["save_dir"], zipf)
        except Exception:
            logger.exception("{}: Could not create zip file".format(
                dict_send["post_id"]))

        # Delete tmp directory
        delete_files_recursive(dict_send["save_dir"])

    if any(dict_send["file_order"]):
        # Generate random filename and extension
        file_extension = ""
        while file_extension in [""] + config.UPLOAD_BANNED_EXT:
            file_name = get_random_alphanumeric_string(30,
                                                       with_punctuation=False,
                                                       with_spaces=False)
            file_extension = get_random_alphanumeric_string(
                3,
                with_punctuation=False,
                with_digits=False,
                with_spaces=False).lower()
            dict_send["upload_filename"] = "{}.{}".format(
                file_name, file_extension)
        save_encrypted_path = "/tmp/{}".format(dict_send["upload_filename"])

    if any(dict_send["file_order"]) and form_post.upload.data != "bitmessage":
        with session_scope(DB_PATH) as new_session:
            upload_info = new_session.query(UploadSites).filter(
                UploadSites.domain == form_post.upload.data).first()

            if upload_info:
                dict_send["file_url_type"] = upload_info.domain
                dict_send["file_upload_settings"] = {
                    "domain": upload_info.domain,
                    "type": upload_info.type,
                    "uri": upload_info.uri,
                    "download_prefix": upload_info.download_prefix,
                    "response": upload_info.response,
                    "direct_dl_url": upload_info.direct_dl_url,
                    "extra_curl_options": upload_info.extra_curl_options,
                    "upload_word": upload_info.upload_word,
                    "form_name": upload_info.form_name
                }
            else:
                logger.error("{}: Upload domain not found".format(
                    dict_send["post_id"]))

            # encrypt file
            if dict_send["file_enc_cipher"] == "NONE":
                logger.info("{}: Not encrypting attachment(s)".format(
                    dict_send["post_id"]))
                os.rename(zip_file, save_encrypted_path)
            else:
                dict_send["file_enc_password"] = \
                    get_random_alphanumeric_string(300)
                logger.info(
                    "{}: Encrypting attachment(s) with {} and {}-bit key".
                    format(dict_send["post_id"], dict_send["file_enc_cipher"],
                           dict_send["file_enc_key_bytes"] * 8))
                ret_crypto = crypto_multi_enc(
                    dict_send["file_enc_cipher"],
                    dict_send["file_enc_password"] +
                    config.PGP_PASSPHRASE_ATTACH,
                    zip_file,
                    save_encrypted_path,
                    key_bytes=dict_send["file_enc_key_bytes"])
                if not ret_crypto:
                    msg = "Unknown encryption cipher: {}".format(
                        dict_send["file_enc_cipher"])
                    errors.append(msg)
                    logger.error("{}: {}".format(dict_send["post_id"], msg))
                    return "Error", errors

                delete_file(zip_file)

            # Generate hash before parts removed
            dict_send["file_sha256_hash"] = generate_hash(save_encrypted_path)
            if dict_send["file_sha256_hash"]:
                logger.info("{}: Attachment hash generated: {}".format(
                    dict_send["post_id"], dict_send["file_sha256_hash"]))

            file_size = os.path.getsize(save_encrypted_path)
            number_of_extracts = config.UPLOAD_FRAG_AMT
            if file_size < 2000:
                extract_starts_sizes = [{
                    "start": 0,
                    "size": int(file_size * 0.5)
                }]
            else:
                extract_starts_sizes = [{
                    "start": 0,
                    "size": config.UPLOAD_FRAG_START_BYTES
                }]
                sequences = return_non_overlapping_sequences(
                    number_of_extracts, config.UPLOAD_FRAG_START_BYTES,
                    file_size - config.UPLOAD_FRAG_END_BYTES,
                    config.UPLOAD_FRAG_MIN_BYTES, config.UPLOAD_FRAG_MAX_BYTES)
                for pos, size in sequences:
                    extract_starts_sizes.append({"start": pos, "size": size})
                extract_starts_sizes.append({
                    "start": file_size - config.UPLOAD_FRAG_END_BYTES,
                    "size": config.UPLOAD_FRAG_END_BYTES
                })
            logger.info("{}: File extraction positions and sizes: {}".format(
                dict_send["post_id"], extract_starts_sizes))
            logger.info("{}: File size before: {}".format(
                dict_send["post_id"], os.path.getsize(save_encrypted_path)))

            data_extracted_start_base64 = data_file_multiple_extract(
                save_encrypted_path, extract_starts_sizes, chunk=4096)

            dict_send["file_size"] = os.path.getsize(save_encrypted_path)
            logger.info("{}: File size after: {}".format(
                dict_send["post_id"], dict_send["file_size"]))

            dict_send["file_extracts_start_base64"] = json.dumps(
                data_extracted_start_base64)

            # Upload file
            upload_id = get_random_alphanumeric_string(12,
                                                       with_spaces=False,
                                                       with_punctuation=False)
            try:
                with session_scope(DB_PATH) as new_session:
                    upl = UploadProgress()
                    upl.upload_id = upload_id
                    upl.uploading = True
                    upl.subject = base64.b64decode(
                        dict_send["subject"]).decode()
                    upl.total_size_bytes = dict_send["file_size"]
                    new_session.add(upl)
                    new_session.commit()

                upload_success = None
                curl_options = None
                # Initialize so the status check below can't raise NameError
                # if neither upload type matches
                status = None
                web_url = None
                if ("type" in dict_send["file_upload_settings"]
                        and dict_send["file_upload_settings"]["type"]
                        == "anonfile"):
                    if dict_send["file_upload_settings"]["uri"]:
                        anon = AnonFile(
                            proxies=config.TOR_PROXIES,
                            custom_timeout=432000,
                            uri=dict_send["file_upload_settings"]["uri"],
                            upload_id=upload_id)
                    else:
                        anon = AnonFile(proxies=config.TOR_PROXIES,
                                        custom_timeout=432000,
                                        server=form_post.upload.data,
                                        upload_id=upload_id)
                elif ("type" in dict_send["file_upload_settings"]
                      and dict_send["file_upload_settings"]["type"] == "curl"):
                    curl_options = dict_send["file_upload_settings"]
                    curl_upload = UploadCurl(upload_id=upload_id)

                for i in range(3):
                    logger.info("{}: Uploading {} file".format(
                        dict_send["post_id"],
                        human_readable_size(
                            os.path.getsize(save_encrypted_path))))
                    if ("type" in dict_send["file_upload_settings"]
                            and dict_send["file_upload_settings"]["type"]
                            == "anonfile"):
                        status, web_url = anon.upload_file(save_encrypted_path)
                    elif (curl_options
                          and "type" in dict_send["file_upload_settings"] and
                          dict_send["file_upload_settings"]["type"] == "curl"):
                        status, web_url = curl_upload.upload_curl(
                            dict_send["post_id"],
                            curl_options["domain"],
                            curl_options["uri"],
                            save_encrypted_path,
                            download_prefix=curl_options["download_prefix"],
                            upload_word=curl_options["upload_word"],
                            response=curl_options["response"])

                    if not status:
                        logger.error("{}: File upload failed".format(
                            dict_send["post_id"]))
                    else:
                        logger.info("{}: Upload success: URL: {}".format(
                            dict_send["post_id"], web_url))
                        upload_success = web_url
                        with session_scope(DB_PATH) as new_session:
                            upl = new_session.query(UploadProgress).filter(
                                UploadProgress.upload_id == upload_id).first()
                            if upl:
                                upl.progress_size_bytes = os.path.getsize(
                                    save_encrypted_path)
                                upl.progress_percent = 100
                                upl.uploading = False
                                new_session.commit()
                        break
                    time.sleep(15)
            except:
                logger.exception("uploading file")
            finally:
                delete_file(save_encrypted_path)
                with session_scope(DB_PATH) as new_session:
                    upl = new_session.query(UploadProgress).filter(
                        UploadProgress.upload_id == upload_id).first()
                    if upl:
                        upl.uploading = False
                        new_session.commit()

            if upload_success:
                dict_send["file_url"] = upload_success
            else:
                msg = "File upload failed after 3 attempts"
                errors.append(msg)
                logger.error("{}: {}".format(dict_send["post_id"], msg))
                return "Error", errors

    elif any(
            dict_send["file_order"]) and form_post.upload.data == "bitmessage":
        with session_scope(DB_PATH) as new_session:
            settings = new_session.query(GlobalSettings).first()
            if settings.enable_kiosk_mode and settings.kiosk_disable_bm_attach:
                msg = "Attaching files using the Bitmessage Upload Method is currently prohibited. " \
                      "Use one of the alternate upload methods."
                errors.append(msg)
                logger.error("{}: {}".format(dict_send["post_id"], msg))
                return "Error", errors

        # encrypt file
        try:
            cipher_and_key = form_post.upload_cipher_and_key.data.split(",")
            dict_send["file_enc_cipher"] = cipher_and_key[0]
            dict_send["file_enc_key_bytes"] = int(cipher_and_key[1])
        except (ValueError, IndexError):
            msg = "Cannot parse cipher and key length: {}".format(
                form_post.upload_cipher_and_key.data)
            errors.append(msg)
            logger.error("{}: {}".format(dict_send["post_id"], msg))
            return "Error", errors

        if dict_send["file_enc_cipher"] == "NONE":
            logger.info("{}: Not encrypting attachment(s)".format(
                dict_send["post_id"]))
            os.rename(zip_file, save_encrypted_path)
        else:
            dict_send["file_enc_password"] = get_random_alphanumeric_string(
                300)
            logger.info(
                "{}: Encrypting attachment(s) with {} and {}-bit key".format(
                    dict_send["post_id"], dict_send["file_enc_cipher"],
                    dict_send["file_enc_key_bytes"] * 8))
            ret_crypto = crypto_multi_enc(
                dict_send["file_enc_cipher"],
                dict_send["file_enc_password"] + config.PGP_PASSPHRASE_ATTACH,
                zip_file,
                save_encrypted_path,
                key_bytes=dict_send["file_enc_key_bytes"])
            if not ret_crypto:
                msg = "Unknown encryption cipher: {}".format(
                    dict_send["file_enc_cipher"])
                errors.append(msg)
                logger.error("{}: {}".format(dict_send["post_id"], msg))
                return "Error", errors

            delete_file(zip_file)

        dict_send["file_uploaded"] = base64.b64encode(
            open(save_encrypted_path, "rb").read()).decode()

        delete_file(save_encrypted_path)

    dict_message = {
        "version": config.VERSION_MSG,
        "message_type": "post",
        "is_op": form_post.is_op.data == "yes",
        "op_sha256_hash": dict_send["op_sha256_hash"],
        "timestamp_utc": daemon_com.get_utc(),
        "file_size": dict_send["file_size"],
        "file_amount": dict_send["file_amount"],
        "file_url_type": dict_send["file_url_type"],
        "file_url": dict_send["file_url"],
        "file_upload_settings": dict_send["file_upload_settings"],
        "file_extracts_start_base64": dict_send["file_extracts_start_base64"],
        "file_base64": dict_send["file_uploaded"],
        "file_sha256_hash": dict_send["file_sha256_hash"],
        "file_enc_cipher": dict_send["file_enc_cipher"],
        "file_enc_key_bytes": dict_send["file_enc_key_bytes"],
        "file_enc_password": dict_send["file_enc_password"],
        "file_order": dict_send["file_order"],
        "image1_spoiler": form_post.image1_spoiler.data,
        "image2_spoiler": form_post.image2_spoiler.data,
        "image3_spoiler": form_post.image3_spoiler.data,
        "image4_spoiler": form_post.image4_spoiler.data,
        "upload_filename": dict_send["upload_filename"],
        "sage": dict_send["sage"],
        "subject": dict_send["subject"],
        "message": dict_send["message"],
        "nation": dict_send["nation"],
        "nation_base64": dict_send["nation_base64"],
        "nation_name": dict_send["nation_name"],
    }

    if zip_file:
        delete_file(zip_file)

    pgp_passphrase_msg = config.PGP_PASSPHRASE_MSG
    with session_scope(DB_PATH) as new_session:
        chan = new_session.query(Chan).filter(
            Chan.address == form_post.board_id.data).first()
        if chan and chan.pgp_passphrase_msg:
            pgp_passphrase_msg = chan.pgp_passphrase_msg

    gpg = gnupg.GPG()
    message_encrypted = gpg.encrypt(json.dumps(dict_message),
                                    symmetric="AES256",
                                    passphrase=pgp_passphrase_msg,
                                    recipients=None)

    message_send = base64.b64encode(message_encrypted.data).decode()

    if len(message_send) > config.BM_PAYLOAD_MAX_SIZE:
        msg = "Message payload too large: {}. Must be less than {}".format(
            human_readable_size(len(message_send)),
            human_readable_size(config.BM_PAYLOAD_MAX_SIZE))
        logger.error(msg)
        errors.append(msg)
        return "Error", errors
    else:
        logger.info("{}: Message size: {}".format(dict_send["post_id"],
                                                  len(message_send)))

    # prolong inventory clear if sending a message
    now = time.time()
    if daemon_com.get_timer_clear_inventory() > now:
        daemon_com.update_timer_clear_inventory(config.CLEAR_INVENTORY_WAIT)

    # Don't allow a message to send while Bitmessage is restarting
    allow_send = False
    timer = time.time()
    while not allow_send:
        if daemon_com.bitmessage_restarting() is False:
            allow_send = True
        if time.time() - timer > config.BM_WAIT_DELAY:
            logger.error("{}: Unable to send message: "
                         "Could not detect Bitmessage running.".format(
                             dict_send["post_id"]))
            msg = "Unable to send message."
            errors = ["Could not detect Bitmessage running."]
            return msg, errors
        time.sleep(1)

    lf = LF()
    if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
        return_str = None
        try:
            return_str = api.sendMessage(form_post.board_id.data,
                                         form_post.from_address.data, "",
                                         message_send, 2, form_post.ttl.data)
            if return_str:
                logger.info(
                    "{}: Message sent from {} to {} with TTL of {} sec: {}".
                    format(dict_send["post_id"], form_post.from_address.data,
                           form_post.board_id.data, form_post.ttl.data,
                           return_str))
        except Exception:
            logger.exception("{}: Error sending message".format(
                dict_send["post_id"]))
        finally:
            time.sleep(config.API_PAUSE)
            lf.lock_release(config.LOCKFILE_API)
            return_msg = "Post of size {} placed in send queue. The time it " \
                         "takes to send a message is related to the size of the " \
                         "post due to the proof of work required to send. " \
                         "Generally, the larger the post, the longer it takes to " \
                         "send. Posts ~10 KB take around a minute or less to send, " \
                         "whereas messages >= 100 KB can take several minutes to " \
                         "send. BM returned: {}".format(
                            human_readable_size(len(message_send)), return_str)
            return return_msg, errors
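
data_file_multiple_extract() and data_file_multiple_insert() are not shown in
these examples. A minimal sketch of the scheme their usage implies (the names
and chunk argument come from the source; the behavior is an assumption): byte
ranges are cut out of the encrypted attachment before upload and carried
base64-encoded inside the message, so the uploaded file alone is incomplete
until the downloader re-inserts them.

import base64

def data_file_multiple_extract(path, starts_sizes, chunk=4096):
    # chunked/streaming I/O omitted in this sketch; the file is read whole
    with open(path, "rb") as f:
        data = bytearray(f.read())
    extracts = []
    # Remove the highest offsets first so earlier offsets remain valid
    for entry in sorted(starts_sizes, key=lambda x: x["start"], reverse=True):
        start, size = entry["start"], entry["size"]
        extracts.append({
            "start": start,
            "data": base64.b64encode(bytes(data[start:start + size])).decode()
        })
        del data[start:start + size]
    with open(path, "wb") as f:
        f.write(data)
    return extracts

def data_file_multiple_insert(path, extracts, chunk=4096):
    with open(path, "rb") as f:
        data = bytearray(f.read())
    # Re-insert the lowest offsets first so each original offset is restored
    for entry in sorted(extracts, key=lambda x: x["start"]):
        data[entry["start"]:entry["start"]] = base64.b64decode(entry["data"])
    with open(path, "wb") as f:
        f.write(data)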
Example 5
def submit_post(form_post, form_steg=None):
    """Process the form for making a post"""
    errors = []

    file_list = []
    file_upload = False

    dict_send = {
        "save_dir": None,
        "zip_file": None,
        "file_size": None,
        "file_amount": None,
        "file_filename": None,
        "file_extension": None,
        "file_url_type": None,
        "file_url": None,
        "file_upload_settings": {},
        "file_extracts_start_base64": None,
        "file_sha256_hash": None,
        "file_enc_cipher": None,
        "file_enc_key_bytes": None,
        "file_enc_password": None,
        "file_order": [],
        "media_height": None,
        "media_width": None,
        "file_uploaded": None,
        "upload_filename": None,
        "op_sha256_hash": None,
        "sage": None,
        "subject": None,
        "message": None,
        "nation": None,
        "nation_base64": None,
        "nation_name": None,
        "post_id": get_random_alphanumeric_string(6,
                                                  with_punctuation=False,
                                                  with_spaces=False)
    }

    if form_post.is_op.data != "yes":
        with session_scope(DB_PATH) as new_session:
            thread = new_session.query(Threads).filter(
                Threads.thread_hash == form_post.thread_id.data).first()
            if thread:
                sub_strip = thread.subject.encode('utf-8').strip()
                sub_unescape = html.unescape(sub_strip.decode())
                sub_b64enc = base64.b64encode(sub_unescape.encode())
                dict_send["subject"] = sub_b64enc.decode()
            else:
                msg = "Board ({}) ID or Thread ({}) ID invalid".format(
                    form_post.board_id.data, form_post.thread_id.data)
                logger.error(msg)
                errors.append(msg)
                return "Error", errors
    else:
        if not form_post.subject.data:
            msg = "Subject required"
            logger.error(msg)
            errors.append(msg)
            return "Error", errors
        subject_test = form_post.subject.data.encode('utf-8').strip()
        if len(subject_test) > 64:
            msg = "Subject too large: {}. Must be less than 64 characters".format(
                len(subject_test))
            logger.error(msg)
            errors.append(msg)
            return "Error", errors
        dict_send["subject"] = base64.b64encode(subject_test).decode()

    if form_post.nation.data:
        if (form_post.nation.data.startswith("customflag")
                and len(form_post.nation.data.split("_")) == 2):
            flag_id = int(form_post.nation.data.split("_")[1])
            with session_scope(DB_PATH) as new_session:
                flag = new_session.query(Flags).filter(
                    Flags.id == flag_id).first()
                if flag:
                    dict_send["nation_name"] = flag.name
                    dict_send["nation_base64"] = flag.flag_base64
        else:
            dict_send["nation"] = form_post.nation.data

    if form_post.sage.data:
        dict_send["sage"] = True

    if form_post.body.data:
        dict_send["message"] = form_post.body.data.encode(
            'utf-8').strip().decode()

    if form_post.is_op.data == "no" and form_post.op_sha256_hash.data:
        dict_send["op_sha256_hash"] = form_post.op_sha256_hash.data

    for form_file in [form_post.file1, form_post.file2,
                      form_post.file3, form_post.file4]:
        if form_file.data[0]:
            file_list.append(form_file.data[0])
        else:
            file_list.append(None)

    if any(file_list):
        file_upload = True
        for each_file in file_list:
            if not each_file:
                continue
            try:
                file_filename = html.escape(each_file.filename)
                file_extension = html.escape(
                    os.path.splitext(file_filename)[1].split(".")[-1].lower())
            except Exception as e:
                msg = "Error determining file extension: {}".format(e)
                logger.error("{}: {}".format(dict_send["post_id"], msg))
                errors.append(msg)
                return "Error", errors

    spawn_send_thread = False
    save_file_size = 0
    if file_upload:
        # get number of files being sent
        dict_send["file_amount"] = sum([
            bool(form_post.file1.data[0]),
            bool(form_post.file2.data[0]),
            bool(form_post.file3.data[0]),
            bool(form_post.file4.data[0])
        ])

        dict_send["save_dir"] = "/tmp/{}".format(
            get_random_alphanumeric_string(15,
                                           with_punctuation=False,
                                           with_spaces=False))
        os.mkdir(dict_send["save_dir"])
        for each_file in file_list:
            if not each_file:
                dict_send["file_order"].append(None)
                continue
            save_file_path = "{}/{}".format(dict_send["save_dir"],
                                            each_file.filename)
            delete_file(save_file_path)
            # Save file to disk
            logger.info("{}: Saving file to {}".format(dict_send["post_id"],
                                                       save_file_path))
            each_file.save(save_file_path)
            dict_send["file_order"].append(each_file.filename)

        def get_size(start_path):
            total_size = 0
            for dirpath, dirnames, filenames in os.walk(start_path):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    # skip if it is symbolic link
                    if not os.path.islink(fp):
                        total_size += os.path.getsize(fp)

            return total_size

        save_file_size = get_size(dict_send["save_dir"])
        logger.info("{}: Upload size is {}".format(
            dict_send["post_id"], human_readable_size(save_file_size)))
        if save_file_size > config.UPLOAD_SIZE_TO_THREAD:
            spawn_send_thread = True

    if spawn_send_thread:
        # Spawn a thread to send the message if the file is large.
        # This prevents the user's page from either timing out or waiting a very long
        # time to refresh. It's better to give the user feedback about what's happening.
        logger.info(
            "{}: File size above {}. Spawning background upload thread.".
            format(dict_send["post_id"],
                   human_readable_size(config.UPLOAD_SIZE_TO_THREAD)))
        msg_send = Thread(target=send_message,
                          args=(
                              errors,
                              form_post,
                              form_steg,
                              dict_send,
                          ))
        msg_send.daemon = True
        msg_send.start()
        msg = "Your file that will be uploaded is {}, which is above the {} size to wait " \
              "for the upload to finish. Instead, a thread was spawned to handle the upload " \
              "and this message was generated to let you know your post is uploading in the " \
              "background. The upload progress can be viewed (after encryption and any other " \
              "processing) on the status page). Depending on the size of your upload and the " \
              "service it's being uploaded to, the time it takes to send your post will vary. " \
              "Give your post ample time to send so you don't make duplicate posts.".format(
                human_readable_size(save_file_size),
                human_readable_size(config.UPLOAD_SIZE_TO_THREAD))
        return msg, []
    else:
        logger.info(
            "{}: No files or total file size below {}. Sending in foreground.".
            format(dict_send["post_id"],
                   human_readable_size(config.UPLOAD_SIZE_TO_THREAD)))
        return send_message(errors, form_post, form_steg, dict_send)
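
get_random_alphanumeric_string() appears throughout these examples but isn't
shown. A plausible sketch, assuming the keyword arguments mean what their names
suggest (a guess, not the project's actual implementation); SystemRandom is
used here because the helper also generates encryption passwords.

import random
import string

def get_random_alphanumeric_string(length, with_punctuation=True,
                                   with_digits=True, with_spaces=True):
    chars = string.ascii_letters
    if with_digits:
        chars += string.digits
    if with_punctuation:
        chars += string.punctuation
    if with_spaces:
        chars += " "
    return "".join(random.SystemRandom().choice(chars) for _ in range(length))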
Example 6
def download_and_extract(
        address,
        message_id,
        file_url,
        file_upload_settings,
        file_extracts_start_base64,
        upload_filename,
        file_path,
        file_sha256_hash,
        file_enc_cipher,
        file_enc_key_bytes,
        file_enc_password):

    logger.info("download_and_extract {}, {}, {}, {}, {}, {}, {}, {}, {}, password={}".format(
        address,
        message_id,
        file_url,
        file_upload_settings,
        upload_filename,
        file_path,
        file_sha256_hash,
        file_enc_cipher,
        file_enc_key_bytes,
        file_enc_password))

    file_sha256_hashes_match = False
    file_size = None
    file_amount = None
    file_do_not_download = None
    file_progress = None
    file_download_successful = None
    downloaded = None
    force_allow_download = False
    download_url = None
    media_info = {}
    message_steg = {}
    resume_start_download = False

    if message_id in daemon_com.get_start_download():
        resume_start_download = True
        force_allow_download = True
        file_do_not_download = False
        daemon_com.remove_start_download(message_id)

    # save downloaded file to /tmp/
    # filename has been randomly generated, so no risk of collisions
    download_path = "/tmp/{}".format(upload_filename)

    with session_scope(DB_PATH) as new_session:
        settings = new_session.query(GlobalSettings).first()

        if (file_enc_cipher == "NONE" and
                settings.never_auto_download_unencrypted and
                not force_allow_download):
            logger.info(
                "{}: Instructed to never auto-download unencrypted attachments. "
                "Manual override needed.".format(
                    message_id[-config.ID_LENGTH:].upper()))
            file_do_not_download = True
            message = new_session.query(Messages).filter(
                Messages.message_id == message_id).first()
            file_progress = "Current settings prohibit automatically downloading unencrypted attachments."
            if message:
                message.file_progress = "Current settings prohibit automatically downloading unencrypted attachments."
                new_session.commit()
            return (file_download_successful,
                    file_size,
                    file_amount,
                    file_do_not_download,
                    file_sha256_hashes_match,
                    file_progress,
                    media_info,
                    message_steg)

        if (not settings.auto_dl_from_unknown_upload_sites and
                not is_upload_site_in_database(file_upload_settings)):
            logger.info(
                "{}: Instructed to never auto-download from unknown upload sites. "
                "Save upload site to database then instruct to download.".format(
                    message_id[-config.ID_LENGTH:].upper()))
            file_do_not_download = True
            message = new_session.query(Messages).filter(
                Messages.message_id == message_id).first()
            file_progress = "Unknown upload site detected. Add upload site and manually start download."
            if message:
                message.file_progress = file_progress
                new_session.commit()
            return (file_download_successful,
                    file_size,
                    file_amount,
                    file_do_not_download,
                    file_sha256_hashes_match,
                    file_progress,
                    media_info,
                    message_steg)

        if not settings.allow_net_file_size_check and not force_allow_download:
            logger.info("{}: Not connecting to determine file size. Manual override needed.".format(
                message_id[-config.ID_LENGTH:].upper()))
            file_do_not_download = True
            message = new_session.query(Messages).filter(
                Messages.message_id == message_id).first()
            file_progress = "Configuration doesn't allow getting file size. Manual override required."
            if message:
                message.file_progress = file_progress
                new_session.commit()
            return (file_download_successful,
                    file_size,
                    file_amount,
                    file_do_not_download,
                    file_sha256_hashes_match,
                    file_progress,
                    media_info,
                    message_steg)
        else:
            logger.info("{}: Getting URL and file size...".format(message_id[-config.ID_LENGTH:].upper()))

    # Parse page for URL to direct download zip
    if "direct_dl_url" in file_upload_settings and file_upload_settings["direct_dl_url"]:
        download_url = file_url
    else:
        try:
            logger.info("{}: Finding download URL on upload page".format(message_id[-config.ID_LENGTH:].upper()))
            html_return = requests.get(
                file_url,
                headers={'User-Agent': generate_user_agent()})
            soup = bs4.BeautifulSoup(html_return.text, "html.parser")
            for link in soup.find_all('a', href=True):
                href = link.get('href')
                if href and href.endswith(upload_filename):
                    download_url = href
                    break
        except Exception:
            logger.exception("{}: Error getting upload page".format(
                message_id[-config.ID_LENGTH:].upper()))

    if not download_url:
        logger.error("{}: Could not find URL for {}".format(
            message_id[-config.ID_LENGTH:].upper(), upload_filename))
        daemon_com.remove_start_download(message_id)
        with session_scope(DB_PATH) as new_session:
            message = new_session.query(Messages).filter(
                Messages.message_id == message_id).first()
            if message:
                message.file_progress = "Could not find download URL. Try again."
                new_session.commit()
        return (file_download_successful,
                file_size,
                file_amount,
                file_do_not_download,
                file_sha256_hashes_match,
                file_progress,
                media_info,
                message_steg)
    else:
        logger.info("{}: Found URL".format(message_id[-config.ID_LENGTH:].upper()))
        time.sleep(5)
        for _ in range(3):
            logger.info("{}: Getting file size".format(message_id[-config.ID_LENGTH:].upper()))
            try:
                if resume_start_download:
                    headers = requests.head(
                        download_url,
                        headers={'User-Agent': generate_user_agent()}).headers
                    logger.info("{}: Headers: {}".format(message_id[-config.ID_LENGTH:].upper(), headers))
                    if 'Content-length' in headers:
                        file_size = int(headers['Content-length'])
                        logger.info("{}: File size acquired: {}".format(
                            message_id[-config.ID_LENGTH:].upper(), human_readable_size(file_size)))
                        break
                    else:
                        logger.error("{}: 'content-length' not in header".format(message_id[-config.ID_LENGTH:].upper()))
                else:
                    with session_scope(DB_PATH) as new_session:
                        settings = new_session.query(GlobalSettings).first()
                        # Don't download file if user set to 0
                        if settings.max_download_size == 0:
                            downloaded = "prohibited"
                            file_do_not_download = True
                            logger.info("{}: File prevented from being auto-download.".format(
                                message_id[-config.ID_LENGTH:].upper()))
                            break

                        # Check file size and auto-download if less than user-set size
                        headers = requests.head(
                            download_url,
                            headers={'User-Agent': generate_user_agent()}).headers
                        logger.info("{}: Headers: {}".format(message_id[-config.ID_LENGTH:].upper(), headers))
                        if 'Content-length' in headers:
                            file_size = int(headers['Content-length'])
                            if file_size and file_size > settings.max_download_size * 1024 * 1024:
                                downloaded = "too_large"
                                file_do_not_download = True
                                logger.info(
                                    "{}: File size ({}) is greater than max allowed "
                                    "to auto-download ({}). Not downloading.".format(
                                        message_id[-config.ID_LENGTH:].upper(),
                                        human_readable_size(file_size),
                                        human_readable_size(settings.max_download_size * 1024 * 1024)))
                                break
                            else:
                                file_do_not_download = False
                                logger.info(
                                    "{}: File size ({}) is less than max allowed "
                                    "to auto-download ({}). Downloading.".format(
                                        message_id[-config.ID_LENGTH:].upper(),
                                        human_readable_size(file_size),
                                        human_readable_size(settings.max_download_size * 1024 * 1024)))
                                break
                        else:
                            logger.error("{}: 'content-length' not in header".format(
                                message_id[-config.ID_LENGTH:].upper()))
                time.sleep(15)
            except Exception as err:
                logger.exception("{}: Could not get file size: {}".format(
                    message_id[-config.ID_LENGTH:].upper(), err))
                file_do_not_download = True
                time.sleep(15)

        if file_do_not_download and not force_allow_download:
            logger.info("{}: Not downloading.".format(message_id[-config.ID_LENGTH:].upper()))
            with session_scope(DB_PATH) as new_session:
                message = new_session.query(Messages).filter(
                    Messages.message_id == message_id).first()
                if message:
                    message.file_progress = "Configuration doesn't allow auto-downloading of this file. Manual override required."
                    new_session.commit()
            return (file_download_successful,
                    file_size,
                    file_amount,
                    file_do_not_download,
                    file_sha256_hashes_match,
                    file_progress,
                    media_info,
                    message_steg)
        else:
            logger.info("{}: Downloading...".format(message_id[-config.ID_LENGTH:].upper()))
            file_do_not_download = False
            time.sleep(5)

        for _ in range(config.DOWNLOAD_ATTEMPTS):
            try:
                download_with_resume(message_id, download_url, download_path)
                if file_size == os.path.getsize(download_path):
                    break
                logger.error("{}: File size does not match what's expected".format(message_id[-config.ID_LENGTH:].upper()))
            except IOError:
                logger.error("{}: Could not download".format(message_id[-config.ID_LENGTH:].upper()))
            except Exception as err:
                logger.error("{}: Exception downloading: {}".format(message_id[-config.ID_LENGTH:].upper(), err))
            time.sleep(60)

        try:
            if file_size == os.path.getsize(download_path):
                logger.info("{}: Download completed".format(message_id[-config.ID_LENGTH:].upper()))
                downloaded = "downloaded"
            else:
                logger.error("{}: Download not complete".format(message_id[-config.ID_LENGTH:].upper()))
        except OSError:
            logger.error("{}: Issue downloading file".format(message_id[-config.ID_LENGTH:].upper()))

        if downloaded == "prohibited":
            logger.info("{}: File prohibited from auto-downloading".format(
                message_id[-config.ID_LENGTH:].upper()))
        elif downloaded == "too_large":
            with session_scope(DB_PATH) as new_session:
                settings = new_session.query(GlobalSettings).first()
                logger.info("{}: File size ({}) is larger than allowed to auto-download ({})".format(
                    message_id[-config.ID_LENGTH:].upper(),
                    human_readable_size(file_size),
                    human_readable_size(settings.max_download_size * 1024 * 1024)))
        elif downloaded == "downloaded":
            logger.info("{}: File successfully downloaded".format(message_id[-config.ID_LENGTH:].upper()))
            file_download_successful = True
        elif downloaded is None:
            logger.error("{}: Could not download file after {} attempts".format(
                message_id[-config.ID_LENGTH:].upper(), config.DOWNLOAD_ATTEMPTS))
            with session_scope(DB_PATH) as new_session:
                message = new_session.query(Messages).filter(
                    Messages.message_id == message_id).first()
                if message:
                    message.file_progress = "Could not download file after {} attempts".format(
                        config.DOWNLOAD_ATTEMPTS)
                    new_session.commit()
            file_download_successful = False

        if file_download_successful:
            # Add missing parts back to file
            if file_extracts_start_base64:
                size_before = os.path.getsize(download_path)
                data_file_multiple_insert(download_path, file_extracts_start_base64, chunk=4096)
                logger.info("{}: File data insertion. Before: {}, After: {}".format(
                    message_id[-config.ID_LENGTH:].upper(), size_before, os.path.getsize(download_path)))

            # compare SHA256 hashes
            if file_sha256_hash:
                if not validate_file(download_path, file_sha256_hash):
                    logger.info(
                        "{}: File SHA256 hash ({}) does not match provided SHA256"
                        " hash ({}). Deleting.".format(
                            message_id[-config.ID_LENGTH:].upper(),
                            generate_hash(download_path),
                            file_sha256_hash))
                    file_sha256_hashes_match = False
                    file_download_successful = False
                    delete_file(download_path)
                    return (file_download_successful,
                            file_size,
                            file_amount,
                            file_do_not_download,
                            file_sha256_hashes_match,
                            file_progress,
                            media_info,
                            message_steg)
                else:
                    file_sha256_hashes_match = True
                    logger.info("{}: File SHA256 hashes match ({})".format(
                        message_id[-config.ID_LENGTH:].upper(), file_sha256_hash))

            if file_enc_cipher == "NONE":
                logger.info("{}: File not encrypted".format(message_id[-config.ID_LENGTH:].upper()))
                full_path_filename = download_path
            else:
                # decrypt file
                full_path_filename = "/tmp/{}.zip".format(
                    get_random_alphanumeric_string(12, with_punctuation=False, with_spaces=False))
                delete_file(full_path_filename)  # make sure no file already exists
                logger.info("{}: Decrypting file".format(message_id[-config.ID_LENGTH:].upper()))
                with session_scope(DB_PATH) as new_session:
                    message = new_session.query(Messages).filter(
                        Messages.message_id == message_id).first()
                    if message:
                        message.file_progress = "Decrypting file"
                        new_session.commit()

                try:
                    with session_scope(DB_PATH) as new_session:
                        settings = new_session.query(GlobalSettings).first()
                        ret_crypto = crypto_multi_decrypt(
                            file_enc_cipher,
                            file_enc_password + config.PGP_PASSPHRASE_ATTACH,
                            download_path,
                            full_path_filename,
                            key_bytes=file_enc_key_bytes,
                            max_size_bytes=settings.max_extract_size * 1024 * 1024)
                        if not ret_crypto:
                            logger.error("{}: Issue decrypting attachment")
                            message = new_session.query(Messages).filter(
                                Messages.message_id == message_id).first()
                            if message:
                                message.file_progress = "Issue decrypting attachment. Check log."
                                new_session.commit()
                            file_download_successful = False
                            return (file_download_successful,
                                    file_size,
                                    file_amount,
                                    file_do_not_download,
                                    file_sha256_hashes_match,
                                    file_progress,
                                    media_info,
                                    message_steg)
                    logger.info("{}: Finished decrypting file".format(message_id[-config.ID_LENGTH:].upper()))

                    # z = zipfile.ZipFile(download_path)
                    # z.setpassword(config.PGP_PASSPHRASE_ATTACH.encode())
                    # z.extract(extract_filename, path=extract_path)
                except Exception:
                    logger.exception("Error decrypting attachment")
                    # The session_scope above has already exited; open a new one
                    with session_scope(DB_PATH) as new_session:
                        message = new_session.query(Messages).filter(
                            Messages.message_id == message_id).first()
                        if message:
                            message.file_progress = "Error decrypting attachment. Check log."
                            new_session.commit()

            # Get the number of files in the zip archive
            try:
                file_amount_test = count_files_in_zip(message_id, full_path_filename)
            except Exception as err:
                with session_scope(DB_PATH) as new_session:
                    message = new_session.query(Messages).filter(
                        Messages.message_id == message_id).first()
                    if message:
                        message.file_progress = "Error checking zip: {}".format(
                            message_id[-config.ID_LENGTH:].upper(), err)
                        new_session.commit()
                logger.error("{}: Error checking zip: {}".format(
                    message_id[-config.ID_LENGTH:].upper(), err))
                file_do_not_download = True
                return (file_download_successful,
                        file_size,
                        file_amount,
                        file_do_not_download,
                        file_sha256_hashes_match,
                        file_progress,
                        media_info,
                        message_steg)

            if file_amount_test:
                file_amount = file_amount_test

            if file_amount and file_amount > config.FILE_ATTACHMENTS_MAX:
                logger.info("{}: Number of attachments ({}) exceed the maximum ({}).".format(
                    message_id[-config.ID_LENGTH:].upper(), file_amount, config.FILE_ATTACHMENTS_MAX))
                file_do_not_download = True
                return (file_download_successful,
                        file_size,
                        file_amount,
                        file_do_not_download,
                        file_sha256_hashes_match,
                        file_progress,
                        media_info,
                        message_steg)

            # Check size of zip contents before extraction
            can_extract = True
            with zipfile.ZipFile(full_path_filename, 'r') as zipObj:
                total_size = 0
                for each_file in zipObj.infolist():
                    total_size += each_file.file_size
                logger.info("ZIP contents size: {}".format(total_size))
                with session_scope(DB_PATH) as new_session:
                    settings = new_session.query(GlobalSettings).first()
                    if (settings.max_extract_size and
                            total_size > settings.max_extract_size * 1024 * 1024):
                        can_extract = False
                        logger.error(
                            "ZIP content size greater than max allowed ({} bytes). " 
                            "Not extracting.".format(settings.max_extract_size * 1024 * 1024))
                        file_download_successful = False
                        message = new_session.query(Messages).filter(
                            Messages.message_id == message_id).first()
                        if message:
                            message.file_progress = "Attachment extraction size greater than allowed"
                            new_session.commit()

            if can_extract:
                # Extract zip archive
                extract_path = "{}/{}".format(config.FILE_DIRECTORY, message_id)
                extract_zip(message_id, full_path_filename, extract_path)
                delete_file(full_path_filename)  # Secure delete

                errors_files, media_info, message_steg = process_attachments(message_id, extract_path)

                if errors_files:
                    logger.error(
                        "{}: File extension greater than {} characters. Deleting.".format(
                            message_id[-config.ID_LENGTH:].upper(), config.MAX_FILE_EXT_LENGTH))
                    delete_files_recursive(extract_path)
                    file_do_not_download = True
                    return (file_download_successful,
                            file_size,
                            file_amount,
                            file_do_not_download,
                            file_sha256_hashes_match,
                            file_progress,
                            media_info,
                            message_steg)

                with session_scope(DB_PATH) as new_session:
                    message = new_session.query(Messages).filter(
                        Messages.message_id == message_id).first()
                    if message:
                        message.file_progress = "Attachment processing successful"
                        new_session.commit()

        delete_file(download_path)

    return (file_download_successful,
            file_size,
            file_amount,
            file_do_not_download,
            file_sha256_hashes_match,
            file_progress,
            media_info,
            message_steg)
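
# A minimal standalone sketch of the extraction guard used above, assuming only
# the Python standard library; "max_bytes" stands in for BitChan's
# settings.max_extract_size * 1024 * 1024. The uncompressed sizes recorded in
# the archive's central directory are summed before anything is extracted, so a
# small zip that would inflate past the threshold is rejected up front.
import zipfile

def safe_to_extract(zip_path, max_bytes):
    """Return True if the archive's total uncompressed size is within max_bytes."""
    with zipfile.ZipFile(zip_path, 'r') as zip_obj:
        total_size = sum(info.file_size for info in zip_obj.infolist())
    return total_size <= max_bytes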
Example n. 7
def parse_message(message_id, json_obj):
    file_decoded = None
    file_filename = None
    file_url_type = None
    file_url = None
    file_upload_settings = None
    file_extracts_start_base64 = None
    file_size = None
    file_amount = None
    file_sha256_hash = None
    file_enc_cipher = None
    file_enc_key_bytes = None
    file_enc_password = None
    file_sha256_hashes_match = False
    file_download_successful = False
    file_order = None
    file_progress = None
    media_info = {}
    upload_filename = None
    saved_file_filename = None
    saved_image_thumb_filename = None
    image1_spoiler = None
    image2_spoiler = None
    image3_spoiler = None
    image4_spoiler = None
    op_sha256_hash = None
    sage = None
    message = None
    nation = None
    nation_base64 = None
    nation_name = None
    message_steg = {}
    file_do_not_download = False
    file_path = None

    dict_msg = json_obj['message_decrypted']

    # SHA256 hash of the original encrypted message payload to identify the OP of the thread.
    # Each reply must identify the thread it's replying to by supplying the OP hash.
    # If the OP hash doesn't exist, a new thread is created.
    # This prevents OP hijacking by impersonating an OP with an earlier send timestamp.
    message_sha256_hash = hashlib.sha256(
        json.dumps(json_obj['message']).encode('utf-8')).hexdigest()
    # logger.info("Message SHA256: {}".format(message_sha256_hash))

    # Check if message properly formatted, delete if not.
    if "subject" not in dict_msg or not dict_msg["subject"]:
        logger.error("{}: Message missing required subject. Deleting.".format(
            message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return
    else:
        subject_raw = base64.b64decode(dict_msg["subject"]).decode('utf-8')
        if len(subject_raw) > 64:
            logger.error("{}: Subject too large. Deleting.".format(
                message_id[-config.ID_LENGTH:].upper()))
            daemon_com.trash_message(message_id)
            return
        subject = html.escape(subject_raw).strip()

    if "version" not in dict_msg or not dict_msg["version"]:
        logger.error("{}: Message has no version. Deleting.".format(
            message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return
    else:
        version = dict_msg["version"]

    # logger.info("dict_msg: {}".format(dict_msg))

    # Determine if message indicates if it's OP or not
    if "is_op" in dict_msg and dict_msg["is_op"]:
        is_op = dict_msg["is_op"]
    else:
        is_op = False
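        # sage: reply shouldn't bump the thread (timestamp updates below skip it)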
        if "sage" in dict_msg and dict_msg["sage"]:
            sage = True

    # Determine if message indicates if it's a reply to an OP by supplying OP hash
    if "op_sha256_hash" in dict_msg and dict_msg["op_sha256_hash"]:
        op_sha256_hash = dict_msg["op_sha256_hash"]

    # Determine if message is an OP or a reply
    if is_op:
        thread_id = get_thread_id(message_sha256_hash)
    elif op_sha256_hash:
        thread_id = get_thread_id(op_sha256_hash)
    else:
        logger.error("{}: Message neither OP nor reply: Deleting.".format(
            message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return

    # Now that the thread_id is determined, check whether an Admin command
    # exists instructing the deletion of the thread/message
    with session_scope(DB_PATH) as new_session:
        admin_post_delete = new_session.query(Command).filter(
            and_(Command.action == "delete", Command.action_type == "post",
                 Command.chan_address == json_obj['toAddress'],
                 Command.thread_id == thread_id,
                 Command.message_id == message_id)).first()

        admin_thread_delete = new_session.query(Command).filter(
            and_(Command.action == "delete", Command.action_type == "thread",
                 Command.chan_address == json_obj['toAddress'],
                 Command.thread_id == thread_id)).first()

        if admin_post_delete or admin_thread_delete:
            logger.error("{}: Admin deleted this post or thread".format(
                message_id[-config.ID_LENGTH:].upper()))
            daemon_com.trash_message(message_id)
            return

    if ("timestamp_utc" in dict_msg and dict_msg["timestamp_utc"]
            and isinstance(dict_msg["timestamp_utc"], int)):
        timestamp_sent = dict_msg["timestamp_utc"]
    else:
        timestamp_sent = int(json_obj['receivedTime'])

    log_age_and_expiration(message_id, daemon_com.get_utc(), timestamp_sent,
                           get_msg_expires_time(message_id))

    # Check if board is set to automatically clear and message is older than the last clearing
    if chan_auto_clears_and_message_too_old(json_obj['toAddress'],
                                            timestamp_sent):
        logger.info(
            "{}: Message outside current auto clear period. Deleting.".format(
                message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return

    if "message" in dict_msg and dict_msg["message"]:
        message = dict_msg["message"]
    if "file_filename" in dict_msg and dict_msg["file_filename"]:
        file_filename = dict_msg["file_filename"]
        logger.info("{} Filename on post: {}".format(
            message_id[-config.ID_LENGTH:].upper(), dict_msg["file_filename"]))
    if "image1_spoiler" in dict_msg and dict_msg["image1_spoiler"]:
        image1_spoiler = dict_msg["image1_spoiler"]
    if "image2_spoiler" in dict_msg and dict_msg["image2_spoiler"]:
        image2_spoiler = dict_msg["image2_spoiler"]
    if "image3_spoiler" in dict_msg and dict_msg["image3_spoiler"]:
        image3_spoiler = dict_msg["image3_spoiler"]
    if "image4_spoiler" in dict_msg and dict_msg["image4_spoiler"]:
        image4_spoiler = dict_msg["image4_spoiler"]
    if "upload_filename" in dict_msg and dict_msg["upload_filename"]:
        upload_filename = dict_msg["upload_filename"]
    if "file_size" in dict_msg and dict_msg["file_size"]:
        file_size = dict_msg["file_size"]
    if "file_amount" in dict_msg and dict_msg["file_amount"]:
        file_amount = dict_msg["file_amount"]
    if "file_url" in dict_msg and dict_msg["file_url"]:
        file_url = dict_msg["file_url"]
    if "file_url_type" in dict_msg and dict_msg["file_url_type"]:
        file_url_type = dict_msg["file_url_type"]
    if "file_upload_settings" in dict_msg and dict_msg["file_upload_settings"]:
        file_upload_settings = dict_msg["file_upload_settings"]
    if "file_extracts_start_base64" in dict_msg and dict_msg[
            "file_extracts_start_base64"] is not None:
        file_extracts_start_base64 = json.loads(
            dict_msg["file_extracts_start_base64"])
    if "file_base64" in dict_msg and dict_msg["file_base64"] is not None:
        try:
            file_decoded = base64.b64decode(dict_msg["file_base64"])
            file_size = len(file_decoded)
        except Exception as err:
            logger.exception("{}: Exception decoding attachments: {}".format(
                message_id[-config.ID_LENGTH:].upper(), err))
    if "file_sha256_hash" in dict_msg and dict_msg["file_sha256_hash"]:
        file_sha256_hash = dict_msg["file_sha256_hash"]
    if "file_enc_cipher" in dict_msg and dict_msg["file_enc_cipher"]:
        file_enc_cipher = dict_msg["file_enc_cipher"]
    if "file_enc_key_bytes" in dict_msg and dict_msg["file_enc_key_bytes"]:
        file_enc_key_bytes = dict_msg["file_enc_key_bytes"]
    if "file_enc_password" in dict_msg and dict_msg["file_enc_password"]:
        file_enc_password = dict_msg["file_enc_password"]
    if "file_order" in dict_msg and dict_msg["file_order"]:
        file_order = dict_msg["file_order"]

    if "nation" in dict_msg and dict_msg["nation"]:
        nation = dict_msg["nation"]
    if "nation_base64" in dict_msg and dict_msg["nation_base64"]:
        nation_base64 = dict_msg["nation_base64"]
    if "nation_name" in dict_msg and dict_msg["nation_name"]:
        nation_name = dict_msg["nation_name"]

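    # Reject posts claiming more than 4 attachments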
    if ((file_amount and file_amount > 4)
            or (file_order and len(file_order) > 4)):
        logger.error(
            "{}: More than 4 files found in message. Deleting.".format(
                message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return

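    # Validate any custom flag: image dimensions, file size, and flag name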
    if nation_base64:
        flag_pass = True
        try:
            flag_data = base64.b64decode(nation_base64)
            flag = Image.open(BytesIO(flag_data))
            flag_width, flag_height = flag.size
            if flag_width > config.FLAG_MAX_WIDTH or flag_height > config.FLAG_MAX_HEIGHT:
                flag_pass = False
                logger.error(
                    "Flag dimensions are too large (max {}x{}): {}x{}".format(
                        config.FLAG_MAX_WIDTH, config.FLAG_MAX_HEIGHT,
                        flag_width, flag_height))
            if len(flag_data) > config.FLAG_MAX_SIZE:
                flag_pass = False
                logger.error(
                    "Flag file size is too large: {}. Must be at most {} bytes.".format(
                        len(flag_data), config.FLAG_MAX_SIZE))
        except Exception:
            flag_pass = False
            logger.error("Error attempting to open flag image")

        if not nation_name:
            flag_pass = False
            logger.error("{}: Flag name not found".format(
                message_id[-config.ID_LENGTH:].upper()))
        elif len(nation_name) > 64:
            flag_pass = False
            logger.error("{}: Flag name too long: {}".format(
                message_id[-config.ID_LENGTH:].upper(), nation_name))

        if not flag_pass:
            logger.error(
                "{}: Base64 flag didn't pass validation. Deleting.".format(
                    message_id[-config.ID_LENGTH:].upper()))
            daemon_com.trash_message(message_id)
            return

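    # An attachment accompanies the post: prepare its save directory and zip path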
    if file_url or file_decoded:
        save_dir = "{}/{}".format(config.FILE_DIRECTORY, message_id)
        os.makedirs(save_dir, exist_ok=True)
        saved_file_filename = "{}.zip".format(message_id)
        file_path = "{}/{}".format(config.FILE_DIRECTORY, saved_file_filename)

    if file_url:
        # Attachment is hosted externally; download unless already on disk
        logger.info("{}: Filename on disk: {}".format(
            message_id[-config.ID_LENGTH:].upper(), saved_file_filename))

        if os.path.exists(file_path) and os.path.getsize(file_path) != 0:
            logger.info(
                "{}: Downloaded zip file found. Not attempting to download.".
                format(message_id[-config.ID_LENGTH:].upper()))
            file_size = os.path.getsize(file_path)
            file_download_successful = True
            extract_zip(message_id, file_path, save_dir)
        else:
            logger.info("{}: File not found. Attempting to download.".format(
                message_id[-config.ID_LENGTH:].upper()))
            logger.info("{}: Downloading file url: {}".format(
                message_id[-config.ID_LENGTH:].upper(), file_url))

            if upload_filename and file_url_type and file_upload_settings:
                # Pick a download slot to fill (2 slots per domain)
                domain = urlparse(file_url).netloc
                lockfile1 = "/var/lock/upload_{}_1.lock".format(domain)
                lockfile2 = "/var/lock/upload_{}_2.lock".format(domain)

                lf = LF()
                lockfile = random.choice([lockfile1, lockfile2])
                if lf.lock_acquire(lockfile, to=600):
                    try:
                        (file_download_successful, file_size_test,
                         file_amount_test, file_do_not_download,
                         file_sha256_hashes_match, file_progress, media_info,
                         message_steg) = download_and_extract(
                             json_obj['toAddress'], message_id, file_url,
                             file_upload_settings, file_extracts_start_base64,
                             upload_filename, file_path, file_sha256_hash,
                             file_enc_cipher, file_enc_key_bytes,
                             file_enc_password)

                        if file_size_test:
                            file_size = file_size_test

                        if file_amount_test:
                            file_amount = file_amount_test
                    finally:
                        lf.lock_release(lockfile)

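        # Vet extracted filenames and generate thumbnails for image attachments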
        if file_download_successful:
            for dirpath, dirnames, filenames in os.walk(save_dir):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    if os.path.islink(fp):  # skip symbolic links
                        continue

                    file_extension = html.escape(
                        os.path.splitext(f)[1].split(".")[-1].lower())
                    if not file_extension:
                        logger.error(
                            "{}: File extension not found. Deleting.".format(
                                message_id[-config.ID_LENGTH:].upper()))
                        daemon_com.trash_message(message_id)
                        return
                    elif len(file_extension) >= config.MAX_FILE_EXT_LENGTH:
                        logger.error(
                            "{}: File extension greater than {} characters. Deleting."
                            .format(message_id[-config.ID_LENGTH:].upper(),
                                    config.MAX_FILE_EXT_LENGTH))
                        daemon_com.trash_message(message_id)
                        return
                    if file_extension in config.FILE_EXTENSIONS_IMAGE:
                        saved_image_thumb_filename = "{}_thumb.{}".format(
                            message_id, file_extension)
                        img_thumb_filename = "{}/{}".format(
                            save_dir, saved_image_thumb_filename)
                        generate_thumbnail(message_id, fp, img_thumb_filename,
                                           file_extension)

    # Bitmessage attachment
    if file_decoded:
        encrypted_zip = "/tmp/{}.zip".format(
            get_random_alphanumeric_string(12,
                                           with_punctuation=False,
                                           with_spaces=False))
        with open(encrypted_zip, 'wb') as output_file:
            output_file.write(file_decoded)

        if file_enc_cipher == "NONE":
            logger.info("{}: File not encrypted".format(
                message_id[-config.ID_LENGTH:].upper()))
            decrypted_zip = encrypted_zip
        elif file_enc_password:
            # decrypt file
            decrypted_zip = "/tmp/{}.zip".format(
                get_random_alphanumeric_string(12,
                                               with_punctuation=False,
                                               with_spaces=False))
            delete_file(decrypted_zip)  # make sure no file already exists
            logger.info("{}: Decrypting file".format(
                message_id[-config.ID_LENGTH:].upper()))

            try:
                with session_scope(DB_PATH) as new_session:
                    settings = new_session.query(GlobalSettings).first()
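                    # The per-file password is combined with the board-wide
                    # attachment passphrase; output is capped at the extract limit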
                    ret_crypto = crypto_multi_decrypt(
                        file_enc_cipher,
                        file_enc_password + config.PGP_PASSPHRASE_ATTACH,
                        encrypted_zip,
                        decrypted_zip,
                        key_bytes=file_enc_key_bytes,
                        max_size_bytes=settings.max_extract_size * 1024 * 1024)
                    if not ret_crypto:
                        logger.error("{}: Issue decrypting file")
                        return
                    else:
                        logger.info("{}: Finished decrypting file".format(
                            message_id[-config.ID_LENGTH:].upper()))

                    delete_file(encrypted_zip)
            except Exception:
                logger.exception("Error decrypting file")

        # Get the number of files in the zip archive
        try:
            file_amount_test = count_files_in_zip(message_id, decrypted_zip)
        except Exception as err:
            file_amount_test = None
            logger.error("{}: Error checking zip: {}".format(
                message_id[-config.ID_LENGTH:].upper(), err))

        if file_amount_test:
            file_amount = file_amount_test

        if file_amount and file_amount > config.FILE_ATTACHMENTS_MAX:
            logger.info(
                "{}: Number of attachments ({}) exceeds the maximum ({}).".
                format(message_id[-config.ID_LENGTH:].upper(), file_amount,
                       config.FILE_ATTACHMENTS_MAX))
            daemon_com.trash_message(message_id)
            return

        # Check size of zip contents before extraction
        can_extract = True
        with zipfile.ZipFile(decrypted_zip, 'r') as zipObj:
            total_size = 0
            for each_file in zipObj.infolist():
                total_size += each_file.file_size
            logger.info("ZIP contents size: {}".format(total_size))
            with session_scope(DB_PATH) as new_session:
                settings = new_session.query(GlobalSettings).first()
                if (settings.max_extract_size and
                        total_size > settings.max_extract_size * 1024 * 1024):
                    can_extract = False
                    logger.error(
                        "ZIP content size greater than max allowed ({} bytes). "
                        "Not extracting.".format(settings.max_extract_size *
                                                 1024 * 1024))

        if can_extract:
            # Extract zip archive
            extract_path = "{}/{}".format(config.FILE_DIRECTORY, message_id)
            extract_zip(message_id, decrypted_zip, extract_path)
            delete_file(decrypted_zip)  # Secure delete

            errors_files, media_info, message_steg = process_attachments(
                message_id, extract_path)

            if errors_files:
                logger.error(
                    "{}: File extension greater than {} characters. Deleting.".
                    format(message_id[-config.ID_LENGTH:].upper(),
                           config.MAX_FILE_EXT_LENGTH))
                delete_files_recursive(extract_path)
                daemon_com.trash_message(message_id)
                return

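    # Enforce remote (admin) and local thread locks/anchors before storing the post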
    thread_locked = False
    thread_anchored = False
    owner_posting = False
    with session_scope(DB_PATH) as new_session:
        try:
            thread = new_session.query(Threads).filter(
                Threads.thread_hash == thread_id).first()

            if thread:
                admin_cmd = new_session.query(Command).filter(
                    and_(Command.action == "set",
                         Command.action_type == "thread_options",
                         Command.thread_id == thread.thread_hash)).first()
                if admin_cmd:
                    # Check for remote thread lock
                    if (admin_cmd.thread_lock and admin_cmd.thread_lock_ts
                            and timestamp_sent > admin_cmd.thread_lock_ts):
                        thread_locked = "Post timestamp is after remote lock. Deleting."

                    # Check for remote thread anchor
                    if (admin_cmd.thread_anchor and admin_cmd.thread_anchor_ts
                            and timestamp_sent > admin_cmd.thread_anchor_ts):
                        thread_anchored = "Post timestamp is after remote anchor. Not updating thread timestamp."

                # Check for local thread lock
                if thread.locked_local and timestamp_sent > thread.locked_local_ts:
                    thread_locked = "Post timestamp is after local lock. Deleting."

                # Check for local thread anchor
                if thread.anchored_local and timestamp_sent > thread.anchored_local_ts:
                    thread_anchored = "Post timestamp is after local anchor. Not updating thread timestamp."

            if thread_locked:
                chan = new_session.query(Chan).filter(
                    Chan.address == json_obj['toAddress']).first()
                if chan:
                    access = get_access(json_obj['toAddress'])
                    if json_obj['fromAddress'] in access["primary_addresses"]:
                        owner_posting = True
                        logger.error(
                            "{}: Owner posting in locked thread. Allowing.".
                            format(message_id[-config.ID_LENGTH:].upper()))
        except Exception:
            logger.exception("Checking thread lock")

    if thread_locked and not owner_posting:
        logger.info(thread_locked)
        daemon_com.trash_message(message_id)
        return

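    # Store the post, creating its thread first if one doesn't exist yet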
    with session_scope(DB_PATH) as new_session:
        try:
            chan = new_session.query(Chan).filter(
                Chan.address == json_obj['toAddress']).first()
            chan.last_post_number = chan.last_post_number + 1

            thread = new_session.query(Threads).filter(
                Threads.thread_hash == thread_id).first()

            if not thread and is_op:  # OP received, create new thread
                new_thread = Threads()
                new_thread.thread_hash = thread_id
                new_thread.thread_hash_short = thread_id[-12:]
                new_thread.op_sha256_hash = message_sha256_hash
                if chan:
                    new_thread.chan_id = chan.id
                new_thread.subject = subject
                new_thread.timestamp_sent = timestamp_sent
                new_thread.timestamp_received = int(json_obj['receivedTime'])
                new_session.add(new_thread)

                if timestamp_sent > chan.timestamp_sent:
                    chan.timestamp_sent = timestamp_sent
                if int(json_obj['receivedTime']) > chan.timestamp_received:
                    chan.timestamp_received = int(json_obj['receivedTime'])

                new_session.commit()
                id_thread = new_thread.id

            elif not thread and not is_op:  # Reply received before OP, create thread with OP placeholder
                new_thread = Threads()
                new_thread.thread_hash = thread_id
                new_thread.thread_hash_short = thread_id[-12:]
                new_thread.op_sha256_hash = op_sha256_hash
                if chan:
                    new_thread.chan_id = chan.id
                new_thread.subject = subject
                new_thread.timestamp_sent = timestamp_sent
                new_thread.timestamp_received = int(json_obj['receivedTime'])
                new_session.add(new_thread)

                if timestamp_sent > chan.timestamp_sent:
                    chan.timestamp_sent = timestamp_sent
                if int(json_obj['receivedTime']) > chan.timestamp_received:
                    chan.timestamp_received = int(json_obj['receivedTime'])

                new_session.commit()
                id_thread = new_thread.id

            elif thread and not is_op:  # Reply received after OP, add to current thread
                if thread_anchored:
                    logger.info(thread_anchored)

                if timestamp_sent > thread.timestamp_sent:
                    if not sage and not thread_anchored:
                        thread.timestamp_sent = timestamp_sent
                if int(json_obj['receivedTime']) > thread.timestamp_received:
                    if not sage and not thread_anchored:
                        thread.timestamp_received = int(
                            json_obj['receivedTime'])

                if timestamp_sent > chan.timestamp_sent:
                    if not sage and not thread_anchored:
                        chan.timestamp_sent = timestamp_sent
                if int(json_obj['receivedTime']) > chan.timestamp_received:
                    if not sage and not thread_anchored:
                        chan.timestamp_received = int(json_obj['receivedTime'])

                new_session.commit()
                id_thread = thread.id

            elif thread and is_op:
                # Post indicating it is OP but thread already exists
                # Could have received reply before OP
                # Add OP to current thread
                id_thread = thread.id

            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_STORE_POST, to=20):
                try:
                    # Create message
                    new_msg = Messages()
                    new_msg.version = version
                    new_msg.message_id = message_id
                    new_msg.post_id = get_post_id(message_id)
                    new_msg.post_number = chan.last_post_number
                    new_msg.expires_time = get_msg_expires_time(message_id)
                    new_msg.thread_id = id_thread
                    new_msg.address_from = bleach.clean(
                        json_obj['fromAddress'])
                    new_msg.message_sha256_hash = message_sha256_hash
                    new_msg.is_op = is_op
                    if sage:
                        new_msg.sage = sage
                    new_msg.message = message
                    new_msg.subject = subject
                    new_msg.nation = nation
                    new_msg.nation_base64 = nation_base64
                    new_msg.nation_name = nation_name
                    if file_decoded == b"":  # Empty file
                        new_msg.file_decoded = b" "
                    else:
                        new_msg.file_decoded = file_decoded
                    new_msg.file_filename = file_filename
                    new_msg.file_url = file_url
                    new_msg.file_upload_settings = json.dumps(
                        file_upload_settings)
                    new_msg.file_extracts_start_base64 = json.dumps(
                        file_extracts_start_base64)
                    new_msg.file_size = file_size
                    new_msg.file_amount = file_amount
                    new_msg.file_do_not_download = file_do_not_download
                    new_msg.file_progress = file_progress
                    new_msg.file_sha256_hash = file_sha256_hash
                    new_msg.file_enc_cipher = file_enc_cipher
                    new_msg.file_enc_key_bytes = file_enc_key_bytes
                    new_msg.file_enc_password = file_enc_password
                    new_msg.file_sha256_hashes_match = file_sha256_hashes_match
                    new_msg.file_order = json.dumps(file_order)
                    new_msg.file_download_successful = file_download_successful
                    new_msg.upload_filename = upload_filename
                    new_msg.saved_file_filename = saved_file_filename
                    new_msg.saved_image_thumb_filename = saved_image_thumb_filename
                    new_msg.image1_spoiler = image1_spoiler
                    new_msg.image2_spoiler = image2_spoiler
                    new_msg.image3_spoiler = image3_spoiler
                    new_msg.image4_spoiler = image4_spoiler
                    new_msg.timestamp_received = int(json_obj['receivedTime'])
                    new_msg.timestamp_sent = timestamp_sent
                    new_msg.media_info = json.dumps(media_info)
                    new_msg.message_steg = json.dumps(message_steg)
                    new_msg.message_original = json_obj["message"]
                    new_session.add(new_msg)

                    if timestamp_sent > chan.timestamp_sent:
                        chan.timestamp_sent = timestamp_sent
                    if int(json_obj['receivedTime']) > chan.timestamp_received:
                        chan.timestamp_received = int(json_obj['receivedTime'])

                    new_session.commit()

                    message_edit = new_session.query(Messages).filter(
                        Messages.message_id == message_id).first()
                    try:
                        message_edit.popup_html = generate_reply_link_html(
                            message_edit)
                        new_session.commit()
                    except Exception as err:
                        logger.exception(
                            "{}: Couldn't generate popup HTML: {}".format(
                                message_id[-config.ID_LENGTH:].upper(), err))

                    process_message_replies(message_id, message)

                    # Determine if an admin command to delete with comment is present
                    # Replace comment and delete file information
                    commands = new_session.query(Command).filter(
                        and_(Command.action == "delete_comment",
                             Command.action_type == "post",
                             Command.chan_address ==
                             json_obj['toAddress'])).all()
                    for each_cmd in commands:
                        try:
                            options = json.loads(each_cmd.options)
                        except Exception:
                            options = {}
                        if ("delete_comment" in options
                                and "message_id" in options["delete_comment"]
                                and options["delete_comment"]["message_id"]
                                == message_id
                                and "comment" in options["delete_comment"]):

                            if "from_address" in options["delete_comment"]:
                                from_address = options["delete_comment"][
                                    "from_address"]
                            else:
                                from_address = json_obj['fromAddress']

                            # replace comment
                            delete_and_replace_comment(
                                options["delete_comment"]["message_id"],
                                options["delete_comment"]["comment"],
                                from_address=from_address,
                                local_delete=False)

                    # Generate card
                    generate_card(thread_id, force_generate=True)
                except Exception:
                    logger.exception("Saving message to DB")
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_STORE_POST)

            # Delete message from Bitmessage after parsing and adding to BitChan database
            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_API, to=120):
                try:
                    return_val = api.trashMessage(message_id)
                except Exception as err:
                    logger.error(
                        "{}: Exception during message delete: {}".format(
                            message_id[-config.ID_LENGTH:].upper(), err))
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_API)
        except Exception as err:
            logger.exception(
                "{}: Could not write to database. Deleting. Error: {}".format(
                    message_id[-config.ID_LENGTH:].upper(), err))
            daemon_com.trash_message(message_id)
            return
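
# A minimal sketch of the "two download slots per domain" pattern used when
# downloading attachments above, assuming a hypothetical flock-based helper
# rather than BitChan's LF class (Unix-only, and /var/lock must be writable).
# Each domain gets two lock files and a worker randomly claims one, so at most
# two downloads per host run concurrently. Note: as in the original, a worker
# may wait on its chosen slot even if the other slot is free.
import fcntl
import random
from urllib.parse import urlparse

def acquire_domain_slot(file_url):
    """Block until one of the domain's two download slots can be claimed."""
    domain = urlparse(file_url).netloc
    lockfile = random.choice(
        ["/var/lock/upload_{}_{}.lock".format(domain, n) for n in (1, 2)])
    handle = open(lockfile, 'w')
    fcntl.flock(handle, fcntl.LOCK_EX)  # blocks while the chosen slot is held
    return handle  # release with fcntl.flock(handle, fcntl.LOCK_UN); handle.close()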