Example #1
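    # Method that allows a post's attachment to be downloaded: one of two
    # per-domain lock slots is acquired, the file is downloaded and extracted,
    # and the result is recorded on the Messages row; the finally block always
    # clears the file_currently_downloading flag.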
    def allow_download(self):
        try:
            logger.info("{}: Allowing download".format(
                self.message_id[-config.ID_LENGTH:].upper()))
            with session_scope(DB_PATH) as new_session:
                message = new_session.query(Messages).filter(
                    Messages.message_id == self.message_id).first()
                if message:
                    file_path = "{}/{}".format(config.FILE_DIRECTORY,
                                               message.saved_file_filename)

                    # Pick a download slot to fill (2 slots per domain)
                    domain = urlparse(message.file_url).netloc
                    lockfile1 = "/var/lock/upload_{}_1.lock".format(domain)
                    lockfile2 = "/var/lock/upload_{}_2.lock".format(domain)

                    lf = LF()
                    lockfile = random.choice([lockfile1, lockfile2])
                    # Defaults so these names exist even if the lock times out
                    (file_download_successful, file_size, file_amount,
                     file_do_not_download, file_sha256_hashes_match,
                     media_info, message_steg) = (None,) * 7
                    if lf.lock_acquire(lockfile, to=600):
                        try:
                            (file_download_successful, file_size, file_amount,
                             file_do_not_download, file_sha256_hashes_match,
                             media_info, message_steg) = download_and_extract(
                                 message.thread.chan.address, self.message_id,
                                 message.file_url,
                                 json.loads(message.file_upload_settings),
                                 json.loads(
                                     message.file_extracts_start_base64),
                                 message.upload_filename, file_path,
                                 message.file_sha256_hash,
                                 message.file_enc_cipher,
                                 message.file_enc_key_bytes,
                                 message.file_enc_password)
                        finally:
                            lf.lock_release(lockfile)

                    if file_download_successful:
                        if file_size:
                            message.file_size = file_size
                        if file_amount:
                            message.file_amount = file_amount
                        message.file_download_successful = file_download_successful
                        message.file_do_not_download = file_do_not_download
                        message.file_sha256_hashes_match = file_sha256_hashes_match
                        message.media_info = json.dumps(media_info)
                        message.message_steg = json.dumps(message_steg)
                        new_session.commit()
        except Exception as e:
            logger.error("{}: Error allowing download: {}".format(
                self.message_id[-config.ID_LENGTH:].upper(), e))
        finally:
            with session_scope(DB_PATH) as new_session:
                message = new_session.query(Messages).filter(
                    Messages.message_id == self.message_id).first()
                if message:
                    message.file_currently_downloading = False
                    new_session.commit()
Example #2
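# Replaces up to two "#stich" matches per message with a random literary quote
# (stichomancy). Quote lookups are serialized with a lock file, and the seed is
# derived from the message ID and match position so results are deterministic.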
def replace_stich(text, message_id):
    lines = text.split("\n")
    regex = r"(?:\A|\s)(?i)#stich"
    stichomancy_lf = "/var/lock/stichomancy.lock"
    lf = LF()

    find_count = 1
    lines_finds = IterFinds(lines, regex)
    for line_index, i in lines_finds:
        for match_index, each_find in enumerate(
                re.finditer(regex, lines[line_index])):
            if find_count > 2:  # Process max of 2 per message
                return "\n".join(lines)
            elif match_index == i:
                match = lines[line_index][each_find.start():each_find.end()]
                start_string = lines[line_index][:each_find.start()]
                end_string = lines[line_index][each_find.end():]
                count = 0
                try:
                    while True:
                        count += 1
                        new_seed = "{}{}{}{}".format(message_id, line_index, i,
                                                     match_index)
                        random_quote = None
                        quote = None
                        author = None
                        title = None
                        url = None
                        if lf.lock_acquire(stichomancy_lf, to=600):
                            try:
                                _, quote, url, title, author = stichomancy_pull(
                                    new_seed)
                            except:
                                logger.exception("getting quote")
                            finally:
                                lf.lock_release(stichomancy_lf)
                        if quote:
                            title_str = title
                            if author and "Various" not in author:
                                title_str += " by {}".format(author)

                            random_quote = "\"{quote}\" -<a class=\"link\" href=\"{url}\">{title}</a>".format(
                                quote=quote, url=url, title=title_str)
                        if random_quote or count > 5:
                            break
                except:
                    time.sleep(3)
                    continue
                middle_string = ' <span class="replace-funcs">{}({})</span>'.format(
                    match, random_quote)

                find_count += 1
                lines[line_index] = start_string + middle_string + end_string

    return "\n".join(lines)
Example #3
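# Fetches inbox or sent messages for an address through the Bitmessage API
# (under the API lock), sorts them newest first, and returns the requested
# page along with the full sorted list.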
def get_messages_from_page(mailbox, page, address):
    messages_sorted = []
    messages_page = []

    settings = GlobalSettings.query.first()

    if mailbox == "inbox":
        lf = LF()
        if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
            try:
                messages = api.getInboxMessagesByReceiver(address)
                # Sort messages
                if "inboxMessages" in messages:
                    messages_sorted = sorted(messages["inboxMessages"],
                                             key=itemgetter('receivedTime'),
                                             reverse=True)
            except Exception as err:
                logger.error("Error: {}".format(err))
            finally:
                time.sleep(config.API_PAUSE)
                lf.lock_release(config.LOCKFILE_API)
    elif mailbox == "sent":
        lf = LF()
        if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
            try:
                messages = api.getSentMessagesBySender(address)
                # Sort messages
                if "sentMessages" in messages:
                    messages_sorted = sorted(messages["sentMessages"],
                                             key=itemgetter('lastActionTime'),
                                             reverse=True)
            except Exception as err:
                logger.error("Error: {}".format(err))
            finally:
                time.sleep(config.API_PAUSE)
                lf.lock_release(config.LOCKFILE_API)

    msg_start = int((int(page) - 1) * settings.messages_per_mailbox_page)
    msg_end = int(int(page) * settings.messages_per_mailbox_page) - 1
    for i, msg in enumerate(messages_sorted):
        if msg_start <= i <= msg_end:
            messages_page.append(msg)

    return messages_page, messages_sorted
Example #4
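# Flask route for the Address Book: add, rename, or delete entries through the
# Bitmessage API (under the API lock) and mirror the change in the local
# AddressBook table.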
def address_book():
    global_admin, allow_msg = allowed_access(
        check_is_global_admin=True)
    if not global_admin:
        return allow_msg

    form_addres_book = forms_settings.AddressBook()
    form_confirm = forms_board.Confirm()

    status_msg = session.get('status_msg', {"status_message": []})

    if request.method == 'GET':
        if 'status_msg' in session:
            session.pop('status_msg')

    elif request.method == 'POST':
        if form_addres_book.add.data:
            if not form_addres_book.label.data or not form_addres_book.address.data:
                status_msg['status_message'].append("Label and address required")

            if not status_msg['status_message']:
                lf = LF()
                if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
                    try:
                        label = base64.b64encode(form_addres_book.label.data.encode()).decode()
                        try:
                            return_str = api.addAddressBookEntry(
                                form_addres_book.address.data, label)
                        except Exception as e:
                            if e:
                                return_str = "Could not add to Address Book: {}".format(e)
                            else:
                                return_str = "Not a valid address?"

                        if return_str:
                            if "Added address" in return_str:
                                new_add_book = AddressBook()
                                new_add_book.address = form_addres_book.address.data
                                new_add_book.label = form_addres_book.label.data
                                new_add_book.save()

                                daemon_com.refresh_address_book()
                                status_msg['status_title'] = "Success"
                                status_msg['status_message'].append(
                                    "Added Address Book entry {}".format(
                                        form_addres_book.label.data))
                                status_msg['status_message'].append(
                                    "Give the system a few seconds for the change to take effect.")
                            else:
                                status_msg['status_message'].append(return_str)
                        else:
                            status_msg['status_message'].append(
                                "Error creating Address Book entry")
                    finally:
                        time.sleep(config.API_PAUSE)
                        lf.lock_release(config.LOCKFILE_API)

        elif form_addres_book.rename.data:
            if not form_addres_book.add_label.data or not form_addres_book.address.data:
                status_msg['status_message'].append("Label and address required")

            if not status_msg['status_message']:
                add_book = AddressBook.query.filter(
                    AddressBook.address == form_addres_book.address.data).first()
                if add_book:
                    add_book.label = form_addres_book.add_label.data
                    add_book.save()
                    daemon_com.refresh_address_book()
                    status_msg['status_title'] = "Success"
                    status_msg['status_message'].append("Address Book entry renamed.")
                    status_msg['status_message'].append(
                        "Give the system a few seconds for the change to take effect.")

        elif form_addres_book.delete.data:
            add_book = None
            if not form_addres_book.address.data:
                status_msg['status_message'].append("Address required")
            else:
                add_book = AddressBook.query.filter(
                    AddressBook.address == form_addres_book.address.data).first()

            if not form_confirm.confirm.data:
                return render_template("pages/confirm.html",
                                       action="delete_address_book",
                                       add_book=add_book)

            if not status_msg['status_message']:
                lf = LF()
                if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
                    try:
                        return_str = api.deleteAddressBookEntry(form_addres_book.address.data)
                        if "Deleted address book entry" in return_str:
                            if add_book:
                                add_book.delete()
                            daemon_com.refresh_address_book()
                            status_msg['status_title'] = "Success"
                            status_msg['status_message'].append("Address Book entry deleted.")
                            status_msg['status_message'].append(
                                "Give the system a few seconds for the change to take effect.")
                        else:
                            status_msg['status_message'].append(
                                "Error deleting Address Book entry: {}".format(return_str))
                    finally:
                        time.sleep(config.API_PAUSE)
                        lf.lock_release(config.LOCKFILE_API)

        session['status_msg'] = status_msg

        if 'status_title' not in status_msg and status_msg['status_message']:
            status_msg['status_title'] = "Error"

        return redirect(url_for("routes_address_book.address_book"))

    return render_template("pages/address_book.html",
                           form_addres_book=form_addres_book,
                           status_msg=status_msg)
Example #5
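# Prepares and sends a post: strips EXIF, optionally embeds a steg message,
# zips and encrypts attachments, uploads them (or embeds them in the message
# for the Bitmessage upload method), then GPG-encrypts the message JSON and
# queues it through the Bitmessage API.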
def send_message(errors, form_post, form_steg, dict_send):
    """Conduct the file upload and sending of a message"""
    zip_file = "/tmp/{}".format(
        get_random_alphanumeric_string(15,
                                       with_punctuation=False,
                                       with_spaces=False))

    if dict_send["save_dir"]:
        try:
            dict_send[
                "file_enc_cipher"] = form_post.upload_cipher_and_key.data.split(
                    ",")[0]
            dict_send["file_enc_key_bytes"] = int(
                form_post.upload_cipher_and_key.data.split(",")[1])
        except:
            msg = "Unknown cannot parse cipher and key length: {}".format(
                form_post.upload_cipher_and_key.data)
            errors.append(msg)
            logger.error("{}: {}".format(dict_send["post_id"], msg))
            return "Error", errors

        steg_inserted = False
        for i, f in enumerate(dict_send["file_order"], start=1):
            if not f:
                continue

            fp = os.path.join(dict_send["save_dir"], f)
            file_extension = html.escape(
                os.path.splitext(f)[1].split(".")[-1].lower())
            try:
                if form_post.strip_exif.data and file_extension in [
                        "png", "jpeg", "jpg"
                ]:
                    PIL.Image.MAX_IMAGE_PIXELS = 500000000
                    im = Image.open(fp)
                    logger.info(
                        "{}: Stripping image metadata/exif from {}".format(
                            dict_send["post_id"], fp))
                    im.save(fp)
            except Exception as e:
                msg = "{}: Error opening image/stripping exif: {}".format(
                    dict_send["post_id"], e)
                errors.append(msg)
                logger.exception(msg)

            # encrypt steg message into image
            # Get first image that steg can be inserted into
            if (form_steg and i == form_steg.image_steg_insert.data
                    and file_extension in ["jpg", "jpeg"]
                    and not steg_inserted):
                logger.info("{}: Adding steg message to image {}".format(
                    dict_send["post_id"], fp))

                pgp_passphrase_steg = config.PGP_PASSPHRASE_STEG
                with session_scope(DB_PATH) as new_session:
                    chan = new_session.query(Chan).filter(
                        Chan.address == form_post.board_id.data).first()
                    if chan and chan.pgp_passphrase_steg:
                        pgp_passphrase_steg = chan.pgp_passphrase_steg

                steg_status = steg_encrypt(fp, fp, form_steg.steg_message.data,
                                           pgp_passphrase_steg)

                if steg_status != "success":
                    errors.append(steg_status)
                    logger.exception(steg_status)
                else:
                    steg_inserted = True

        # Create zip archive of files
        def zipdir(path, ziph):
            # ziph is zipfile handle
            for root, dirs, files in os.walk(path):
                for file in files:
                    ziph.write(os.path.join(root, file), file)

        try:
            zipf = zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_LZMA)
            zipdir(dict_send["save_dir"], zipf)
            zipf.close()
        except:
            logger.error("{}: Could not zip file")

        # Delete tmp directory
        delete_files_recursive(dict_send["save_dir"])

    if any(dict_send["file_order"]):
        # Generate random filename and extension
        file_extension = ""
        while file_extension in [""] + config.UPLOAD_BANNED_EXT:
            file_name = get_random_alphanumeric_string(30,
                                                       with_punctuation=False,
                                                       with_spaces=False)
            file_extension = get_random_alphanumeric_string(
                3,
                with_punctuation=False,
                with_digits=False,
                with_spaces=False).lower()
            dict_send["upload_filename"] = "{}.{}".format(
                file_name, file_extension)
        save_encrypted_path = "/tmp/{}".format(dict_send["upload_filename"])

    if any(dict_send["file_order"]) and form_post.upload.data != "bitmessage":
        with session_scope(DB_PATH) as new_session:
            upload_info = new_session.query(UploadSites).filter(
                UploadSites.domain == form_post.upload.data).first()

            if upload_info:
                dict_send["file_url_type"] = upload_info.domain
                dict_send["file_upload_settings"] = {
                    "domain": upload_info.domain,
                    "type": upload_info.type,
                    "uri": upload_info.uri,
                    "download_prefix": upload_info.download_prefix,
                    "response": upload_info.response,
                    "direct_dl_url": upload_info.direct_dl_url,
                    "extra_curl_options": upload_info.extra_curl_options,
                    "upload_word": upload_info.upload_word,
                    "form_name": upload_info.form_name
                }
            else:
                logger.error("{}: Upload domain not found".format(
                    dict_send["post_id"]))

            # encrypt file
            if dict_send["file_enc_cipher"] == "NONE":
                logger.info("{}: Not encrypting attachment(s)".format(
                    dict_send["post_id"]))
                os.rename(zip_file, save_encrypted_path)
            else:
                dict_send[
                    "file_enc_password"] = get_random_alphanumeric_string(300)
                logger.info(
                    "{}: Encrypting attachment(s) with {} and {}-bit key".
                    format(dict_send["post_id"], dict_send["file_enc_cipher"],
                           dict_send["file_enc_key_bytes"] * 8))
                ret_crypto = crypto_multi_enc(
                    dict_send["file_enc_cipher"],
                    dict_send["file_enc_password"] +
                    config.PGP_PASSPHRASE_ATTACH,
                    zip_file,
                    save_encrypted_path,
                    key_bytes=dict_send["file_enc_key_bytes"])
                if not ret_crypto:
                    msg = "Unknown encryption cipher: {}".format(
                        dict_send["file_enc_cipher"])
                    errors.append(msg)
                    logger.error("{}: {}".format(dict_send["post_id"], msg))
                    return "Error", errors

                delete_file(zip_file)

            # Generate hash before parts removed
            dict_send["file_sha256_hash"] = generate_hash(save_encrypted_path)
            if dict_send["file_sha256_hash"]:
                logger.info("{}: Attachment hash generated: {}".format(
                    dict_send["post_id"], dict_send["file_sha256_hash"]))

            file_size = os.path.getsize(save_encrypted_path)
            number_of_extracts = config.UPLOAD_FRAG_AMT
            if file_size < 2000:
                extract_starts_sizes = [{
                    "start": 0,
                    "size": int(file_size * 0.5)
                }]
            else:
                extract_starts_sizes = [{
                    "start": 0,
                    "size": config.UPLOAD_FRAG_START_BYTES
                }]
                sequences = return_non_overlapping_sequences(
                    number_of_extracts, config.UPLOAD_FRAG_START_BYTES,
                    file_size - config.UPLOAD_FRAG_END_BYTES,
                    config.UPLOAD_FRAG_MIN_BYTES, config.UPLOAD_FRAG_MAX_BYTES)
                for pos, size in sequences:
                    extract_starts_sizes.append({"start": pos, "size": size})
                extract_starts_sizes.append({
                    "start":
                    file_size - config.UPLOAD_FRAG_END_BYTES,
                    "size":
                    config.UPLOAD_FRAG_END_BYTES
                })
            logger.info("{}: File extraction positions and sizes: {}".format(
                dict_send["post_id"], extract_starts_sizes))
            logger.info("{}: File size before: {}".format(
                dict_send["post_id"], os.path.getsize(save_encrypted_path)))

            data_extracted_start_base64 = data_file_multiple_extract(
                save_encrypted_path, extract_starts_sizes, chunk=4096)

            dict_send["file_size"] = os.path.getsize(save_encrypted_path)
            logger.info("{}: File size after: {}".format(
                dict_send["post_id"], dict_send["file_size"]))

            dict_send["file_extracts_start_base64"] = json.dumps(
                data_extracted_start_base64)

            # Upload file
            upload_id = get_random_alphanumeric_string(12,
                                                       with_spaces=False,
                                                       with_punctuation=False)
            try:
                with session_scope(DB_PATH) as new_session:
                    upl = UploadProgress()
                    upl.upload_id = upload_id
                    upl.uploading = True
                    upl.subject = base64.b64decode(
                        dict_send["subject"]).decode()
                    upl.total_size_bytes = dict_send["file_size"]
                    new_session.add(upl)
                    new_session.commit()

                upload_success = None
                curl_options = None
                status = None
                web_url = None
                if ("type" in dict_send["file_upload_settings"]
                        and dict_send["file_upload_settings"]["type"]
                        == "anonfile"):
                    if dict_send["file_upload_settings"]["uri"]:
                        anon = AnonFile(
                            proxies=config.TOR_PROXIES,
                            custom_timeout=432000,
                            uri=dict_send["file_upload_settings"]["uri"],
                            upload_id=upload_id)
                    else:
                        anon = AnonFile(proxies=config.TOR_PROXIES,
                                        custom_timeout=432000,
                                        server=form_post.upload.data,
                                        upload_id=upload_id)
                elif ("type" in dict_send["file_upload_settings"]
                      and dict_send["file_upload_settings"]["type"] == "curl"):
                    curl_options = dict_send["file_upload_settings"]
                    curl_upload = UploadCurl(upload_id=upload_id)

                for i in range(3):
                    logger.info("{}: Uploading {} file".format(
                        dict_send["post_id"],
                        human_readable_size(
                            os.path.getsize(save_encrypted_path))))
                    if ("type" in dict_send["file_upload_settings"]
                            and dict_send["file_upload_settings"]["type"]
                            == "anonfile"):
                        status, web_url = anon.upload_file(save_encrypted_path)
                    elif (curl_options
                          and "type" in dict_send["file_upload_settings"] and
                          dict_send["file_upload_settings"]["type"] == "curl"):
                        status, web_url = curl_upload.upload_curl(
                            dict_send["post_id"],
                            curl_options["domain"],
                            curl_options["uri"],
                            save_encrypted_path,
                            download_prefix=curl_options["download_prefix"],
                            upload_word=curl_options["upload_word"],
                            response=curl_options["response"])

                    if not status:
                        logger.error("{}: File upload failed".format(
                            dict_send["post_id"]))
                    else:
                        logger.info("{}: Upload success: URL: {}".format(
                            dict_send["post_id"], web_url))
                        upload_success = web_url
                        with session_scope(DB_PATH) as new_session:
                            upl = new_session.query(UploadProgress).filter(
                                UploadProgress.upload_id == upload_id).first()
                            if upl:
                                upl.progress_size_bytes = os.path.getsize(
                                    save_encrypted_path)
                                upl.progress_percent = 100
                                upl.uploading = False
                                new_session.commit()
                        break
                    time.sleep(15)
            except:
                logger.exception("uploading file")
            finally:
                delete_file(save_encrypted_path)
                with session_scope(DB_PATH) as new_session:
                    upl = new_session.query(UploadProgress).filter(
                        UploadProgress.upload_id == upload_id).first()
                    if upl:
                        upl.uploading = False
                        new_session.commit()

            if upload_success:
                dict_send["file_url"] = upload_success
            else:
                msg = "File upload failed after 3 attempts"
                errors.append(msg)
                logger.error("{}: {}".format(dict_send["post_id"], msg))
                return "Error", errors

    elif any(
            dict_send["file_order"]) and form_post.upload.data == "bitmessage":
        with session_scope(DB_PATH) as new_session:
            settings = new_session.query(GlobalSettings).first()
            if settings.enable_kiosk_mode and settings.kiosk_disable_bm_attach:
                msg = "Attaching files using the Bitmessage Upload Method is currently prohibited. " \
                      "Use one of the alternate upload methods."
                errors.append(msg)
                logger.error("{}: {}".format(dict_send["post_id"], msg))
                return "Error", errors

        # encrypt file
        try:
            dict_send[
                "file_enc_cipher"] = form_post.upload_cipher_and_key.data.split(
                    ",")[0]
            dict_send["file_enc_key_bytes"] = int(
                form_post.upload_cipher_and_key.data.split(",")[1])
        except:
            msg = "Unknown cannot parse cipher and key length: {}".format(
                form_post.upload_cipher_and_key.data)
            errors.append(msg)
            logger.error("{}: {}".format(dict_send["post_id"], msg))
            return "Error", errors

        if dict_send["file_enc_cipher"] == "NONE":
            logger.info("{}: Not encrypting attachment(s)".format(
                dict_send["post_id"]))
            os.rename(zip_file, save_encrypted_path)
        else:
            dict_send["file_enc_password"] = get_random_alphanumeric_string(
                300)
            logger.info(
                "{}: Encrypting attachment(s) with {} and {}-bit key".format(
                    dict_send["post_id"], dict_send["file_enc_cipher"],
                    dict_send["file_enc_key_bytes"] * 8))
            ret_crypto = crypto_multi_enc(
                dict_send["file_enc_cipher"],
                dict_send["file_enc_password"] + config.PGP_PASSPHRASE_ATTACH,
                zip_file,
                save_encrypted_path,
                key_bytes=dict_send["file_enc_key_bytes"])
            if not ret_crypto:
                msg = "Unknown encryption cipher: {}".format(
                    dict_send["file_enc_cipher"])
                errors.append(msg)
                logger.error("{}: {}".format(dict_send["post_id"], msg))
                return "Error", errors

            delete_file(zip_file)

        dict_send["file_uploaded"] = base64.b64encode(
            open(save_encrypted_path, "rb").read()).decode()

        delete_file(save_encrypted_path)

    dict_message = {
        "version": config.VERSION_MSG,
        "message_type": "post",
        "is_op": form_post.is_op.data == "yes",
        "op_sha256_hash": dict_send["op_sha256_hash"],
        "timestamp_utc": daemon_com.get_utc(),
        "file_size": dict_send["file_size"],
        "file_amount": dict_send["file_amount"],
        "file_url_type": dict_send["file_url_type"],
        "file_url": dict_send["file_url"],
        "file_upload_settings": dict_send["file_upload_settings"],
        "file_extracts_start_base64": dict_send["file_extracts_start_base64"],
        "file_base64": dict_send["file_uploaded"],
        "file_sha256_hash": dict_send["file_sha256_hash"],
        "file_enc_cipher": dict_send["file_enc_cipher"],
        "file_enc_key_bytes": dict_send["file_enc_key_bytes"],
        "file_enc_password": dict_send["file_enc_password"],
        "file_order": dict_send["file_order"],
        "image1_spoiler": form_post.image1_spoiler.data,
        "image2_spoiler": form_post.image2_spoiler.data,
        "image3_spoiler": form_post.image3_spoiler.data,
        "image4_spoiler": form_post.image4_spoiler.data,
        "upload_filename": dict_send["upload_filename"],
        "sage": dict_send["sage"],
        "subject": dict_send["subject"],
        "message": dict_send["message"],
        "nation": dict_send["nation"],
        "nation_base64": dict_send["nation_base64"],
        "nation_name": dict_send["nation_name"],
    }

    if zip_file:
        delete_file(zip_file)

    pgp_passphrase_msg = config.PGP_PASSPHRASE_MSG
    with session_scope(DB_PATH) as new_session:
        chan = new_session.query(Chan).filter(
            Chan.address == form_post.board_id.data).first()
        if chan and chan.pgp_passphrase_msg:
            pgp_passphrase_msg = chan.pgp_passphrase_msg

    gpg = gnupg.GPG()
    message_encrypted = gpg.encrypt(json.dumps(dict_message),
                                    symmetric="AES256",
                                    passphrase=pgp_passphrase_msg,
                                    recipients=None)

    message_send = base64.b64encode(message_encrypted.data).decode()

    if len(message_send) > config.BM_PAYLOAD_MAX_SIZE:
        msg = "Message payload too large: {}. Must be less than {}".format(
            human_readable_size(len(message_send)),
            human_readable_size(config.BM_PAYLOAD_MAX_SIZE))
        logger.error(msg)
        errors.append(msg)
        return "Error", errors
    else:
        logger.info("{}: Message size: {}".format(dict_send["post_id"],
                                                  len(message_send)))

    # prolong inventory clear if sending a message
    now = time.time()
    if daemon_com.get_timer_clear_inventory() > now:
        daemon_com.update_timer_clear_inventory(config.CLEAR_INVENTORY_WAIT)

    # Don't allow a message to send while Bitmessage is restarting
    allow_send = False
    timer = time.time()
    while not allow_send:
        if daemon_com.bitmessage_restarting() is False:
            allow_send = True
        if time.time() - timer > config.BM_WAIT_DELAY:
            logger.error("{}: Unable to send message: "
                         "Could not detect Bitmessage running.".format(
                             dict_send["post_id"]))
            msg = "Unable to send message."
            errors = ["Could not detect Bitmessage running."]
            return msg, errors
        time.sleep(1)

    lf = LF()
    if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
        return_str = None
        try:
            return_str = api.sendMessage(form_post.board_id.data,
                                         form_post.from_address.data, "",
                                         message_send, 2, form_post.ttl.data)
            if return_str:
                logger.info(
                    "{}: Message sent from {} to {} with TTL of {} sec: {}".
                    format(dict_send["post_id"], form_post.from_address.data,
                           form_post.board_id.data, form_post.ttl.data,
                           return_str))
        except Exception:
            pass
        finally:
            time.sleep(config.API_PAUSE)
            lf.lock_release(config.LOCKFILE_API)
            return_msg = "Post of size {} placed in send queue. The time it " \
                         "takes to send a message is related to the size of the " \
                         "post due to the proof of work required to send. " \
                         "Generally, the larger the post, the longer it takes to " \
                         "send. Posts ~10 KB take around a minute or less to send, " \
                         "whereas messages >= 100 KB can take several minutes to " \
                         "send. BM returned: {}".format(
                            human_readable_size(len(message_send)), return_str)
            return return_msg, errors
Example #6
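# Flask route to leave a board/list: deletes its threads, posts, and related
# records under a message-processing lock, tells the daemon to leave the chan
# in Bitmessage, then deletes the chan and vacuums the database.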
def leave(address):
    global_admin, allow_msg = allowed_access(check_is_global_admin=True)
    if not global_admin:
        return allow_msg

    form_confirm = forms_board.Confirm()

    chan = Chan.query.filter(Chan.address == address).first()

    if request.method != 'POST' or not form_confirm.confirm.data:
        return render_template("pages/confirm.html",
                               action="leave",
                               address=address,
                               chan=chan)

    status_msg = {"status_message": []}

    admin_cmds = Command.query.filter(Command.chan_address == address).all()
    for each_adm_cmd in admin_cmds:
        each_adm_cmd.delete()

    lf = LF()
    if lf.lock_acquire(config.LOCKFILE_MSG_PROC, to=60):
        try:
            for each_thread in chan.threads:
                for each_message in each_thread.messages:
                    delete_post(each_message.message_id)  # Delete thread posts
                delete_thread(each_thread.thread_hash)  # Delete thread

            deleted_msgs = DeletedMessages.query.filter(
                DeletedMessages.address_to == address).all()
            for each_msg in deleted_msgs:
                logger.info("DeletedMessages: Deleting entry: {}".format(
                    each_msg.message_id))
                each_msg.delete()

            try:
                daemon_com.leave_chan(address)  # Leave chan in Bitmessage
                delete_chan(address)  # Delete chan

                # Delete mod log entries for address
                mod_logs = ModLog.query.filter(
                    ModLog.board_address == address).all()
                for each_entry in mod_logs:
                    each_entry.delete()
            except:
                logger.exception(
                    "Could not delete chan via daemon or delete_chan()")

            daemon_com.delete_and_vacuum()

            status_msg['status_title'] = "Success"
            status_msg['status_message'].append("Deleted {}".format(address))
        finally:
            time.sleep(1)
            lf.lock_release(config.LOCKFILE_MSG_PROC)

    board = {"current_chan": None}
    url = ""
    url_text = ""

    return render_template("pages/alert.html",
                           board=board,
                           status_msg=status_msg,
                           url=url,
                           url_text=url_text)
Example #7
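# Flask route for Identities: create a deterministic Bitmessage address from a
# passphrase, rename an identity, or delete one, with all API calls made under
# the API lock.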
def identities():
    global_admin, allow_msg = allowed_access(
        check_is_global_admin=True)
    if not global_admin:
        return allow_msg

    form_identity = forms_settings.Identity()
    form_confirm = forms_board.Confirm()

    status_msg = session.get('status_msg', {"status_message": []})

    if request.method == 'GET':
        if 'status_msg' in session:
            session.pop('status_msg')

    elif request.method == 'POST':
        if form_identity.create_identity.data:
            if not form_identity.label.data or not form_identity.passphrase.data:
                status_msg['status_message'].append("Label and passphrase required")

            errors, dict_chan_info = process_passphrase(form_identity.passphrase.data)
            if dict_chan_info:
                status_msg['status_message'].append("Cannot create an Identity with board/list passphrase")

            if not status_msg['status_message']:
                lf = LF()
                if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
                    try:
                        b64_passphrase = base64.b64encode(form_identity.passphrase.data.encode())
                        return_str = api.createDeterministicAddresses(b64_passphrase.decode())
                        if return_str:
                            if ("addresses" in return_str and
                                    len(return_str["addresses"]) == 1 and
                                    return_str["addresses"][0]):

                                ident = Identity.query.filter(
                                    Identity.address == return_str["addresses"][0]).first()
                                if ident:
                                    logger.info(
                                        "Creating identity that already exists in the database. "
                                        "Skipping adding entry")
                                else:
                                    new_ident = Identity()
                                    new_ident.address = return_str["addresses"][0]
                                    new_ident.label = form_identity.label.data
                                    new_ident.passphrase_base64 = b64_passphrase
                                    new_ident.save()

                                daemon_com.refresh_identities()

                                if form_identity.resync.data:
                                    daemon_com.signal_clear_inventory()

                                status_msg['status_title'] = "Success"
                                status_msg['status_message'].append(
                                    "Created identity {} with address {}.".format(
                                        form_identity.label.data, return_str["addresses"][0]))
                                status_msg['status_message'].append(
                                    "Give the system a few seconds for the change to take effect.")
                            else:
                                status_msg['status_message'].append(
                                    "Error creating Identity: {}".format(return_str))
                        else:
                            status_msg['status_message'].append("Error creating Identity")
                    finally:
                        time.sleep(config.API_PAUSE)
                        lf.lock_release(config.LOCKFILE_API)

        elif form_identity.rename.data:
            if not form_identity.ident_label.data or not form_identity.address.data:
                status_msg['status_message'].append("Label and address required")

            if not status_msg['status_message']:
                ident = Identity.query.filter(
                    Identity.address == form_identity.address.data).first()
                if ident:
                    ident.label = form_identity.ident_label.data
                    ident.save()
                    daemon_com.refresh_identities()
                    status_msg['status_title'] = "Success"
                    status_msg['status_message'].append("Identity renamed.")
                    status_msg['status_message'].append(
                        "Give the system a few seconds for the change to take effect.")

        elif form_identity.delete.data:
            ident = None
            if not form_identity.address.data:
                status_msg['status_message'].append("Address required")
            else:
                ident = Identity.query.filter(
                    Identity.address == form_identity.address.data).first()

            if not form_confirm.confirm.data:
                return render_template("pages/confirm.html",
                                       action="delete_identity",
                                       ident=ident)

            if not status_msg['status_message']:
                lf = LF()
                if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
                    try:
                        return_str = api.deleteAddress(form_identity.address.data)
                        if return_str == "success":
                            if ident:
                                ident.delete()
                            daemon_com.refresh_identities()
                            status_msg['status_title'] = "Success"
                            status_msg['status_message'].append("Identity deleted.")
                            status_msg['status_message'].append(
                                "Give the system a few seconds for the change to take effect.")
                        else:
                            status_msg['status_message'].append(
                                "Error deleting Identity: {}".format(return_str))
                    finally:
                        time.sleep(config.API_PAUSE)
                        lf.lock_release(config.LOCKFILE_API)

        session['status_msg'] = status_msg

        if 'status_title' not in status_msg and status_msg['status_message']:
            status_msg['status_title'] = "Error"

        return redirect(url_for("routes_identities.identities"))

    return render_template("pages/identities.html",
                           form_identity=form_identity,
                           status_msg=status_msg)
Example #8
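# Flask route for the Diagnostics page: lists messages still being sent (doing
# proof of work) and runs maintenance actions such as canceling sends, clearing
# inventory/trash, fixing timestamps, and creating or restoring backups.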
def diag():
    global_admin, allow_msg = allowed_access(check_is_global_admin=True)
    if not global_admin:
        return allow_msg

    status_msg = session.get('status_msg', {"status_message": []})
    form_diag = forms_settings.Diag()

    # get all messages sending
    import sqlite3
    from binascii import hexlify
    row = []
    try:
        conn = sqlite3.connect('file:{}'.format(config.messages_dat), uri=True)
        conn.text_factory = bytes
        c = conn.cursor()
        c.execute(
            "SELECT msgid, fromaddress, toaddress, lastactiontime, message, status "
            "FROM sent "
            "WHERE folder='sent'")
        row = c.fetchall()
        conn.commit()
        conn.close()
    except Exception as err:
        logger.exception("Error checking for POW: {}".format(err))

    # Convert msg IDs
    sending_msgs = []
    for each_row in row:
        if each_row[5].decode() in ["doingmsgpow", "msgqueued"]:
            sending_msgs.append(
                (hexlify(each_row[0]).decode(),
                 each_row[1].decode(), each_row[2].decode(), each_row[3],
                 len(each_row[4]), each_row[5].decode()))

    if request.method == 'POST':
        if form_diag.del_sending_msg.data:
            cancel_send_id_list = []
            for each_input in request.form:
                if each_input.startswith("delsendingmsgid_"):
                    cancel_send_id_list.append(each_input.split("_")[1])

            if not cancel_send_id_list:
                status_msg['status_message'].append(
                    "Must select at least one message to cancel the sending of."
                )

            if not status_msg['status_message']:
                lf = LF()
                if lf.lock_acquire(config.LOCKFILE_API,
                                   to=config.API_LOCK_TIMEOUT):
                    try:
                        for each_id in cancel_send_id_list:
                            logger.info(
                                "Trashing msg with ID: {}".format(each_id))
                            api.trashSentMessage(each_id)
                            time.sleep(0.1)

                        time.sleep(1)
                        daemon_com.restart_bitmessage()
                        status_msg['status_title'] = "Success"
                        status_msg['status_message'].append(
                            "Deleted message(s) being sent and restarting Bitmessage. "
                            "Please wait at least 60 seconds before canceling another send."
                        )
                    except Exception as err:
                        logger.error("Error: {}".format(err))
                    finally:
                        time.sleep(config.API_PAUSE)
                        lf.lock_release(config.LOCKFILE_API)

        if form_diag.del_inventory.data:
            try:
                daemon_com.clear_bm_inventory()
                status_msg['status_title'] = "Success"
                status_msg['status_message'].append(
                    "Deleted Bitmessage inventory and restarting Bitmessage. Give it time to resync."
                )
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't delete Bitmessage inventory: {}".format(err))
                logger.exception("Couldn't delete BM inventory")

        elif form_diag.del_deleted_msg_db.data:
            try:
                deleted_msgs = DeletedMessages.query.all()
                for each_msg in deleted_msgs:
                    logger.info("DeletedMessages: Deleting entry: {}".format(
                        each_msg.message_id))
                    each_msg.delete()
                status_msg['status_title'] = "Success"
                status_msg['status_message'].append(
                    "Cleared Deleted Message table")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't clear Deleted Message table: {}".format(err))
                logger.exception("Couldn't clear Deleted Message table")

        elif form_diag.del_non_bc_msg_list.data:
            try:
                settings = GlobalSettings.query.first()
                settings.discard_message_ids = "[]"
                settings.save()
                status_msg['status_title'] = "Success"
                status_msg['status_message'].append(
                    "Cleared Non-BC Message List")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't clear Non-BC Message List: {}".format(err))
                logger.exception("Couldn't clear Non-BC Message List")

        elif form_diag.del_trash.data:
            try:
                daemon_com.delete_and_vacuum()
                status_msg['status_title'] = "Success"
                status_msg['status_message'].append(
                    "Deleted Bitmessage Trash items.")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't delete Bitmessage Trash items: {}".format(err))
                logger.exception("Couldn't delete BM Trash Items")

        elif form_diag.del_popup_html.data:
            try:
                for each_message in Messages.query.all():
                    each_message.popup_html = ""
                    each_message.save()
                status_msg['status_title'] = "Success"
                status_msg['status_message'].append(
                    "Deleted popup HTML for all messages.")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't delete popup HTML: {}".format(err))
                logger.exception("Couldn't delete popup HTML")

        elif form_diag.del_cards.data:
            try:
                cards = PostCards.query.all()
                for each_card in cards:
                    each_card.delete()
                status_msg['status_title'] = "Success"
                status_msg['status_message'].append("Deleted cards.")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't delete cards: {}".format(err))
                logger.exception("Couldn't delete cards")

        elif form_diag.del_mod_log.data:
            try:
                mod_logs = ModLog.query.all()
                for each_entry in mod_logs:
                    each_entry.delete()
                status_msg['status_title'] = "Success"
                status_msg['status_message'].append("Deleted Mod Log.")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't delete Mod Log: {}".format(err))
                logger.exception("Couldn't delete Mod Log")

        elif form_diag.del_posts_without_thread.data:
            try:
                messages = Messages.query.all()
                for each_msg in messages:
                    if not each_msg.thread:
                        each_msg.delete()
                status_msg['status_title'] = "Success"
                status_msg['status_message'].append("Deleted orphaned posts.")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't delete orphaned posts: {}".format(err))
                logger.exception("Couldn't delete orphaned posts")

        elif form_diag.fix_thread_board_timestamps.data:
            try:
                threads = Threads.query.all()
                for each_thread in threads:
                    latest_post = Messages.query.filter(
                        Messages.thread_id == each_thread.id).order_by(
                            Messages.timestamp_sent.desc()).first()
                    if latest_post:
                        each_thread.timestamp_sent = latest_post.timestamp_sent
                        each_thread.save()

                boards = Chan.query.filter(Chan.type == "board").all()
                for each_board in boards:
                    latest_thread = Threads.query.filter(
                        Threads.chan_id == each_board.id).order_by(
                            Threads.timestamp_sent.desc()).first()
                    if latest_thread:
                        each_board.timestamp_sent = latest_thread.timestamp_sent
                        each_board.save()

                status_msg['status_title'] = "Success"
                status_msg['status_message'].append(
                    "Fixed thread and board timestamps.")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't fix thread and board timestamps: {}".format(err))
                logger.exception("Couldn't fix thread and board timestamps")

        elif form_diag.fix_thread_short_hashes.data:
            try:
                threads = Threads.query.all()
                for each_thread in threads:
                    each_thread.thread_hash_short = each_thread.thread_hash[
                        -12:]
                    each_thread.save()

                status_msg['status_title'] = "Success"
                status_msg['status_message'].append(
                    "Fixed thread short hashes")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't fix thread short hashes: {}".format(err))
                logger.exception("Couldn't fix thread short hashes")

        elif form_diag.download_backup.data:
            date_now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
            filename = 'bitchan-backup_{}.tar'.format(date_now)
            save_path = '/home/{}'.format(filename)

            def delete_backup_files():
                time.sleep(7200)
                delete_files = glob.glob("/home/*.tar")
                delete_files.append(
                    '/home/bitchan/bitchan_backup-usr_bitchan.tar')
                delete_files.append(
                    '/home/bitchan/bitchan_backup-usr_bitmessage.tar')
                for each_file in delete_files:
                    delete_file(each_file)

            try:
                cmd = 'tar -cvf /home/bitchan/bitchan_backup-usr_bitchan.tar /usr/local/bitchan'
                output = subprocess.check_output(cmd, shell=True, text=True)
                logger.debug("Command: {}, Output: {}".format(cmd, output))

                cmd = 'tar -cvf /home/bitchan/bitchan_backup-usr_bitmessage.tar /usr/local/bitmessage'
                output = subprocess.check_output(cmd, shell=True, text=True)
                logger.debug("Command: {}, Output: {}".format(cmd, output))

                cmd = 'tar -cvf {} /home/bitchan'.format(save_path)
                output = subprocess.check_output(cmd, shell=True, text=True)
                logger.debug("Command: {}, Output: {}".format(cmd, output))

                thread_download = Thread(target=delete_backup_files)
                thread_download.start()

                return send_file(save_path, mimetype='application/x-tar')
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't generate backup archive: {}".format(err))
                logger.exception("Couldn't generate backup archive")

        # Restore-backup branch (form field name assumed; a second
        # download_backup check here would never be reached)
        elif form_diag.restore_backup.data:
            try:
                save_path = '/tmp/bitchan-backup_to_restore.tar'
                delete_file(save_path)
                form_diag.restore_backup_file.data.save(save_path)

                cmd = 'tar -xvf {} -C /'.format(save_path)
                output = subprocess.check_output(cmd, shell=True, text=True)
                logger.debug("Command: {}, Output: {}".format(cmd, output))

                cmd = 'tar -xvf /home/bitchan/bitchan_backup-usr_bitchan.tar -C /'
                output = subprocess.check_output(cmd, shell=True, text=True)
                logger.debug("Command: {}, Output: {}".format(cmd, output))

                cmd = 'tar -xvf /home/bitchan/bitchan_backup-usr_bitmessage.tar -C /'
                output = subprocess.check_output(cmd, shell=True, text=True)
                logger.debug("Command: {}, Output: {}".format(cmd, output))

                def delete_backup_files():
                    delete_files = [
                        save_path,
                        '/home/bitchan/bitchan_backup-usr_bitchan.tar',
                        '/home/bitchan/bitchan_backup-usr_bitmessage.tar'
                    ]
                    for each_file in delete_files:
                        delete_file(each_file)

                subprocess.Popen('docker stop -t 15 bitchan_daemon 2>&1',
                                 shell=True)
                time.sleep(15)
                subprocess.Popen('docker start bitchan_daemon 2>&1',
                                 shell=True)

                subprocess.Popen('docker stop -t 15 bitmessage 2>&1',
                                 shell=True)
                time.sleep(15)
                subprocess.Popen('docker start bitmessage 2>&1', shell=True)

                thread_download = Thread(target=delete_backup_files)
                thread_download.start()

                status_msg['status_title'] = "Success"
                status_msg['status_message'].append(
                    "Restored backup and restarted Bitmessage and BitChan")
            except Exception as err:
                status_msg['status_message'].append(
                    "Couldn't restore backup: {}".format(err))
                logger.exception("Couldn't restore backup archive")

        elif form_diag.bulk_delete_threads_submit.data:
            address = "0"
            if form_diag.bulk_delete_threads_address.data:
                board = Chan.query.filter(
                    Chan.address == form_diag.bulk_delete_threads_address.data)
                if not board.count():
                    status_msg['status_message'].append(
                        "Invalid Address: {}".format(
                            form_diag.bulk_delete_threads_address.data))
                else:
                    address = board.first().address

            return redirect(
                url_for("routes_admin.bulk_delete_thread",
                        current_chan=address))

        if 'status_title' not in status_msg and status_msg['status_message']:
            status_msg['status_title'] = "Error"

    return render_template("pages/diag.html",
                           flask_session_login=flask_session_login,
                           form_diag=form_diag,
                           replace_lt_gt=replace_lt_gt,
                           sending_msgs=sending_msgs,
                           settings=GlobalSettings.query.first(),
                           status_msg=status_msg,
                           themes=themes.themes)
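The backup branch above shells out to tar for each directory and then hands deletion of the intermediate archives to a background thread so the HTTP response is not delayed. A minimal sketch of that pattern, assuming a delete_file(path) helper like the one used in the example:

import logging
import subprocess
from threading import Thread

logger = logging.getLogger(__name__)


def archive_and_cleanup(save_path, source_dir, cleanup_paths, delete_file):
    """Create a tar archive of source_dir, then remove intermediates in the background."""
    cmd = 'tar -cvf {} {}'.format(save_path, source_dir)
    output = subprocess.check_output(cmd, shell=True, text=True)
    logger.debug("Command: %s, Output: %s", cmd, output)

    def delete_intermediates():
        # Runs off the request thread; cleanup_paths lists the temporary tar files
        for each_file in cleanup_paths:
            delete_file(each_file)

    Thread(target=delete_intermediates).start()
    return save_path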
Example No. 9
0
def bug_report():
    allowed, allow_msg = allowed_access(check_can_view=True)
    if not allowed:
        return allow_msg

    status_msg = session.get('status_msg', {"status_message": []})
    form_bug = forms_board.BugReport()

    if request.method == 'POST':
        if form_bug.send.data and form_bug.bug_report.data:
            try:
                # Only send from a board or list
                # Do not send from an identity
                if config.DEFAULT_CHANS[0][
                        "address"] in daemon_com.get_all_chans():
                    address_from = config.DEFAULT_CHANS[0]["address"]
                elif daemon_com.get_all_chans():
                    address_from = list(daemon_com.get_all_chans().keys())[0]
                else:
                    status_msg['status_message'].append(
                        "Could not find address to send from. "
                        "Join/Create a board or list and try again.")
                    address_from = None

                alembic_version = Alembic.query.first().version_num
                message_compiled = "BitChan version: {}\n".format(
                    config.VERSION_BITCHAN)
                message_compiled += "Database version: {} (should be {})\n\n".format(
                    alembic_version, config.VERSION_ALEMBIC)
                message_compiled += "Message:\n\n{}".format(
                    form_bug.bug_report.data)
                message_b64 = base64.b64encode(
                    message_compiled.encode()).decode()

                ts = datetime.datetime.fromtimestamp(
                    daemon_com.get_utc()).strftime('%Y-%m-%d %H:%M:%S')
                subject = "Bug Report {} ({})".format(config.VERSION_BITCHAN,
                                                      ts)
                subject_b64 = base64.b64encode(subject.encode()).decode()

                if not status_msg['status_message']:
                    if address_from:
                        # Don't allow a message to send while Bitmessage is restarting
                        allow_send = False
                        timer = time.time()
                        while not allow_send:
                            if daemon_com.bitmessage_restarting() is False:
                                allow_send = True
                            if time.time() - timer > config.BM_WAIT_DELAY:
                                logger.error(
                                    "Unable to send message: "
                                    "Could not detect Bitmessage running.")
                                return
                            time.sleep(1)

                        if allow_send:
                            lf = LF()
                            if lf.lock_acquire(config.LOCKFILE_API,
                                               to=config.API_LOCK_TIMEOUT):
                                try:
                                    return_str = api.sendMessage(
                                        config.BITCHAN_BUG_REPORT_ADDRESS,
                                        address_from, subject_b64, message_b64,
                                        2, config.BM_TTL)
                                    if return_str:
                                        status_msg['status_title'] = "Success"
                                        status_msg['status_message'].append(
                                            "Sent. Thank you for your feedback. "
                                            "Send returned: {}".format(
                                                return_str))
                                finally:
                                    time.sleep(config.API_PAUSE)
                                    lf.lock_release(config.LOCKFILE_API)

            except Exception as err:
                status_msg['status_message'].append(
                    "Could not send: {}".format(err))
                logger.exception("Could not send bug report: {}".format(err))

        if 'status_title' not in status_msg and status_msg['status_message']:
            status_msg['status_title'] = "Error"

    return render_template("pages/bug_report.html",
                           form_bug=form_bug,
                           replace_lt_gt=replace_lt_gt,
                           settings=GlobalSettings.query.first(),
                           status_msg=status_msg,
                           themes=themes.themes)
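The send above follows the pattern used throughout these views: base64-encode the subject and body, take the API lock file, call sendMessage, then pause and release the lock in a finally block. A condensed sketch of that flow, with the api, lf, and config objects passed in as parameters (an assumption made only to keep the snippet self-contained):

import base64
import time


def send_bug_report(api, lf, config, address_from, subject, body):
    """Send a message through the Bitmessage API behind the shared lock file."""
    subject_b64 = base64.b64encode(subject.encode()).decode()
    message_b64 = base64.b64encode(body.encode()).decode()

    if lf.lock_acquire(config.LOCKFILE_API, to=config.API_LOCK_TIMEOUT):
        try:
            # 2 is the message encoding; BM_TTL controls how long the network keeps it
            return api.sendMessage(config.BITCHAN_BUG_REPORT_ADDRESS,
                                   address_from, subject_b64, message_b64,
                                   2, config.BM_TTL)
        finally:
            time.sleep(config.API_PAUSE)
            lf.lock_release(config.LOCKFILE_API)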
Example No. 10
0
def compose(address_from, address_to):
    global_admin, allow_msg = allowed_access(check_is_global_admin=True)
    if not global_admin:
        return allow_msg

    from_all = []

    form_msg = forms_mailbox.Compose()

    if address_from == "0":
        address_from = ""

    if address_to == "0":
        address_to = ""

    from_all.extend(daemon_com.get_identities().keys())
    from_all.extend(daemon_com.get_all_chans().keys())

    form_populate = session.get('form_populate', {})
    status_msg = session.get('status_msg', {"status_message": []})

    if request.method == 'GET':
        if 'form_populate' in session:
            session.pop('form_populate')
        if 'status_msg' in session:
            session.pop('status_msg')

    if request.method == 'POST':
        if form_msg.send.data:
            if not form_msg.to_address.data:
                status_msg['status_message'].append(
                    "Must provide a To Address")
            if not form_msg.from_address.data:
                status_msg['status_message'].append(
                    "Must provide a From Address")
            if not (3600 <= form_msg.ttl.data <= 2419200):
                status_msg['status_message'].append(
                    "TTL must be between 3600 and 2419200")

            if not status_msg['status_message']:
                if form_msg.subject.data:
                    subject = base64.b64encode(
                        form_msg.subject.data.encode()).decode()
                else:
                    subject = ""
                if form_msg.body.data:
                    message = base64.b64encode(
                        form_msg.body.data.encode()).decode()
                else:
                    message = ""

                # Don't allow a message to send while Bitmessage is restarting
                allow_send = False
                timer = time.time()
                while not allow_send:
                    if daemon_com.bitmessage_restarting() is False:
                        allow_send = True
                    if time.time() - timer > config.BM_WAIT_DELAY:
                        logger.error("Unable to send message: "
                                     "Could not detect Bitmessage running.")
                        return
                    time.sleep(1)

                if allow_send:
                    lf = LF()
                    if lf.lock_acquire(config.LOCKFILE_API,
                                       to=config.API_LOCK_TIMEOUT):
                        try:  # TODO: message sends but results in error. Diagnose.
                            status_msg['status_title'] = "Success"
                            status_msg['status_message'].append(
                                "Message sent to queue")
                            try:
                                return_str = api.sendMessage(
                                    form_msg.to_address.data,
                                    form_msg.from_address.data, subject,
                                    message, 2, form_msg.ttl.data)
                            except Exception as err:
                                if str(err) == "<Fault 21: 'Unexpected API Failure - too many values to unpack'>":
                                    return_str = "Error: API Failure (despite this error, the message probably still sent)"
                                else:
                                    return_str = "Error: {}".format(err)
                            if return_str:
                                logger.info(
                                    "Send message from {} to {}. Returned: {}".
                                    format(form_msg.from_address.data,
                                           form_msg.to_address.data,
                                           return_str))
                                status_msg['status_message'].append(
                                    "Bitmessage returned: {}".format(
                                        return_str))
                        except Exception as err:
                            logger.exception("Error: {}".format(err))
                        finally:
                            time.sleep(config.API_PAUSE)
                            lf.lock_release(config.LOCKFILE_API)

        if 'status_title' not in status_msg and status_msg['status_message']:
            status_msg['status_title'] = "Error"

            form_populate = {
                "to_address": form_msg.to_address.data,
                "from_address": form_msg.from_address.data,
                "ttl": form_msg.ttl.data,
                "subject": form_msg.subject.data,
                "body": form_msg.body.data,
            }

        session['form_populate'] = form_populate
        session['status_msg'] = status_msg

        if not address_from:
            address_from = "0"

        return redirect(
            url_for("routes_mail.compose",
                    address_from=address_from,
                    address_to="0"))

    return render_template("mailbox/compose.html",
                           address_from=address_from,
                           address_to=address_to,
                           form_populate=form_populate,
                           from_all=from_all,
                           get_from_list_all=get_from_list_all,
                           status_msg=status_msg)
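Both compose() and bug_report() guard the send with the same polling loop so nothing is handed to the API while Bitmessage is restarting. Factored out, the loop might look like the sketch below, where daemon_com is passed in and timeout_sec corresponds to config.BM_WAIT_DELAY:

import time


def wait_for_bitmessage(daemon_com, timeout_sec):
    """Return True once Bitmessage reports it is not restarting, False on timeout."""
    start = time.time()
    while True:
        if daemon_com.bitmessage_restarting() is False:
            return True
        if time.time() - start > timeout_sec:
            return False  # caller logs the error and aborts the send
        time.sleep(1)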
Example No. 11
0
def mailbox(ident_address, mailbox, page, msg_id):
    global_admin, allow_msg = allowed_access(check_is_global_admin=True)
    if not global_admin:
        return allow_msg

    status_msg = {"status_message": []}
    messages = []
    msg_selected = []
    identities = daemon_com.get_identities()
    page = int(page)

    form_mail = forms_mailbox.Mailbox()

    if msg_id != "0":
        if mailbox == "inbox":
            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_API,
                               to=config.API_LOCK_TIMEOUT):
                try:
                    msg_selected = api.getInboxMessageById(msg_id, True)
                    if "inboxMessage" in msg_selected:
                        msg_selected = msg_selected["inboxMessage"][0]
                        expires = get_msg_expires_time(msg_id)
                        if expires:
                            msg_selected["expires_time"] = expires
                except Exception as err:
                    logger.error("Error: {}".format(err))
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_API)

        elif mailbox == "sent":
            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_API,
                               to=config.API_LOCK_TIMEOUT):
                try:
                    msg_selected = api.getSentMessageById(msg_id)
                    if "sentMessage" in msg_selected:
                        msg_selected = msg_selected["sentMessage"][0]
                        expires = get_msg_expires_time(msg_id)
                        if expires:
                            msg_selected["expires_time"] = expires
                except Exception as err:
                    logger.error("Error: {}".format(err))
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_API)

    if request.method == 'POST':
        settings = GlobalSettings.query.first()

        if (form_mail.messages_per_mailbox_page.data
                and form_mail.messages_per_mailbox_page.data !=
                settings.messages_per_mailbox_page):
            settings.messages_per_mailbox_page = form_mail.messages_per_mailbox_page.data
            settings.save()

        elif form_mail.execute_bulk_action.data and form_mail.bulk_action.data:
            msg_ids = request.form.getlist("selected_msg")

            if form_mail.bulk_action.data == "delete":
                lf = LF()
                if lf.lock_acquire(config.LOCKFILE_API,
                                   to=config.API_LOCK_TIMEOUT):
                    try:
                        for each_id in msg_ids:
                            if mailbox == "inbox":
                                api.trashInboxMessage(each_id)
                            elif mailbox == "sent":
                                api.trashSentMessage(each_id)
                    except Exception as err:
                        logger.error("Error: {}".format(err))
                    finally:
                        time.sleep(config.API_PAUSE)
                        lf.lock_release(config.LOCKFILE_API)

                return redirect(
                    url_for("routes_mail.mailbox",
                            ident_address=ident_address,
                            mailbox=mailbox,
                            page="1",
                            msg_id="0"))

            if form_mail.bulk_action.data in ["mark_read", "mark_unread"]:
                lf = LF()
                if lf.lock_acquire(config.LOCKFILE_API,
                                   to=config.API_LOCK_TIMEOUT):
                    try:
                        for each_id in msg_ids:
                            api.getInboxMessageById(
                                each_id,
                                form_mail.bulk_action.data == "mark_read")
                    except Exception as err:
                        logger.error("Error: {}".format(err))
                    finally:
                        time.sleep(config.API_PAUSE)
                        lf.lock_release(config.LOCKFILE_API)

                daemon_com.update_unread_mail_count(ident_address)

                return redirect(
                    url_for("routes_mail.mailbox",
                            ident_address=ident_address,
                            mailbox=mailbox,
                            page=page,
                            msg_id=msg_id))

        elif form_mail.reply.data and form_mail.message_id.data:
            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_API,
                               to=config.API_LOCK_TIMEOUT):
                try:
                    msg_selected = api.getInboxMessageById(
                        form_mail.message_id.data, True)
                    if "inboxMessage" in msg_selected:
                        msg_selected = msg_selected["inboxMessage"][0]
                        form_populate = {
                            "to_address":
                            msg_selected["fromAddress"],
                            "body":
                            "\n\n\n------------------------------------------------------\n{}"
                            .format(base64_decode(msg_selected["message"]))
                        }
                        if base64_decode(
                                msg_selected["subject"]).startswith("Re:"):
                            form_populate["subject"] = base64_decode(
                                msg_selected["subject"])
                        else:
                            form_populate["subject"] = "Re: {}".format(
                                base64_decode(msg_selected["subject"]))
                        session['form_populate'] = form_populate
                        session['status_msg'] = status_msg
                except Exception as err:
                    logger.error("Error: {}".format(err))
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_API)

            return redirect(url_for("routes_mail.compose", address_to="0"))

        elif form_mail.forward.data and form_mail.message_id.data:
            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_API,
                               to=config.API_LOCK_TIMEOUT):
                try:
                    msg_selected = api.getInboxMessageById(
                        form_mail.message_id.data, True)
                    if "inboxMessage" in msg_selected:
                        msg_selected = msg_selected["inboxMessage"][0]
                        form_populate = {
                            "body":
                            "\n\n\n------------------------------------------------------\n{}"
                            .format(base64_decode(msg_selected["message"]))
                        }
                        if base64_decode(
                                msg_selected["subject"]).startswith("Fwd:"):
                            form_populate["subject"] = base64_decode(
                                msg_selected["subject"])
                        else:
                            form_populate["subject"] = "Fwd: {}".format(
                                base64_decode(msg_selected["subject"]))
                        session['form_populate'] = form_populate
                        session['status_msg'] = status_msg
                except Exception as err:
                    logger.error("Error: {}".format(err))
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_API)

            return redirect(url_for("routes_mail.compose", address_to="0"))

        elif form_mail.delete.data and form_mail.message_id.data:
            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_API,
                               to=config.API_LOCK_TIMEOUT):
                try:
                    api.trashMessage(form_mail.message_id.data)
                except Exception as err:
                    logger.error("Error: {}".format(err))
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_API)

            return redirect(
                url_for("routes_mail.mailbox",
                        ident_address=ident_address,
                        mailbox=mailbox,
                        page=page,
                        msg_id="0"))

    if ident_address != '0' and mailbox == "inbox":
        daemon_com.update_unread_mail_count(ident_address)

    total_mail_counts = {}
    unread_mail_counts = {}
    for each_identity in Identity.query.all():
        unread_mail_counts[
            each_identity.address] = each_identity.unread_messages
        total_mail_counts[each_identity.address] = each_identity.total_messages

    return render_template("mailbox/mailbox.html",
                           base64_decode=base64_decode,
                           get_messages_from_page=get_messages_from_page,
                           ident_address=ident_address,
                           identities=identities,
                           mailbox=mailbox,
                           msg_id=msg_id,
                           msg_selected=msg_selected,
                           messages=messages,
                           page=page,
                           status_msg=status_msg,
                           timestamp_format=timestamp_format,
                           total_mail_counts=total_mail_counts,
                           unread_mail_counts=unread_mail_counts)
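The reply and forward branches build nearly identical form_populate dicts: a quoted copy of the decoded body plus a subject prefixed with "Re:" or "Fwd:" (reply also pre-fills the recipient), which compose() later reads out of the session. A sketch of the reply case, assuming the inboxMessage dict layout shown above:

def build_reply_populate(msg_selected, base64_decode):
    """Pre-fill the compose form from an inbox message, quoting the original body."""
    quoted = ("\n\n\n------------------------------------------------------\n{}"
              .format(base64_decode(msg_selected["message"])))
    subject = base64_decode(msg_selected["subject"])
    if not subject.startswith("Re:"):
        subject = "Re: {}".format(subject)
    return {
        "to_address": msg_selected["fromAddress"],
        "subject": subject,
        "body": quoted,
    }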
Example No. 12
0
def process_admin(msg_dict, msg_decrypted_dict):
    """Process message as an admin command"""
    logger.info("{}: Message is an admin command".format(
        msg_dict["msgid"][-config.ID_LENGTH:].upper()))

    # Authenticate sender
    with session_scope(DB_PATH) as new_session:
        chan = new_session.query(Chan).filter(
            Chan.address == msg_dict['toAddress']).first()
        if chan:
            errors, dict_info = process_passphrase(chan.passphrase)
            # Message must be from address in primary or secondary access list
            access = get_access(msg_dict['toAddress'])
            if errors or (msg_dict['fromAddress']
                          not in access["primary_addresses"]
                          and msg_dict['fromAddress']
                          not in access["secondary_addresses"]):
                logger.error(
                    "{}: Unauthorized Admin message. Deleting.".format(
                        msg_dict["msgid"][-config.ID_LENGTH:].upper()))
                daemon_com.trash_message(msg_dict["msgid"])
                return
        else:
            logger.error("{}: Admin message: Chan not found".format(
                msg_dict["msgid"][-config.ID_LENGTH:].upper()))
            daemon_com.trash_message(msg_dict["msgid"])
            return

    logger.info(
        "{}: Admin message received from {} for {} is authentic".format(
            msg_dict["msgid"][-config.ID_LENGTH:].upper(),
            msg_dict['fromAddress'], msg_dict['toAddress']))

    admin_dict = {
        "timestamp_utc": 0,
        "chan_type": None,
        "action": None,
        "action_type": None,
        "options": {},
        "thread_id": None,
        "message_id": None,
        "chan_address": None
    }

    if "timestamp_utc" in msg_decrypted_dict and msg_decrypted_dict[
            "timestamp_utc"]:
        admin_dict["timestamp_utc"] = msg_decrypted_dict["timestamp_utc"]
    if "chan_type" in msg_decrypted_dict and msg_decrypted_dict["chan_type"]:
        admin_dict["chan_type"] = msg_decrypted_dict["chan_type"]
    if "action" in msg_decrypted_dict and msg_decrypted_dict["action"]:
        admin_dict["action"] = msg_decrypted_dict["action"]
    if "action_type" in msg_decrypted_dict and msg_decrypted_dict[
            "action_type"]:
        admin_dict["action_type"] = msg_decrypted_dict["action_type"]
    if "options" in msg_decrypted_dict and msg_decrypted_dict["options"]:
        admin_dict["options"] = msg_decrypted_dict["options"]
    if "thread_id" in msg_decrypted_dict and msg_decrypted_dict["thread_id"]:
        admin_dict["thread_id"] = msg_decrypted_dict["thread_id"]
    if "message_id" in msg_decrypted_dict and msg_decrypted_dict["message_id"]:
        admin_dict["message_id"] = msg_decrypted_dict["message_id"]
    if "chan_address" in msg_decrypted_dict and msg_decrypted_dict[
            "chan_address"]:
        admin_dict["chan_address"] = msg_decrypted_dict["chan_address"]

    access = get_access(msg_dict['toAddress'])

    lf = LF()
    if lf.lock_acquire(config.LOCKFILE_ADMIN_CMD, to=20):
        try:
            # (Owner): set board options
            if (admin_dict["action"] == "set"
                    and admin_dict["action_type"] == "options" and
                    msg_dict['fromAddress'] in access["primary_addresses"]):
                admin_set_options(msg_dict, admin_dict)

            # (Owner, Admin): set thread options
            elif (
                    admin_dict["action"] == "set"
                    and admin_dict["action_type"] == "thread_options" and
                (msg_dict['fromAddress'] in access["primary_addresses"]
                 or msg_dict['fromAddress'] in access["secondary_addresses"])):
                admin_set_thread_options(msg_dict, admin_dict)

            # (Owner, Admin): delete board thread or post
            elif (
                    admin_dict["action"] == "delete"
                    and admin_dict["chan_type"] == "board" and
                (msg_dict['fromAddress'] in access["primary_addresses"]
                 or msg_dict['fromAddress'] in access["secondary_addresses"])):
                admin_delete_from_board(msg_dict, admin_dict)

            # (Owner, Admin): delete board post with comment
            elif (
                    admin_dict["action"] == "delete_comment"
                    and admin_dict["action_type"] == "post"
                    and "options" in admin_dict
                    and "delete_comment" in admin_dict["options"]
                    and "message_id" in admin_dict["options"]["delete_comment"]
                    and "comment" in admin_dict["options"]["delete_comment"]
                    and
                (msg_dict['fromAddress'] in access["primary_addresses"]
                 or msg_dict['fromAddress'] in access["secondary_addresses"])):
                admin_delete_from_board_with_comment(msg_dict, admin_dict)

            # (Owner, Admin): Ban user
            elif (
                    admin_dict["action"]
                    in ["board_ban_silent", "board_ban_public"]
                    and admin_dict["action_type"] in "ban_address"
                    and admin_dict["options"]
                    and "ban_address" in admin_dict["action_type"] and
                (msg_dict['fromAddress'] in access["primary_addresses"]
                 or msg_dict['fromAddress'] in access["secondary_addresses"])):
                admin_ban_address_from_board(msg_dict, admin_dict)

            else:
                logger.error("{}: Unknown Admin command. Deleting. {}".format(
                    msg_dict["msgid"][-config.ID_LENGTH:].upper(), admin_dict))
                daemon_com.trash_message(msg_dict["msgid"])
        except Exception:
            logger.exception(
                "{}: Exception processing Admin command. Deleting.".format(
                    msg_dict["msgid"][-config.ID_LENGTH:].upper()))
            daemon_com.trash_message(msg_dict["msgid"])
        finally:
            time.sleep(config.API_PAUSE)
            lf.lock_release(config.LOCKFILE_ADMIN_CMD)
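Each admin branch repeats the same membership test against the access lists returned by get_access(). The check could be expressed once as a helper; a sketch, assuming get_access() returns the dict of address lists used above:

def sender_is_authorized(from_address, access):
    """True if the sender is on the board's primary (Owner) or secondary (Admin) list."""
    return (from_address in access.get("primary_addresses", [])
            or from_address in access.get("secondary_addresses", []))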
Example No. 13
0
def parse_message(message_id, json_obj):
    file_decoded = None
    file_filename = None
    file_url_type = None
    file_url = None
    file_upload_settings = None
    file_extracts_start_base64 = None
    file_size = None
    file_amount = None
    file_sha256_hash = None
    file_enc_cipher = None
    file_enc_key_bytes = None
    file_enc_password = None
    file_sha256_hashes_match = False
    file_download_successful = False
    file_order = None
    file_progress = None
    media_info = {}
    upload_filename = None
    saved_file_filename = None
    saved_image_thumb_filename = None
    image1_spoiler = None
    image2_spoiler = None
    image3_spoiler = None
    image4_spoiler = None
    op_sha256_hash = None
    sage = None
    message = None
    nation = None
    nation_base64 = None
    nation_name = None
    message_steg = {}
    file_do_not_download = False
    file_path = None

    dict_msg = json_obj['message_decrypted']

    # SHA256 hash of the original encrypted message payload to identify the OP of the thread.
    # Each reply must identify the thread it's replying to by supplying the OP hash.
    # If the OP hash doesn't exist, a new thread is created.
    # This prevents OP hijacking by impersonating an OP with an earlier send timestamp.
    message_sha256_hash = hashlib.sha256(
        json.dumps(json_obj['message']).encode('utf-8')).hexdigest()
    # logger.info("Message SHA256: {}".format(message_sha256_hash))

    # Check if message properly formatted, delete if not.
    if "subject" not in dict_msg or not dict_msg["subject"]:
        logger.error("{}: Message missing required subject. Deleting.".format(
            message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return
    else:
        subject = html.escape(
            base64.b64decode(dict_msg["subject"]).decode('utf-8')).strip()
        if len(base64.b64decode(dict_msg["subject"]).decode('utf-8')) > 64:
            logger.error("{}: Subject too large. Deleting".format(
                message_id[-config.ID_LENGTH:].upper()))
            daemon_com.trash_message(message_id)
            return

    if "version" not in dict_msg or not dict_msg["version"]:
        logger.error("{}: Message has no version. Deleting.".format(
            message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return
    else:
        version = dict_msg["version"]

    # logger.info("dict_msg: {}".format(dict_msg))

    # Determine if message indicates if it's OP or not
    if "is_op" in dict_msg and dict_msg["is_op"]:
        is_op = dict_msg["is_op"]
    else:
        is_op = False
        if "sage" in dict_msg and dict_msg["sage"]:
            sage = True

    # Determine if message indicates if it's a reply to an OP by supplying OP hash
    if "op_sha256_hash" in dict_msg and dict_msg["op_sha256_hash"]:
        op_sha256_hash = dict_msg["op_sha256_hash"]

    # Determine if message is an OP or a reply
    if is_op:
        thread_id = get_thread_id(message_sha256_hash)
    elif op_sha256_hash:
        thread_id = get_thread_id(op_sha256_hash)
    else:
        logger.error("{}: Message neither OP nor reply: Deleting.".format(
            message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return

    # Now that the thread_id is determined, check if there exists an Admin command
    # instructing the deletion of the thread/message
    with session_scope(DB_PATH) as new_session:
        admin_post_delete = new_session.query(Command).filter(
            and_(Command.action == "delete", Command.action_type == "post",
                 Command.chan_address == json_obj['toAddress'],
                 Command.thread_id == thread_id,
                 Command.message_id == message_id)).first()

        admin_thread_delete = new_session.query(Command).filter(
            and_(Command.action == "delete", Command.action_type == "thread",
                 Command.chan_address == json_obj['toAddress'],
                 Command.thread_id == thread_id)).first()

        if admin_post_delete or admin_thread_delete:
            logger.error("{}: Admin deleted this post or thread".format(
                message_id[-config.ID_LENGTH:].upper()))
            daemon_com.trash_message(message_id)
            return

    if ("timestamp_utc" in dict_msg and dict_msg["timestamp_utc"]
            and isinstance(dict_msg["timestamp_utc"], int)):
        timestamp_sent = dict_msg["timestamp_utc"]
    else:
        timestamp_sent = int(json_obj['receivedTime'])

    log_age_and_expiration(message_id, daemon_com.get_utc(), timestamp_sent,
                           get_msg_expires_time(message_id))

    # Check if board is set to automatically clear and message is older than the last clearing
    if chan_auto_clears_and_message_too_old(json_obj['toAddress'],
                                            timestamp_sent):
        logger.info(
            "{}: Message outside current auto clear period. Deleting.".format(
                message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return

    if "message" in dict_msg and dict_msg["message"]:
        message = dict_msg["message"]
    if "file_filename" in dict_msg and dict_msg["file_filename"]:
        file_filename = dict_msg["file_filename"]
        logger.info("{} Filename on post: {}".format(
            message_id[-config.ID_LENGTH:].upper(), dict_msg["file_filename"]))
    if "image1_spoiler" in dict_msg and dict_msg["image1_spoiler"]:
        image1_spoiler = dict_msg["image1_spoiler"]
    if "image2_spoiler" in dict_msg and dict_msg["image2_spoiler"]:
        image2_spoiler = dict_msg["image2_spoiler"]
    if "image3_spoiler" in dict_msg and dict_msg["image3_spoiler"]:
        image3_spoiler = dict_msg["image3_spoiler"]
    if "image4_spoiler" in dict_msg and dict_msg["image4_spoiler"]:
        image4_spoiler = dict_msg["image4_spoiler"]
    if "upload_filename" in dict_msg and dict_msg["upload_filename"]:
        upload_filename = dict_msg["upload_filename"]
    if "file_size" in dict_msg and dict_msg["file_size"]:
        file_size = dict_msg["file_size"]
    if "file_amount" in dict_msg and dict_msg["file_amount"]:
        file_amount = dict_msg["file_amount"]
    if "file_url" in dict_msg and dict_msg["file_url"]:
        file_url = dict_msg["file_url"]
    if "file_url_type" in dict_msg and dict_msg["file_url_type"]:
        file_url_type = dict_msg["file_url_type"]
    if "file_upload_settings" in dict_msg and dict_msg["file_upload_settings"]:
        file_upload_settings = dict_msg["file_upload_settings"]
    if "file_extracts_start_base64" in dict_msg and dict_msg[
            "file_extracts_start_base64"] is not None:
        file_extracts_start_base64 = json.loads(
            dict_msg["file_extracts_start_base64"])
    if "file_base64" in dict_msg and dict_msg["file_base64"] is not None:
        try:
            file_decoded = base64.b64decode(dict_msg["file_base64"])
            file_size = len(file_decoded)
        except Exception as err:
            logger.exception("{}: Exception decoding attachments: {}".format(
                message_id[-config.ID_LENGTH:].upper(), err))
    if "file_sha256_hash" in dict_msg and dict_msg["file_sha256_hash"]:
        file_sha256_hash = dict_msg["file_sha256_hash"]
    if "file_enc_cipher" in dict_msg and dict_msg["file_enc_cipher"]:
        file_enc_cipher = dict_msg["file_enc_cipher"]
    if "file_enc_key_bytes" in dict_msg and dict_msg["file_enc_key_bytes"]:
        file_enc_key_bytes = dict_msg["file_enc_key_bytes"]
    if "file_enc_password" in dict_msg and dict_msg["file_enc_password"]:
        file_enc_password = dict_msg["file_enc_password"]
    if "file_order" in dict_msg and dict_msg["file_order"]:
        file_order = dict_msg["file_order"]

    if "nation" in dict_msg and dict_msg["nation"]:
        nation = dict_msg["nation"]
    if "nation_base64" in dict_msg and dict_msg["nation_base64"]:
        nation_base64 = dict_msg["nation_base64"]
    if "nation_name" in dict_msg and dict_msg["nation_name"]:
        nation_name = dict_msg["nation_name"]

    if ((file_amount and file_amount > 4)
            or (file_order and len(file_order) > 4)):
        logger.error(
            "{}: More than 4 files found in message. Deleting.".format(
                message_id[-config.ID_LENGTH:].upper()))
        daemon_com.trash_message(message_id)
        return

    if nation_base64:
        flag_pass = True
        try:
            flag = Image.open(BytesIO(base64.b64decode(nation_base64)))
            flag_width, flag_height = flag.size
            if flag_width > config.FLAG_MAX_WIDTH or flag_height > config.FLAG_MAX_HEIGHT:
                flag_pass = False
                logger.error(
                    "Flag dimensions are too large (max {}x{}): {}x{}".format(
                        config.FLAG_MAX_WIDTH, config.FLAG_MAX_HEIGHT,
                        flag_width, flag_height))
            if len(base64.b64decode(nation_base64)) > config.FLAG_MAX_SIZE:
                flag_pass = False
                logger.error(
                    "Flag file size is too large: {}. Must be less than or equal to {} bytes."
                    .format(len(base64.b64decode(nation_base64)),
                            config.FLAG_MAX_SIZE))
        except Exception:
            flag_pass = False
            logger.error("Error attempting to open flag image")

        if not nation_name:
            flag_pass = False
            logger.error("{}: Flag name not found".format(
                message_id[-config.ID_LENGTH:].upper()))
        elif len(nation_name) > 64:
            flag_pass = False
            logger.error("{}: Flag name too long: {}".format(
                message_id[-config.ID_LENGTH:].upper(), nation_name))

        if not flag_pass:
            logger.error(
                "{}: Base64 flag didn't pass validation. Deleting.".format(
                    message_id[-config.ID_LENGTH:].upper()))
            daemon_com.trash_message(message_id)
            return

    if file_url or file_decoded:
        save_dir = "{}/{}".format(config.FILE_DIRECTORY, message_id)
        try:
            os.mkdir(save_dir)
        except OSError:
            pass  # directory already exists
        saved_file_filename = "{}.zip".format(message_id)
        file_path = "{}/{}".format(config.FILE_DIRECTORY, saved_file_filename)

    if file_url:
        # Check whether the attachment archive has already been downloaded
        logger.info("{}: Filename on disk: {}".format(
            message_id[-config.ID_LENGTH:].upper(), saved_file_filename))

        if os.path.exists(file_path) and os.path.getsize(file_path) != 0:
            logger.info(
                "{}: Downloaded zip file found. Not attempting to download.".
                format(message_id[-config.ID_LENGTH:].upper()))
            file_size_test = os.path.getsize(file_path)
            file_download_successful = True
            extract_zip(message_id, file_path, save_dir)
        else:
            logger.info("{}: File not found. Attempting to download.".format(
                message_id[-config.ID_LENGTH:].upper()))
            logger.info("{}: Downloading file url: {}".format(
                message_id[-config.ID_LENGTH:].upper(), file_url))

            if upload_filename and file_url_type and file_upload_settings:
                # Pick a download slot to fill (2 slots per domain)
                domain = urlparse(file_url).netloc
                lockfile1 = "/var/lock/upload_{}_1.lock".format(domain)
                lockfile2 = "/var/lock/upload_{}_2.lock".format(domain)

                lf = LF()
                lockfile = random.choice([lockfile1, lockfile2])
                if lf.lock_acquire(lockfile, to=600):
                    try:
                        (file_download_successful, file_size_test,
                         file_amount_test, file_do_not_download,
                         file_sha256_hashes_match, file_progress, media_info,
                         message_steg) = download_and_extract(
                             json_obj['toAddress'], message_id, file_url,
                             file_upload_settings, file_extracts_start_base64,
                             upload_filename, file_path, file_sha256_hash,
                             file_enc_cipher, file_enc_key_bytes,
                             file_enc_password)

                        if file_size_test:
                            file_size = file_size_test

                        if file_amount_test:
                            file_amount = file_amount_test
                    finally:
                        lf.lock_release(lockfile)

        if file_download_successful:
            for dirpath, dirnames, filenames in os.walk(save_dir):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    if os.path.islink(fp):  # skip symbolic links
                        continue

                    file_extension = html.escape(
                        os.path.splitext(f)[1].split(".")[-1].lower())
                    if not file_extension:
                        logger.error(
                            "{}: File extension not found. Deleting.".format(
                                message_id[-config.ID_LENGTH:].upper()))
                        daemon_com.trash_message(message_id)
                        return
                    elif len(file_extension) >= config.MAX_FILE_EXT_LENGTH:
                        logger.error(
                            "{}: File extension greater than {} characters. Deleting."
                            .format(message_id[-config.ID_LENGTH:].upper(),
                                    config.MAX_FILE_EXT_LENGTH))
                        daemon_com.trash_message(message_id)
                        return
                    if file_extension in config.FILE_EXTENSIONS_IMAGE:
                        saved_image_thumb_filename = "{}_thumb.{}".format(
                            message_id, file_extension)
                        img_thumb_filename = "{}/{}".format(
                            save_dir, saved_image_thumb_filename)
                        generate_thumbnail(message_id, fp, img_thumb_filename,
                                           file_extension)

    # Bitmessage attachment
    if file_decoded:
        encrypted_zip = "/tmp/{}.zip".format(
            get_random_alphanumeric_string(12,
                                           with_punctuation=False,
                                           with_spaces=False))
        # encrypted_zip_object = BytesIO(file_decoded)
        # Write the decoded attachment to a temporary encrypted zip file
        with open(encrypted_zip, 'wb') as output_file:
            output_file.write(file_decoded)

        if file_enc_cipher == "NONE":
            logger.info("{}: File not encrypted".format(
                message_id[-config.ID_LENGTH:].upper()))
            decrypted_zip = encrypted_zip
        elif file_enc_password:
            # decrypt file
            decrypted_zip = "/tmp/{}.zip".format(
                get_random_alphanumeric_string(12,
                                               with_punctuation=False,
                                               with_spaces=False))
            delete_file(decrypted_zip)  # make sure no file already exists
            logger.info("{}: Decrypting file".format(
                message_id[-config.ID_LENGTH:].upper()))

            try:
                with session_scope(DB_PATH) as new_session:
                    settings = new_session.query(GlobalSettings).first()
                    ret_crypto = crypto_multi_decrypt(
                        file_enc_cipher,
                        file_enc_password + config.PGP_PASSPHRASE_ATTACH,
                        encrypted_zip,
                        decrypted_zip,
                        key_bytes=file_enc_key_bytes,
                        max_size_bytes=settings.max_extract_size * 1024 * 1024)
                    if not ret_crypto:
                        logger.error("{}: Issue decrypting file")
                        return
                    else:
                        logger.info("{}: Finished decrypting file".format(
                            message_id[-config.ID_LENGTH:].upper()))

                    delete_file(encrypted_zip)
                    # z = zipfile.ZipFile(download_path)
                    # z.setpassword(config.PGP_PASSPHRASE_ATTACH.encode())
                    # z.extract(extract_filename, path=extract_path)
            except Exception:
                logger.exception("Error decrypting file")

        # Get the number of files in the zip archive
        try:
            file_amount_test = count_files_in_zip(message_id, decrypted_zip)
        except Exception as err:
            file_amount_test = None
            logger.error("{}: Error checking zip: {}".format(
                message_id[-config.ID_LENGTH:].upper(), err))

        if file_amount_test:
            file_amount = file_amount_test

        if file_amount and file_amount > config.FILE_ATTACHMENTS_MAX:
            logger.info(
                "{}: Number of attachments ({}) exceed the maximum ({}).".
                format(message_id[-config.ID_LENGTH:].upper(), file_amount,
                       config.FILE_ATTACHMENTS_MAX))
            daemon_com.trash_message(message_id)
            return

        # Check size of zip contents before extraction
        can_extract = True
        with zipfile.ZipFile(decrypted_zip, 'r') as zipObj:
            total_size = 0
            for each_file in zipObj.infolist():
                total_size += each_file.file_size
            logger.info("ZIP contents size: {}".format(total_size))
            with session_scope(DB_PATH) as new_session:
                settings = new_session.query(GlobalSettings).first()
                if (settings.max_extract_size and
                        total_size > settings.max_extract_size * 1024 * 1024):
                    can_extract = False
                    logger.error(
                        "ZIP content size greater than max allowed ({} bytes). "
                        "Not extracting.".format(settings.max_extract_size *
                                                 1024 * 1024))

        if can_extract:
            # Extract zip archive
            extract_path = "{}/{}".format(config.FILE_DIRECTORY, message_id)
            extract_zip(message_id, decrypted_zip, extract_path)
            delete_file(decrypted_zip)  # Secure delete

            errors_files, media_info, message_steg = process_attachments(
                message_id, extract_path)

            if errors_files:
                logger.error(
                    "{}: File extension greater than {} characters. Deleting.".
                    format(message_id[-config.ID_LENGTH:].upper(),
                           config.MAX_FILE_EXT_LENGTH))
                delete_files_recursive(extract_path)
                daemon_com.trash_message(message_id)
                return

    thread_locked = False
    thread_anchored = False
    owner_posting = False
    with session_scope(DB_PATH) as new_session:
        try:
            thread = new_session.query(Threads).filter(
                Threads.thread_hash == thread_id).first()

            if thread:
                admin_cmd = new_session.query(Command).filter(
                    and_(Command.action == "set",
                         Command.action_type == "thread_options",
                         Command.thread_id == thread.thread_hash)).first()
                if admin_cmd:
                    # Check for remote thread lock
                    if (admin_cmd.thread_lock and admin_cmd.thread_lock_ts
                            and timestamp_sent > admin_cmd.thread_lock_ts):
                        thread_locked = "Post timestamp is after remote lock. Deleting."

                    # Check for remote thread anchor
                    if (admin_cmd.thread_anchor and admin_cmd.thread_anchor_ts
                            and timestamp_sent > admin_cmd.thread_anchor_ts):
                        thread_anchored = "Post timestamp is after remote anchor. Not updating thread timestamp."

                # Check for local thread lock
                if thread.locked_local and timestamp_sent > thread.locked_local_ts:
                    thread_locked = "Post timestamp is after local lock. Deleting."

                # Check for local thread anchor
                if thread.anchored_local and timestamp_sent > thread.anchored_local_ts:
                    thread_anchored = "Post timestamp is after local anchor. Not updating thread timestamp."

            if thread_locked:
                chan = new_session.query(Chan).filter(
                    Chan.address == json_obj['toAddress']).first()
                if chan:
                    access = get_access(json_obj['toAddress'])
                    if json_obj['fromAddress'] in access["primary_addresses"]:
                        owner_posting = True
                        logger.error(
                            "{}: Owner posting in locked thread. Allowing.".
                            format(message_id[-config.ID_LENGTH:].upper()))
        except Exception:
            logger.exception("Checking thread lock")

    if thread_locked and not owner_posting:
        logger.info(thread_locked)
        daemon_com.trash_message(message_id)
        return

    with session_scope(DB_PATH) as new_session:
        try:
            chan = new_session.query(Chan).filter(
                Chan.address == json_obj['toAddress']).first()
            chan.last_post_number = chan.last_post_number + 1

            thread = new_session.query(Threads).filter(
                Threads.thread_hash == thread_id).first()

            if not thread and is_op:  # OP received, create new thread
                new_thread = Threads()
                new_thread.thread_hash = thread_id
                new_thread.thread_hash_short = thread_id[-12:]
                new_thread.op_sha256_hash = message_sha256_hash
                if chan:
                    new_thread.chan_id = chan.id
                new_thread.subject = subject
                new_thread.timestamp_sent = timestamp_sent
                new_thread.timestamp_received = int(json_obj['receivedTime'])
                new_session.add(new_thread)

                if timestamp_sent > chan.timestamp_sent:
                    chan.timestamp_sent = timestamp_sent
                if int(json_obj['receivedTime']) > chan.timestamp_received:
                    chan.timestamp_received = int(json_obj['receivedTime'])

                new_session.commit()
                id_thread = new_thread.id

            elif not thread and not is_op:  # Reply received before OP, create thread with OP placeholder
                new_thread = Threads()
                new_thread.thread_hash = thread_id
                new_thread.thread_hash_short = thread_id[-12:]
                new_thread.op_sha256_hash = op_sha256_hash
                if chan:
                    new_thread.chan_id = chan.id
                new_thread.subject = subject
                new_thread.timestamp_sent = timestamp_sent
                new_thread.timestamp_received = int(json_obj['receivedTime'])
                new_session.add(new_thread)

                if timestamp_sent > chan.timestamp_sent:
                    chan.timestamp_sent = timestamp_sent
                if int(json_obj['receivedTime']) > chan.timestamp_received:
                    chan.timestamp_received = int(json_obj['receivedTime'])

                new_session.commit()
                id_thread = new_thread.id

            elif thread and not is_op:  # Reply received after OP, add to current thread
                if thread_anchored:
                    logger.info(thread_anchored)

                if timestamp_sent > thread.timestamp_sent:
                    if not sage and not thread_anchored:
                        thread.timestamp_sent = timestamp_sent
                if int(json_obj['receivedTime']) > thread.timestamp_received:
                    if not sage and not thread_anchored:
                        thread.timestamp_received = int(
                            json_obj['receivedTime'])

                if timestamp_sent > chan.timestamp_sent:
                    if not sage and not thread_anchored:
                        chan.timestamp_sent = timestamp_sent
                if int(json_obj['receivedTime']) > chan.timestamp_received:
                    if not sage and not thread_anchored:
                        chan.timestamp_received = int(json_obj['receivedTime'])

                new_session.commit()
                id_thread = thread.id

            elif thread and is_op:
                # Post indicating it is OP but thread already exists
                # Could have received reply before OP
                # Add OP to current thread
                id_thread = thread.id

            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_STORE_POST, to=20):
                try:
                    # Create message
                    new_msg = Messages()
                    new_msg.version = version
                    new_msg.message_id = message_id
                    new_msg.post_id = get_post_id(message_id)
                    new_msg.post_number = chan.last_post_number
                    new_msg.expires_time = get_msg_expires_time(message_id)
                    new_msg.thread_id = id_thread
                    new_msg.address_from = bleach.clean(
                        json_obj['fromAddress'])
                    new_msg.message_sha256_hash = message_sha256_hash
                    new_msg.is_op = is_op
                    if sage:
                        new_msg.sage = sage
                    new_msg.message = message
                    new_msg.subject = subject
                    new_msg.nation = nation
                    new_msg.nation_base64 = nation_base64
                    new_msg.nation_name = nation_name
                    if file_decoded == b"":  # Empty file
                        new_msg.file_decoded = b" "
                    else:
                        new_msg.file_decoded = file_decoded
                    new_msg.file_filename = file_filename
                    new_msg.file_url = file_url
                    new_msg.file_upload_settings = json.dumps(
                        file_upload_settings)
                    new_msg.file_extracts_start_base64 = json.dumps(
                        file_extracts_start_base64)
                    new_msg.file_size = file_size
                    new_msg.file_amount = file_amount
                    new_msg.file_do_not_download = file_do_not_download
                    new_msg.file_progress = file_progress
                    new_msg.file_sha256_hash = file_sha256_hash
                    new_msg.file_enc_cipher = file_enc_cipher
                    new_msg.file_enc_key_bytes = file_enc_key_bytes
                    new_msg.file_enc_password = file_enc_password
                    new_msg.file_sha256_hashes_match = file_sha256_hashes_match
                    new_msg.file_order = json.dumps(file_order)
                    new_msg.file_download_successful = file_download_successful
                    new_msg.upload_filename = upload_filename
                    new_msg.saved_file_filename = saved_file_filename
                    new_msg.saved_image_thumb_filename = saved_image_thumb_filename
                    new_msg.image1_spoiler = image1_spoiler
                    new_msg.image2_spoiler = image2_spoiler
                    new_msg.image3_spoiler = image3_spoiler
                    new_msg.image4_spoiler = image4_spoiler
                    new_msg.timestamp_received = int(json_obj['receivedTime'])
                    new_msg.timestamp_sent = timestamp_sent
                    new_msg.media_info = json.dumps(media_info)
                    new_msg.message_steg = json.dumps(message_steg)
                    new_msg.message_original = json_obj["message"]
                    new_session.add(new_msg)

                    if timestamp_sent > chan.timestamp_sent:
                        chan.timestamp_sent = timestamp_sent
                    if int(json_obj['receivedTime']) > chan.timestamp_received:
                        chan.timestamp_received = int(json_obj['receivedTime'])

                    new_session.commit()

                    message_edit = new_session.query(Messages).filter(
                        Messages.message_id == message_id).first()
                    try:
                        message_edit.popup_html = generate_reply_link_html(
                            message_edit)
                        new_session.commit()
                    except Exception as err:
                        logger.exception(
                            "{}: Couldn't generate popup HTML: {}".format(
                                message_id[-config.ID_LENGTH:].upper(), err))

                    process_message_replies(message_id, message)

                    # Determine if an admin command to delete with comment is present
                    # Replace comment and delete file information
                    commands = new_session.query(Command).filter(
                        and_(Command.action == "delete_comment",
                             Command.action_type == "post",
                             Command.chan_address ==
                             json_obj['toAddress'])).all()
                    for each_cmd in commands:
                        try:
                            options = json.loads(each_cmd.options)
                        except Exception:
                            options = {}
                        if ("delete_comment" in options
                                and "message_id" in options["delete_comment"]
                                and options["delete_comment"]["message_id"]
                                == message_id
                                and "comment" in options["delete_comment"]):

                            if "from_address" in options["delete_comment"]:
                                from_address = options["delete_comment"][
                                    "from_address"]
                            else:
                                from_address = json_obj['fromAddress']

                            # replace comment
                            delete_and_replace_comment(
                                options["delete_comment"]["message_id"],
                                options["delete_comment"]["comment"],
                                from_address=from_address,
                                local_delete=False)

                    # Generate card
                    generate_card(thread_id, force_generate=True)
                except Exception:
                    logger.exception("Saving message to DB")
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_API)

            # Delete message from Bitmessage after parsing and adding to BitChan database
            lf = LF()
            if lf.lock_acquire(config.LOCKFILE_API, to=120):
                try:
                    return_val = api.trashMessage(message_id)
                except Exception as err:
                    logger.error(
                        "{}: Exception during message delete: {}".format(
                            message_id[-config.ID_LENGTH:].upper(), err))
                finally:
                    time.sleep(config.API_PAUSE)
                    lf.lock_release(config.LOCKFILE_API)
        except Exception as err:
            logger.error(
                "{}: Could not write to database. Deleting. Error: {}".format(
                    message_id[-config.ID_LENGTH:].upper(), err))
            logger.exception("1")
            daemon_com.trash_message(message_id)
            return
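The trash step above shows the lock discipline these examples apply around every Bitmessage API call: acquire a lockfile with a timeout, make the call, sleep for config.API_PAUSE, then release the lock in a finally block. Below is a minimal, stdlib-only sketch of that pattern as a context manager; it is an assumption, not the project's LF helper, and the lockfile path, timeout, and pause are placeholder values.

import fcntl
import time
from contextlib import contextmanager

@contextmanager
def api_lock(lockfile="/var/lock/bitchan_api.lock", timeout=120, pause=3):
    """Hypothetical stand-in for the LF().lock_acquire()/lock_release() pattern above."""
    with open(lockfile, "w") as lock_fh:
        start = time.time()
        while True:
            try:
                # Non-blocking exclusive lock; retry until the timeout expires
                fcntl.flock(lock_fh, fcntl.LOCK_EX | fcntl.LOCK_NB)
                break
            except BlockingIOError:
                if time.time() - start > timeout:
                    raise TimeoutError("Could not acquire {}".format(lockfile))
                time.sleep(0.5)
        try:
            yield
        finally:
            time.sleep(pause)  # mirrors time.sleep(config.API_PAUSE) above
            fcntl.flock(lock_fh, fcntl.LOCK_UN)

# Usage, mirroring the trash call in the example:
# with api_lock():
#     api.trashMessage(message_id)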
Exemplo n.º 14
0
def replace_god_song(text, seed, message_id):
    regex = r"(?:\A|\s)(?i)#godsong"
    lines = text.split("\n")
    stichomancy_lf = "/var/lock/stichomancy.lock"
    lf = LF()

    first_match = True
    find_count = 1
    lines_finds = IterFinds(lines, regex)
    for line_index, i in lines_finds:
        for match_index, each_find in enumerate(
                re.finditer(regex, lines[line_index])):
            if find_count > 10:  # Process max of 10 per message
                return "\n".join(lines)
            elif match_index == i:
                match = lines[line_index][each_find.start():each_find.end()]
                start_string = lines[line_index][:each_find.start()]
                end_string = lines[line_index][each_find.end():]
                quote = None
                book_link = None
                if lf.lock_acquire(stichomancy_lf, to=600):
                    try:
                        line_number, quote, book_url, title, author = stichomancy_pull(
                            "{}{}{}".format(seed, line_index, i),
                            select_book_id=10900)

                        if not line_number:
                            continue

                        previous_line = 0

                        # Walk the book start-line index to find which book the
                        # pulled line falls in (avoid reusing "i", the outer match index)
                        for book_start_line in replacements_data.bible_books:
                            if book_start_line > line_number:
                                break
                            previous_line = book_start_line

                        title_str = title
                        if author and "Various" not in author:
                            title_str += " by {}".format(title, author)

                        book_link = '<a class="link" target="_blank" href="{url}">{name}, {title}</a>'.format(
                            url=book_url,
                            name=replacements_data.bible_books[previous_line],
                            title=title_str)
                    finally:
                        lf.lock_release(stichomancy_lf)

                if first_match:  # Only make one God song
                    first_match = False
                    file_path = "{}/{}_god_song.mp3".format(
                        FILE_DIRECTORY, message_id)
                    make_god_song_01(seed=seed, save_path=file_path)
                    audio_rep = '<audio class="volume-75" style="width: 325px" controls>' \
                                '<source src="/files/god_song/{}/god_song.mp3" type="audio/mp3">' \
                                '</audio>'.format(message_id)
                    if quote:
                        middle_string = ' <span class="replace-funcs">{}(</span>{} ' \
                                        '<span class="replace-funcs">{} -{})</span>'.format(
                                            match, audio_rep, quote, book_link)
                    else:
                        # Without a quote there is nothing to attribute, so only
                        # embed the audio player after the matched tag
                        middle_string = ' <span class="replace-funcs">{}</span> {}'.format(
                            match, audio_rep)
                else:  # After God song, only get random Bible quote
                    if quote:
                        middle_string = ' <span class="replace-funcs">{}({} -{})</span>'.format(
                            match, quote, book_link)
                    else:
                        middle_string = ' <span class="replace-funcs">{}</span>'.format(
                            match)

                find_count += 1
                lines[line_index] = start_string + middle_string + end_string

    return "\n".join(lines)
Exemplo n.º 15
0
def block_address(chan_address, block_address, block_type):
    """Block address locally, on single board or across all boards"""
    global_admin, allow_msg = allowed_access(
        check_is_global_admin=True)
    board_list_admin, allow_msg = allowed_access(
        check_is_board_list_admin=True, check_admin_board=chan_address)
    if not global_admin and not board_list_admin:
        return allow_msg

    form_confirm = forms_board.Confirm()
    chan = Chan.query.filter(Chan.address == chan_address).first()

    board = {
        "current_chan": chan,
        "current_thread": None,
    }
    status_msg = {"status_message": []}

    if block_address in daemon_com.get_identities():
        status_msg['status_message'].append("You cannot block your own identity")
        status_msg['status_title'] = "Error"

    elif request.method != 'POST' or not form_confirm.confirm.data:
        return render_template("pages/confirm.html",
                               action="block_address",
                               block_type=block_type,
                               chan=chan,
                               chan_address=chan_address,
                               block_address=block_address)

    elif request.method == 'POST' and form_confirm.confirm.data:
        messages = Messages.query.filter(
            Messages.address_from == block_address).all()

        list_delete_message_ids = []

        for message in messages:
            if block_type == "single_board" and message.thread.chan.address == chan_address:
                list_delete_message_ids.append(message.message_id)
            elif block_type == "global":
                if not global_admin:
                    return allow_msg
                list_delete_message_ids.append(message.message_id)

        lf = LF()
        if lf.lock_acquire(config.LOCKFILE_MSG_PROC, to=60):
            try:
                # First, delete messages from database
                if list_delete_message_ids:
                    for each_id in list_delete_message_ids:
                        delete_post(each_id)
                    daemon_com.signal_generate_post_numbers()

                # Give Bitmessage time to delete the messages before BitChan rescans the inbox
                time.sleep(1)
            except Exception as err:
                logger.error("Exception while deleting messages: {}".format(err))
            finally:
                lf.lock_release(config.LOCKFILE_MSG_PROC)

        new_cmd = Command()
        new_cmd.do_not_send = True
        new_cmd.action = "block"
        new_cmd.action_type = "block_address"
        new_cmd.options = json.dumps({"block_address": block_address})
        if block_type == "single_board":
            new_cmd.chan_address = chan_address
        elif block_type == "global":
            new_cmd.chan_address = "all"  # global block (all boards)
        new_cmd.save()

        status_msg['status_title'] = "Success"
        status_msg['status_message'].append("Blocked address {}".format(block_address))

    return render_template("pages/alert.html",
                           board=board,
                           status_msg=status_msg)
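The "block" Command rows written above are presumably consulted when new posts arrive. The sketch below shows one way that check could look; the helper name and query are assumptions layered on the model and option names used in the example, not code from the source.

import json

def address_is_blocked(sender_address, chan_address):
    """Hypothetical check against the block Commands created above."""
    commands = Command.query.filter(
        Command.action == "block",
        Command.action_type == "block_address").all()
    for each_cmd in commands:
        # A chan_address of "all" means a global block across boards
        if each_cmd.chan_address not in (chan_address, "all"):
            continue
        try:
            options = json.loads(each_cmd.options)
        except Exception:
            options = {}
        if options.get("block_address") == sender_address:
            return True
    return False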