def main(ctx, fmt, output_dir, export_attachments):
    """Export all conversations."""
    LOGGER.info(f"Export all conversations into '{output_dir}'.")
    output_dir = Path(output_dir)
    output_dir.mkdir(exist_ok=True)

    # The SQL format dumps the raw message database in one go; no
    # per-conversation split is needed.
    if fmt == "sql":
        db.dump_messages(ctx.obj["config"], output_dir)
        return

    messages = db.fetch_messages(ctx.obj["config"], as_dataframe=True)
    for col in messages.columns:
        LOGGER.debug(f"Column '{col}' type: {messages[col].dtype}")

    # One sub-directory per conversation, each holding its exported messages.
    conv_map = utilities.conversation_mapping(ctx.obj["config"])
    for c_id, c_name in conv_map.items():
        LOGGER.info(f"Exporting conversation '{c_name}'")
        conv_dir = output_dir / c_name
        conv_dir.mkdir(exist_ok=True)
        if fmt == "csv":
            messages[messages["conversation_id"] == c_id].to_csv(
                conv_dir / "messages.csv", index=False
            )
        elif fmt == "json":
            messages[messages["conversation_id"] == c_id].to_json(
                conv_dir / "messages.json", orient="records"
            )

    if export_attachments:
        utilities.export_attachments(ctx.obj["config"], messages)
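# A minimal sketch of how this export command might be registered with click
# (suggested by the ctx.obj["config"] pattern above); the option names,
# choices, and defaults are assumptions, not taken from the original code.
import click

@click.command()
@click.option("--format", "fmt", type=click.Choice(["csv", "json", "sql"]),
              default="csv", help="Export format.")
@click.option("--output-dir", default="output", help="Directory to export into.")
@click.option("--export-attachments", is_flag=True,
              help="Also export message attachments.")
@click.pass_context
def export(ctx, fmt, output_dir, export_attachments):
    # Delegate to main() above with the parsed options.
    main(ctx, fmt, output_dir, export_attachments)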
def main(ctx, show_id, show_message_count):
    """List all the conversations in Signal."""
    LOGGER.debug("Listing all conversations in the database.")
    total = 0
    if show_message_count:
        messages = db.fetch_messages(ctx.obj["config"])

    print(colored("Private conversations:", "white", attrs=["bold"]))
    conversations = db.fetch_conversations(ctx.obj["config"], conv_type="private")
    for conv in conversations:
        total += 1
        output = [colored("->", "blue")]
        if show_id:
            output.append(colored(conv["id"].decode("utf-8"), "green"))
        output.append(
            colored(conv["name"] if conv["name"] else conv["profile_name"], "white")
        )
        if show_message_count:
            message_count = len(
                [m for m in messages if m["conversation_id"] == conv["id"]]
            )
            output.append(colored(f"[{message_count} messages]"))
        print(" ".join(output))

    print("")
    print(colored("Group conversations:", "white", attrs=["bold"]))
    conversations = db.fetch_conversations(ctx.obj["config"], conv_type="group")
    for conv in conversations:
        total += 1
        output = [colored("->", "blue")]
        if show_id:
            # Group identifiers are raw bytes, so display them base64-encoded.
            output.append(
                colored(base64.b64encode(conv["id"]).decode("utf-8"), "green")
            )
        output.append(
            colored(conv["name"] if conv["name"] else conv["profile_name"], "white")
        )
        output.append(f"({len(conv['members'])} members)")
        if show_message_count:
            message_count = len(
                [m for m in messages if m["conversation_id"] == conv["id"]]
            )
            output.append(colored(f"[{message_count} messages]"))
        print(" ".join(output))

    print("")
    print(colored(f"{total} conversations in total.", "green"))
def load_messages():
    """Load the messages, refreshing the module-level cache when it is stale."""
    global LAST_UPDATE
    global MESSAGES
    # Refresh the cache if it is empty or older than 60 seconds.
    if MESSAGES is None or time.time() - LAST_UPDATE > 60:
        LOGGER.info("Updating messages...")
        LAST_UPDATE = time.time()
        MESSAGES = db.fetch_messages(CONFIG, as_dataframe=True)
    else:
        LOGGER.debug("Using pre-fetched messages.")
    # Return a copy so callers cannot mutate the cached DataFrame.
    return MESSAGES.copy()
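# Illustrative use of the cache above (not part of the module): within the
# 60-second window only the first call queries the database, and every caller
# gets its own copy of the cached DataFrame.
df_first = load_messages()         # stale or empty cache: fetches from the database
df_second = load_messages()        # within 60 s: reuses the cached MESSAGES
assert df_first is not df_second   # .copy() protects the cache from mutation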
import logging
import pickle
import time

import dash
import dash_core_components as dcc
import dash_html_components as html
import emoji
import numpy as np
import pandas as pd
import plotly.graph_objs as go
from dash.dependencies import Input, Output

import db
import utilities

LOGGER = logging.getLogger(__name__)
EMOJI_SET = set(emoji.UNICODE_EMOJI)

LOGGER.debug("Unpickling configuration")
CONFIG = pickle.load(open(".config.pkl", "rb"))

LAST_UPDATE = time.time()
MESSAGES = db.fetch_messages(CONFIG, as_dataframe=True)
CONVS = utilities.conversation_mapping(CONFIG)

APP = dash.Dash("signal-statistics")
# APP.config["suppress_callback_exceptions"] = True

APP.layout = html.Div(
    [
        # Header
        html.Div(
            [
                html.Span("Signal Conversation Statistics", className="title"),
                html.Div(
                    [
                        dcc.Dropdown(
                            id="conversation",
                            options=sorted(
from flask import render_template


def render_message(me, you):
    """Render the messages exchanged between the two given participants."""
    messages = db.fetch_messages(me, you)
    return render_template('view_message.html', messages=messages)
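# A minimal sketch of how this view might be wired into a Flask app; the app
# object and the route pattern below are assumptions, not part of the original
# code.
from flask import Flask

app = Flask(__name__)

# URL parameters supply the two participants that render_message() expects.
app.add_url_rule(
    "/messages/<me>/<you>", endpoint="render_message", view_func=render_message
)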