def main():
    """Query-tool entry point: print history, UTXOs and balance for each
    address given on the command line.

    Usage: prog [limit] address [address ...]
    An optional leading integer caps the number of rows printed per
    section (default 10).
    """
    env = Env()
    os.chdir(env.db_dir)
    db = DB(env)
    coin = db.coin
    argc = 1
    try:
        limit = int(sys.argv[argc])
        argc += 1
    except Exception:
        # BUG FIX: was a bare `except:` (would also swallow SystemExit /
        # KeyboardInterrupt).  First positional arg was not an integer
        # limit; fall back to 10 and treat it as the first address.
        limit = 10
    for addr in sys.argv[argc:]:
        print('Address: ', addr)
        hash168 = coin.address_to_hash168(addr)
        # `n` stays None when the loop below yields nothing.
        n = None
        for n, (tx_hash, height) in enumerate(db.get_history(hash168, limit)):
            print('History #{:d}: hash: {} height: {:d}'.format(
                n + 1, bytes(reversed(tx_hash)).hex(), height))
        if n is None:
            print('No history')
        n = None
        for n, utxo in enumerate(db.get_utxos(hash168, limit)):
            print('UTXOs #{:d}: hash: {} pos: {:d} height: {:d} value: {:d}'.
                  format(n + 1, bytes(reversed(utxo.tx_hash)).hex(),
                         utxo.tx_pos, utxo.height, utxo.value))
        if n is None:
            print('No UTXOs')
        balance = db.get_balance(hash168)
        print('Balance: {} {}'.format(coin.decimal_value(balance),
                                      coin.SHORTNAME))
def upload_apikeys(user):
    """Upsert the posted API key entries and mark the owning plugins as
    having their key stored in the database.

    Expects a JSON body {"entries": [{"name": ..., "apikey": ...}]}.
    Returns the uploaded entries as JSON, or (json, 400) on failure.
    """
    try:
        apikeys = request.json["entries"]
        # Hoisted out of the loop: the collections are loop-invariant
        # (original rebuilt DB("apikeys")/DB("plugins") per iteration and
        # bound the results to unused locals).
        apikeys_collection = DB("apikeys").collection
        plugins_collection = DB("plugins").collection
        for apikey in apikeys:
            # Upsert so re-uploading a key overwrites the stored value.
            apikeys_collection.update_one(
                {"name": apikey["name"]},
                {"$set": {"apikey": apikey["apikey"]}},
                upsert=True,
            )
            # Also updating "plugins" metadata
            plugins_collection.update_one(
                {"apikey_names": apikey["name"]},
                {"$set": {"apikey_in_ddbb": True}},
            )
        return json.dumps(apikeys, default=str)
    except Exception as e:
        print(f"[routes/apikeys.upload_apikeys]: {e}")
        tb1 = traceback.TracebackException.from_exception(e)
        print("".join(tb1.format()))
        return jsonify({"error_message": "Error uploading API keys"}), 400
def main():
    """Print history, UTXOs and balance for each address on the command
    line; with no arguments, just count database entries."""
    env = Env()
    bp = DB(env)
    coin = env.coin
    if len(sys.argv) == 1:
        count_entries(bp.hist_db, bp.utxo_db)
        return
    arg_index = 1
    try:
        limit = int(sys.argv[arg_index])
        arg_index += 1
    except Exception:
        # No numeric limit given; default and treat the arg as an address.
        limit = 10
    for addr in sys.argv[arg_index:]:
        print('Address: ', addr)
        hashX = coin.address_to_hashX(addr)
        for n, (tx_hash, height) in enumerate(bp.get_history(hashX, limit)):
            print('History #{:d}: hash: {} height: {:d}'
                  .format(n + 1, hash_to_str(tx_hash), height))
        # Sentinel so we can tell whether the UTXO loop ran at all.
        n = None
        for n, utxo in enumerate(bp.get_utxos(hashX, limit)):
            print('UTXOs #{:d}: hash: {} pos: {:d} height: {:d} value: {:d}'
                  .format(n + 1, hash_to_str(utxo.tx_hash), utxo.tx_pos,
                          utxo.height, utxo.value))
        if n is None:
            print('No UTXOs')
        balance = bp.get_balance(hashX)
        print('Balance: {} {}'.format(coin.decimal_value(balance),
                                      coin.SHORTNAME))
def save(self):
    """Persist this pastebin document unless one with the same paste_key
    already exists; return the stored document's ObjectId."""
    store = DB("pastebins").collection
    existing = store.find_one({"paste_key": self.paste_key})
    if existing:
        # Already saved earlier — reuse the stored id.
        return existing["_id"]
    # No md5 means no content was retrieved; flag it before storing.
    if not self.md5:
        self.not_found = True
    return store.insert_one(self.__dict__).inserted_id
def get_apikeys(user):
    """For every plugin that needs an API key, return its key names with
    the stored values (admin only); (json, 400) on error."""
    try:
        if not user.get("is_admin"):
            return jsonify({"error_message": "User is not admin"}), 400
        stored = list(DB("apikeys").collection.find({}, {"_id": False}))
        needing_keys = [p for p in list(PluginManager.get_all())
                        if p.get("needs_apikey")]
        for plugin in needing_keys:
            entries = []
            for key_name in plugin.get("apikey_names"):
                # Match stored keys by name; missing keys become None.
                matches = [row["apikey"] for row in stored
                           if row["name"] == key_name]
                entries.append({
                    "name": key_name,
                    "value": matches[0] if len(matches) > 0 else None,
                })
            plugin["apikeys"] = entries
        return dumps(needing_keys)
    except Exception as e:
        print(e)
        return jsonify({"error_message": "Error getting API keys"}), 400
class IPs:
    """Accessor for the "ip" collection; creates IP resources on demand."""

    db = DB("ip")

    @staticmethod
    def get_by_name(ip_address):
        """Return the IP resource for `ip_address`, creating it if absent."""
        # Drop a trailing ":port" suffix if one is present.
        if ":" in ip_address:
            ip_address = ip_address.split(":")[0]
        found = IPs.db.collection.find_one({"address": ip_address})
        if found:
            return IP(found["_id"])
        new_doc = {
            "canonical_name": ip_address,
            "resource_type": "ip",
            "address": ip_address,
            "creation_time": time.time(),
            "plugins": [],
            "tags": [],
        }
        return IP(IPs.db.collection.insert_one(new_doc).inserted_id)

    @staticmethod
    def get_by_id(resource_id):
        """Wrap an existing Mongo id in an IP resource."""
        return IP(resource_id)
class URLs:
    """Accessor for the "url" collection; creates URL resources on demand."""

    db = DB("url")

    @staticmethod
    def get_by_name(url):
        """Return the URL resource for `url`, creating it if absent."""
        existing = URLs.db.collection.find_one({"full_url": url})
        if existing:
            return URL(existing["_id"])
        # Store the URL both whole and decomposed into its parts.
        parts = urllib.parse.urlparse(url)
        new_doc = {
            "canonical_name": url,
            "resource_type": "url",
            "scheme": parts.scheme,
            "netloc": parts.netloc,
            "path": parts.path,
            "params": parts.params,
            "query": parts.query,
            "fragment": parts.fragment,
            "full_url": url,
            "creation_time": time.time(),
            "plugins": [],
            "tags": [],
        }
        return URL(URLs.db.collection.insert_one(new_doc).inserted_id)

    @staticmethod
    def get_by_id(resource_id):
        """Wrap an existing Mongo id in a URL resource."""
        return URL(resource_id)
def get_diff(plugin_name, resource_id, index):
    """Return a unified diff between the newest stored result for a
    resource and the result at position `index` (newest-first ordering),
    or None when nothing can be diffed or on error."""
    try:
        db = DB(plugin_name)
        if db:
            cursor = db.collection.find(
                {"resource_id": bson.ObjectId(resource_id)}
            ).sort([("timestamp", pymongo.DESCENDING)])
            snapshots = list(cursor)
            # Pretty-print both result sets so the diff is line-oriented.
            newest = json.dumps(snapshots[0]["results"], indent=4,
                                sort_keys=True).split("\n")
            selected = json.dumps(snapshots[index]["results"], indent=4,
                                  sort_keys=True).split("\n")
            if newest and selected:
                return "\n".join(difflib.unified_diff(newest, selected))
        return None
    except Exception as e:
        # Includes IndexError when `index` is out of range.
        print(f"[PluginManager.get_diff] {e}")
        tb1 = traceback.TracebackException.from_exception(e)
        print("".join(tb1.format()))
        return None
def get_by_name(resource_name, resource_type):
    """
    Lookup database for a resource with name "resource_name"
    if None is found, then create the resource.
    """
    resource_type = ResourceType.get_type_from_string(resource_type)
    # Hash resources are keyed on "hash" rather than "canonical_name".
    search = "hash" if resource_type == ResourceType.HASH else "canonical_name"
    result = Resource.collection().find_one({search: resource_name})
    if result:
        return Resource(result["_id"])
    # TODO: Legacy method for old database resources
    # TODO: Get rid of this legacy method
    for doc in ("ip", "url", "username", "hash", "email", "domain"):
        result = DB(doc).collection.find_one({search: resource_name})
        if result:
            return Resource(result["_id"])
    print(f"Tried get_by_name nothing found {result}")
    return None
class Emails:
    """Accessor for the "email" collection; creates Email resources on
    demand."""

    db = DB("email")

    @staticmethod
    def get_by_name(email):
        """Return the Email resource for `email`, creating it if absent."""
        found = Emails.db.collection.find_one({"email": email})
        if found:
            return Email(found["_id"])
        new_doc = {
            "canonical_name": email,
            "email": email,
            "creation_time": time.time(),
            # Everything after the "@" — assumes a well-formed address.
            "domain": email.split("@")[1],
            "plugins": [],
            "resource_type": "email",
            "tags": [],
        }
        return Email(Emails.db.collection.insert_one(new_doc).inserted_id)

    @staticmethod
    def get_by_id(resource_id):
        """Wrap an existing Mongo id in an Email resource."""
        return Email(resource_id)
class Domains:
    """Accessor for the "domain" collection; creates Domain resources on
    demand."""

    db = DB("domain")

    @staticmethod
    def get_by_name(domain_name):
        """Return the Domain resource for `domain_name`, creating it if
        absent."""
        found = Domains.db.collection.find_one({"domain": domain_name})
        if found:
            return Domain(found["_id"])
        new_doc = {
            "canonical_name": domain_name,
            "domain": domain_name,
            "creation_time": time.time(),
            "plugins": [],
            "resource_type": "domain",
            "tags": [],
        }
        return Domain(Domains.db.collection.insert_one(new_doc).inserted_id)

    @staticmethod
    def get_by_id(resource_id):
        """Wrap an existing Mongo id in a Domain resource."""
        return Domain(resource_id)
class Projects:
    """Accessor for the "projects" collection."""

    db = DB("projects")

    @staticmethod
    def create(name, user):
        """Create a project named `name` owned by `user` (id string).

        Raises ProjectNameException on an empty name and
        ProjectExistException on a duplicate. Returns the new ObjectId.
        """
        # TODO: extend project name validation
        if name == "":
            raise ProjectNameException()
        project = Projects.db.collection.find_one({"name": name})
        if project:
            raise ProjectExistException()
        result = Projects.db.collection.insert_one(
            {
                "name": name,
                "createdby_ref": bson.ObjectId(user),
                "creation_date": time.time(),
                "resource_refs": [],  # { resource_id, resource_type }
            }
        )
        return result.inserted_id

    @staticmethod
    def delete(project_id):
        """Remove the project with the given id (string or ObjectId)."""
        project_id = bson.ObjectId(project_id)
        Projects.db.collection.delete_one({"_id": project_id})

    @staticmethod
    def get_project_docs(projects=None, fields=None):
        """Fetch the docs whose _id is in `projects`, projected to `fields`.

        BUG FIX: defaults were mutable lists ([]), the classic shared
        mutable-default pitfall; None defaults behave identically.
        """
        projects = [] if projects is None else projects
        fields = [] if fields is None else fields
        fields_dict = {field: 1 for field in fields}
        return Projects.db.collection.find({"_id": {"$in": projects}},
                                           fields_dict)
def register_plugins():
    """
    This function register metadata from enabled plugins upon container startup
    """
    db = DB("plugins")
    # Start from a clean slate on every startup.
    db.collection.delete_many({})
    for module_name in _get_module_names():
        plugin = importlib.import_module(module_name)
        if plugin.PLUGIN_DISABLE:
            continue
        print(f"registering {plugin.PLUGIN_NAME}")
        db.collection.insert_one({
            "name": plugin.PLUGIN_NAME,
            "is_active": plugin.PLUGIN_IS_ACTIVE,
            "description": plugin.PLUGIN_DESCRIPTION,
            "autostart": plugin.PLUGIN_AUTOSTART,
            "target": [resource.value for resource in plugin.RESOURCE_TARGET],
            "needs_apikey": plugin.PLUGIN_NEEDS_API_KEY,
            "apikey_in_ddbb": plugin.API_KEY_IN_DDBB,
            "apikey_doc": plugin.API_KEY_DOC,
            "apikey_names": plugin.API_KEY_NAMES,
        })
class Usernames:
    """Accessor for the "username" collection; creates Username resources
    on demand."""

    db = DB("username")

    @staticmethod
    def get_by_name(username):
        """Return the Username resource for `username`, creating it if
        absent."""
        found = Usernames.db.collection.find_one({"username": username})
        if found:
            return Username(found["_id"])
        new_doc = {
            "canonical_name": username,
            "username": username,
            "creation_time": time.time(),
            "plugins": [],
            "resource_type": "username",
            "tags": [],
            "sites": [],
        }
        return Username(
            Usernames.db.collection.insert_one(new_doc).inserted_id)

    @staticmethod
    def get_by_id(resource_id):
        """Wrap an existing Mongo id in a Username resource."""
        return Username(resource_id)
class Hashes:
    """Accessor for the "hash" collection; creates Hash resources on
    demand."""

    db = DB("hash")

    @staticmethod
    def get_by_name(hash_string):
        """Return the Hash resource for `hash_string`, creating it if absent.

        Raises ValueError when the hash type cannot be detected.
        """
        # Hashes are stored lowercased so lookups are case-insensitive.
        hash_string = hash_string.lower()
        existing_hash = Hashes.db.collection.find_one({"hash": hash_string})
        if existing_hash:
            return Hash(existing_hash["_id"])
        hash_type = HashType.hash_detection(hash_string)
        if hash_type == HashType.UNKNOWN:
            # FIX: was a bare `raise Exception`; ValueError lets callers
            # catch it specifically while existing broad `except Exception`
            # handlers keep working.
            raise ValueError(
                f"Oops. Unknown hash type in Hash.py {hash_string}")
        args = {
            "hash": hash_string,
            "creation_time": time.time(),
            "hash_type": hash_type.value,
            # First 8 chars double as the short form / canonical name.
            "hash_short": hash_string[:8],
            "canonical_name": hash_string[:8],
            "resource_type": "hash",
            "plugins": [],
            "tags": [],
        }
        inserted_one = Hashes.db.collection.insert_one(args)
        return Hash(inserted_one.inserted_id)

    @staticmethod
    def get_by_id(resource_id):
        """Wrap an existing Mongo id in a Hash resource."""
        return Hash(resource_id)
def upload_apikeys(user):
    """Insert any posted API key entries that are not already stored.

    Expects a JSON body {"entries": [{"name": ..., "apikey": ...}]}.
    Existing entries are left untouched (insert-only, no update).
    Returns the posted entries as JSON, or (json, 400) on failure.
    """
    try:
        apikeys = request.json["entries"]
        collection = DB("apikeys").collection
        for apikey in apikeys:
            existing = collection.find_one({"name": apikey["name"]})
            if not existing:
                collection.insert_one({
                    "name": apikey["name"],
                    "apikey": apikey["apikey"],
                })
        return json.dumps(apikeys, default=str)
    except Exception as e:
        print(e)
        # BUG FIX: message was copy-pasted from the tags route
        # ("Error getting global tags") — this endpoint uploads API keys.
        return jsonify({"error_message": "Error uploading API keys"}), 400
def to_JSON(self, timestamp_index=0): """ Get the doc from DB and returns a JSON without the ObjectId Client must JSON.parse() it in browser before passing it to Vuex """ # Doc is this resource json itself doc = self.resource # Needed because FE expect it to be there doc["plugins"] = [] # We need how many plugins can deal with this current resource type plugins_names = PluginManager.get_plugins_names_for_resource( self.get_type_value()) # Scan for results in all plugins resource type related for plugin_name in plugins_names: result_cursor = (DB(plugin_name).collection.find({ "resource_id": self.resource_id, "results": { "$exists": True }, }).sort([("timestamp", pymongo.DESCENDING)])) result_cursor = list(result_cursor) if not len(result_cursor) == 0: # Test timestamp index if timestamp_index < 0 or timestamp_index > len( result_cursor) - 1: result = result_cursor[0] else: result = result_cursor[timestamp_index] # Add name of the plugin, because we do not store it in database result["name"] = plugin_name # If this plugin results is a list of external references (case pastebin), load it: _load_external_results(result) # Load all timemachine timestamps timemachine = [] # Add the last LIMIT_OF_TIMEMACHINE_RESULTS timestamps to timemachine for ts in result_cursor: timemachine.append({ "timestamp": ts["timestamp"], "result_status": ts["result_status"], }) # Plug timemachine results in our plugin results result["timemachine"] = timemachine # Plug this plugin results to doc doc["plugins"].append(result) return json.loads(json.dumps(doc, default=str))
def get_plugins_names_for_resource(resource_type_as_string):
    """Return the names of all plugins targeting the given resource type,
    sorted alphabetically."""
    cursor = DB("plugins").collection.find(
        {"target": resource_type_as_string}
    ).sort([("name", pymongo.ASCENDING)])
    return [entry["name"] for entry in cursor]
def get_apikeys(user):
    """Return every stored API key document as a JSON string, or a 400
    error response on failure."""
    try:
        # list() instead of the redundant [r for r in results] copy.
        results = list(DB("apikeys").collection.find({}))
        return json.dumps(results, default=str)
    except Exception as e:
        print(e)
        # BUG FIX: message was copy-pasted from the tags route
        # ("Error getting global tags") — this endpoint returns API keys.
        return jsonify({"error_message": "Error getting API keys"}), 400
def get_apikeys(user):
    """Return all stored API keys (Mongo ids excluded) as a JSON string,
    or (json, 400) on error."""
    try:
        cursor = DB("apikeys").collection.find({}, {"_id": False})
        return dumps(list(cursor))
    except Exception as e:
        print(e)
        return jsonify({"error_message": "Error getting API keys"}), 400
def get_plugin_names():
    """Return the API key names that plugins require but which are not
    yet stored in the "apikeys" collection."""
    try:
        required = []
        for plugin in DB("plugins").collection.find({"needs_apikey": True}):
            required.extend(plugin["apikey_names"])
        stored = {doc["name"] for doc in DB("apikeys").collection.find({})}
        # Set difference: needed keys minus already-stored keys.
        return list(set(required) - stored)
    except Exception as e:
        print(f"[PluginManager.get_plugin_names] {e}")
        tb1 = traceback.TracebackException.from_exception(e)
        print("".join(tb1.format()))
def get_autostart_plugins_for_resource(resource_type_as_string):
    """Return names of plugins that autostart AND target the given
    resource type; logs and returns None on error."""
    try:
        query = {
            "autostart": True,
            "target": resource_type_as_string,
        }
        matches = DB("plugins").collection.find(query)
        return [entry["name"] for entry in matches]
    except Exception as e:
        print(f"[PluginManager.get_autostart_plugins_for_resource] {e}")
        tb1 = traceback.TracebackException.from_exception(e)
        print("".join(tb1.format()))
def __init__(self, env):
    """Set up the DB and block cache, launch its tasks, and install
    signal handlers for clean shutdown."""
    self.env = env
    self.db = DB(env)
    self.block_cache = BlockCache(env, self.db)
    # Run catch-up and cache processing concurrently.
    self.tasks = [
        asyncio.ensure_future(coro)
        for coro in (self.block_cache.catch_up(),
                     self.block_cache.process_cache())
    ]
    event_loop = asyncio.get_event_loop()
    # Route Ctrl-C and termination requests through on_signal().
    for signame in ('SIGINT', 'SIGTERM'):
        event_loop.add_signal_handler(getattr(signal, signame),
                                      partial(self.on_signal, signame))
def get_resource_legacy_method(resource_id):
    """
    Lookup the resource_id in all old documents
    Returns resource and doc name to change global COLLECTION
    """
    print(
        f"[resource_base.get_resource_legacy_method]: Legacy method called looking for resource {resource_id}"
    )
    # Probe each legacy per-type collection until one matches.
    for doc_name in ("ip", "url", "username", "hash", "email", "domain"):
        match = DB(doc_name).collection.find_one({"_id": resource_id})
        if match:
            return (match, doc_name)
    print(f"[resource_base/get_resource_legacy_method]: {resource_id}")
    return (None, None)
def compact_history():
    """Compact the history rows of an ElectrumX database in place,
    resuming a previously interrupted run when possible."""
    if sys.version_info < (3, 6):
        raise RuntimeError('Python >= 3.6 is required to run ElectrumX')
    environ['DAEMON_URL'] = ''  # Avoid Env erroring out
    db = DB(Env())
    assert not db.first_sync
    history = db.history
    # Continue where we left off, if interrupted
    if history.comp_cursor == -1:
        history.comp_cursor = 0
    history.comp_flush_count = max(history.comp_flush_count, 1)
    limit = 8 * 1000 * 1000
    # _compact_history() sets comp_cursor back to -1 when finished.
    while history.comp_cursor != -1:
        history._compact_history(limit)
    # When completed also update the UTXO flush count
    db.set_flush_count(history.flush_count)
def run_test(db_dir):
    """Exercise history compaction against a fresh BitcoinCash database
    rooted at db_dir."""
    # Build an Env purely from these variables.
    environ.clear()
    environ['DB_DIRECTORY'] = db_dir
    environ['DAEMON_URL'] = ''
    environ['COIN'] = 'BitcoinCash'
    db = DB(Env())
    # Test abstract compaction
    check_hashX_compaction(db)
    # Now test in with random data
    histories = create_histories(db)
    check_written(db, histories)
    compact_history(db)
    check_written(db, histories)
def get_plugins_for_resource(resource_type_as_string):
    """Return metadata dicts for every plugin targeting the given
    resource type, sorted by plugin name."""
    cursor = DB("plugins").collection.find(
        {"target": resource_type_as_string}
    ).sort([("name", pymongo.ASCENDING)])
    return [
        {
            "name": entry["name"],
            "description": entry["description"],
            "api_key": entry["needs_apikey"],
            "api_docs": entry["apikey_doc"],
            "is_active": entry["is_active"],
            "apikey_in_ddbb": entry["apikey_in_ddbb"],
        }
        for entry in cursor
    ]
def create_user(user):
    """Validate and create a new user document.

    Returns (success, message): validation failures return False with an
    explanatory message; on success the user is inserted and
    (True, "User created") is returned.
    """
    db = DB("users")
    ts = time.time()
    username = user.get("username")
    password1 = user.get("password1")
    password2 = user.get("password2")
    admin = user.get("admin")
    permissions = user.get("permissions")
    # Fields check
    if not (username and password1 and password2):
        return (False, "Complete required fields please")
    # Username uniqueness
    username_exists = db.collection.find_one({"username": username})
    if username_exists:
        return (False, "This username already exists")
    # Username check
    if len(username) < 4 or len(username) > 64 or not username.isalnum():
        return (
            False,
            "Username must be between 4 and 64 alphanumeric characters")
    # Password check
    success, message = check_password(password1, password2)
    if not success:
        return (success, message)
    # Permissions check.  BUG FIX: the original assigned to a misspelled
    # "sucess" variable and then tested "success", so permission-check
    # failures were silently ignored and the user was created anyway.
    success, message = UsersManager.check_permissions(user)
    if not success:
        return (success, message)
    db.collection.insert_one({
        "username": username,
        "password": hash_password(password1),
        "admin": admin,
        "permissions": permissions,
        "created_at": ts,
        "last_edit": ts,
        "disabled": False,
        "disabled_since": None,
    })
    return (success, "User created")
def auth():
    """Authenticate a username/password pair from the request body.

    Returns {"token", "username"} on success; otherwise (json, 401).
    Unknown user and wrong password produce the same message so the
    endpoint does not leak which usernames exist.
    """
    try:
        username = request.json["data"]["username"]
        password = request.json["data"]["password"]
        db = DB("users")
        cursor = db.collection.find_one({"username": username})
        if cursor:
            password_hash = cursor["password"]
            if utils.verify_password(password, password_hash):
                token = tokenizer.generate_auth_token(str(cursor["_id"]))
                return jsonify({"token": token.decode("utf-8"),
                                "username": username})
        # BUG FIX: when the username did not exist the original fell
        # through and implicitly returned None, which Flask turns into a
        # 500 — now it returns the same 401 as a wrong password.
        return jsonify({"error_message": "Bad user or password"}), 401
    except Exception as e:
        tb1 = traceback.TracebackException.from_exception(e)
        print("".join(tb1.format()))
        return jsonify({"error_message": "Exception at authentication"}), 401
def authenticate(username, password):
    """Return an auth token for valid credentials, otherwise None."""
    user_doc = DB("users").collection.find_one({"username": username})
    if not user_doc:
        return None
    if not verify_password(password, user_doc["password"]):
        return None
    # Token payload: id plus the authorization attributes the FE needs.
    payload = {
        "_id": str(user_doc["_id"]),
        "is_admin": user_doc.get("admin") or False,
        "permissions": user_doc.get("permissions") or {},
        "username": username,
    }
    return tokenizer.generate_auth_token(payload)