diff --git a/docs/Usage.md b/docs/Usage.md index 42a9a860a..45165c87e 100644 --- a/docs/Usage.md +++ b/docs/Usage.md @@ -646,58 +646,70 @@ python nettacker.py --start-api --api-access-log --api-port 8080 --api-debug-mod ![](https://github.com/aman566/DiceGameJS/blob/master/Screencast-from-Tuesday-09-June-2020-02-32-32-IST-_online-video-cutter.com_.gif) # Database -OWASP Nettacker, currently supports two databases: +
+OWASP Nettacker currently supports three databases: +
- SQLite - MySQL +- PostgreSQL +
The default database is SQLite. You can, however, configure the db to your liking. +
## SQLite configuration -The SQLite database can be configured in `core/config.py` file under the `_database_config()` function. Here is a sample configuration: -``` -return { - "DB": "sqlite", - "DATABASE": _paths()["home_path"] + "/nettacker.db", # This is the location of your db - "USERNAME": "", - "PASSWORD": "", - "HOST": "", - "PORT": "" - } -``` +
+The settings below apply to SQLite, including the optional **APSW** (Another Python SQLite Wrapper) backend. They can be found inside the `nettacker/config.py` file under the `DbConfig` class. +
+
+ engine = "sqlite" + name = str(CWD / ".nettacker/data/nettacker.db") + host = "" + port = "" + username = "" + password = "" + ssl_mode = "disable" + journal_mode = "WAL" + synchronous_mode = "NORMAL" +
+These are the default and recommended settings, but feel free to adjust them to your needs. To use the SQLite database, ensure that the `engine` value is set to `sqlite` and that `name` is the path to your database file. The `journal_mode` and `synchronous_mode` defaults are chosen to work well for multithreaded I/O. +
+> Note: You can switch to APSW, a lightweight SQLite wrapper, by setting the `use_apsw_for_sqlite` parameter in the config to `True` for better performance. +
## MySQL configuration: -The MySQL database can be configured in `core/config.py` file under the `_database_config()` function. Here is a sample configuration: -``` -return { - "DB": "mysql", - "DATABASE": "nettacker", # This is the name of your db - "USERNAME": "username", - "PASSWORD": "password", - "HOST": "localhost or some other host", - "PORT": "3306 or some other custom port" - } +
+The MySQL database can be configured in the `nettacker/config.py` file under the `DbConfig` class. Here is a sample configuration: +
``` -After this configuration: -1. Open the configuration file of mysql(`/etc/mysql/my.cnf` in case of linux) as a sudo user -2. Add this to the end of the file : -``` -[mysqld] -sql_mode = "STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION" +
+ engine = "mysql" + name = "nettacker" + host = "localhost" + port = 3306 + username = "root" + password = "some-password" + ssl_mode = "disable" + journal_mode = "WAL" + synchronous_mode = "NORMAL" ``` -3. Restart MySQL +
+Only the relevant fields are used, so you don't need to update or remove the irrelevant ones (`ssl_mode`, `journal_mode` and `synchronous_mode` aren't relevant in this case). ## Postgres Configuration -The Postgres database can be configured in core/config.py file under the _database_config() function.
Here is a sample configuration: -` -return { - "DB": "postgreas", - "DATABASE": "nettacker" # Name of db - "USERNAME": "username", - "PASSWORD": "password", - "HOST": "localhost or some other host", - "PORT": "5432 or some other custom port" - } -` -After this configuration please comment out the following line in database/db.py `connect_args={'check_same_thread': False}` +The Postgres database can be configured in the `nettacker/config.py` file under the `DbConfig` class. Here is a sample configuration: +
+```
+ engine = "postgres" + name = "nettacker" + host = "localhost" + port = 5432 + username = "root" + password = "some-password" + ssl_mode = "disable" + journal_mode = "WAL" + synchronous_mode = "NORMAL" +```
+In this case the irrelevant fields are `journal_mode` and `synchronous_mode`; you don't have to update or remove them.
+**Note**: If you want encryption, set `ssl_mode` to `require`. \ No newline at end of file diff --git a/nettacker/config.py b/nettacker/config.py index f573ef515..db2d5de8c 100644 --- a/nettacker/config.py +++ b/nettacker/config.py @@ -82,7 +82,11 @@ class DbConfig(ConfigBase): For sqlite database: fill the name of the DB as sqlite, DATABASE as the name of the db user wants - other details can be left empty + Set the journal_mode (default="WAL") and + synchronous_mode (default="NORMAL"). The rest + of the fields can be left empty. + The default database path is: + str(CWD / ".nettacker/data/nettacker.db") For mysql users: fill the ENGINE name of the DB as mysql NAME as the name of the database you want to create @@ -104,6 +108,8 @@ class DbConfig(ConfigBase): username = "" password = "" ssl_mode = "disable" + journal_mode = "WAL" + synchronous_mode = "NORMAL" class PathConfig: @@ -142,6 +148,9 @@ class DefaultSettings(ConfigBase): parallel_module_scan = 1 passwords = None passwords_list = None + use_apsw_for_sqlite = ( + False # Setting to toggle between APSW and SQLAlchemy for sqlite databases + ) ping_before_scan = False ports = None profiles = None @@ -151,6 +160,8 @@ class DefaultSettings(ConfigBase): random_chars=generate_random_token(10), ) retries = 1 + max_retries = 3 + retry_delay = 0.1 scan_ip_range = False scan_subdomains = False selected_modules = None diff --git a/nettacker/core/app.py b/nettacker/core/app.py index 5cd47a98b..939b2208b 100644 --- a/nettacker/core/app.py +++ b/nettacker/core/app.py @@ -158,9 +158,7 @@ def expand_targets(self, scan_id): for target in copy.deepcopy(self.arguments.targets): for row in find_events(target, "subdomain_scan", scan_id): - for sub_domain in json.loads(row.json_event)["response"]["conditions_results"][ - "content" - ]: + for sub_domain in json.loads(row)["response"]["conditions_results"]["content"]: if sub_domain not in self.arguments.targets: self.arguments.targets.append(sub_domain) # icmp_scan diff --git a/nettacker/core/graph.py b/nettacker/core/graph.py index 0c92a604c..747c52489 100644 --- a/nettacker/core/graph.py +++ b/nettacker/core/graph.py @@ -86,7 +86,7 @@ def build_text_table(events): table_headers = ["date", "target", "module_name", "port", "logs"] _table.add_rows([table_headers]) for event in events: - log = merge_logs_to_list(json.loads(event["json_event"]), []) + log = merge_logs_to_list(event, []) _table.add_rows( [ table_headers, @@ -252,7 +252,7 @@ def create_report(options, scan_id): ) index = 1 for event in all_scan_logs: - log_list = merge_logs_to_list(json.loads(event["json_event"]), []) + log_list = merge_logs_to_list(event, [])
html_table_content += log_data.table_items.format( event["date"], event["target"], @@ -260,7 +260,7 @@ def create_report(options, scan_id): event["port"], "
".join(log_list) if log_list else "Detected", # event["event"], #log index, - html.escape(event["json_event"]), + html.escape(json.dumps(event)), ) index += 1 html_table_content += ( diff --git a/nettacker/core/lib/base.py b/nettacker/core/lib/base.py index 110e51dc8..2fd12d282 100644 --- a/nettacker/core/lib/base.py +++ b/nettacker/core/lib/base.py @@ -52,7 +52,7 @@ def get_dependent_results_from_database(self, target, module_name, scan_id, even while True: event = find_temp_events(target, module_name, scan_id, event_name) if event: - events.append(json.loads(event.event)["response"]["conditions_results"]) + events.append(json.loads(event)["response"]["conditions_results"]) break time.sleep(0.1) return events diff --git a/nettacker/core/module.py b/nettacker/core/module.py index 17ab60601..fadf7e716 100644 --- a/nettacker/core/module.py +++ b/nettacker/core/module.py @@ -78,7 +78,7 @@ def load(self): if not self.skip_service_discovery and self.module_name not in self.ignored_core_modules: services = {} for service in find_events(self.target, "port_scan", self.scan_id): - service_event = json.loads(service.json_event) + service_event = json.loads(service) port = service_event["port"] protocols = service_event["response"]["conditions_results"].keys() for protocol in protocols: diff --git a/nettacker/core/utils/common.py b/nettacker/core/utils/common.py index 6418bbb90..38a17c1d0 100644 --- a/nettacker/core/utils/common.py +++ b/nettacker/core/utils/common.py @@ -3,6 +3,7 @@ import datetime import hashlib import importlib +import json import math import multiprocessing import random @@ -32,6 +33,10 @@ def replace_dependent_response(log, response_dependent): def merge_logs_to_list(result, log_list=[]): if isinstance(result, dict): + # Doesn't hurt normal operations + if "json_event" in list(result.keys()): + if not isinstance(result["json_event"], dict): + result["json_event"] = json.loads(result["json_event"]) for i in result: if "log" == i: log_list.append(result["log"]) diff --git a/nettacker/database/db.py b/nettacker/database/db.py index 19eda9b2a..d5aa3f48f 100644 --- a/nettacker/database/db.py +++ b/nettacker/database/db.py @@ -1,6 +1,11 @@ import json import time +try: + import apsw +except ImportError: + apsw = None + from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker @@ -11,7 +16,7 @@ from nettacker.database.models import HostsLog, Report, TempEvents config = Config() -log = logger.get_logger() +logger = logger.get_logger() def db_inputs(connection_type): @@ -39,23 +44,45 @@ def create_connection(): """ a function to create connections to db with pessimistic approach - Returns: - connection if success otherwise False + For sqlite, it creates and returns a sqlite connection object + for mysql and postgresql, it returns the connection or False if + connection failed. """ - connection_args = {} + if Config.db.engine.startswith("sqlite") and Config.settings.use_apsw_for_sqlite: + if apsw is None: + raise ImportError("APSW is required for SQLite backend.") + # In case of sqlite, the name parameter is the database path + + try: + DB_PATH = config.db.as_dict()["name"] + connection = apsw.Connection(DB_PATH) + connection.setbusytimeout(int(config.settings.timeout) * 100) + cursor = connection.cursor() + + # Performance enhancing configurations. 
Put WAL cause that helps with concurrency + cursor.execute(f"PRAGMA journal_mode={Config.db.journal_mode}") + cursor.execute(f"PRAGMA synchronous={Config.db.synchronous_mode}") + + return connection, cursor + except Exception as e: + logger.error(f"Failed to create APSW connection: {e}") + raise - if Config.db.engine.startswith("sqlite"): - connection_args["check_same_thread"] = False + else: + connection_args = {} - db_engine = create_engine( - db_inputs(Config.db.engine), - connect_args=connection_args, - pool_size=50, - pool_pre_ping=True, - ) - Session = sessionmaker(bind=db_engine) + if Config.db.engine.startswith("sqlite"): + connection_args["check_same_thread"] = False + + db_engine = create_engine( + db_inputs(Config.db.engine), + connect_args=connection_args, + pool_size=50, + pool_pre_ping=True, + ) + Session = sessionmaker(bind=db_engine) - return Session() + return Session() def send_submit_query(session): @@ -70,17 +97,34 @@ def send_submit_query(session): Returns: True if submitted success otherwise False """ - try: - for _ in range(1, 100): + if isinstance(session, tuple): + connection, cursor = session + for _ in range(100): try: - session.commit() + connection.execute("COMMIT") return True except Exception: + connection.execute("ROLLBACK") time.sleep(0.1) - except Exception: - log.warn(messages("database_connect_fail")) + finally: + connection.close() + connection.close() + logger.warn(messages("database_connect_fail")) + return False + else: + try: + for _ in range(1, 100): + try: + session.commit() + return True + except Exception: + time.sleep(0.1) + logger.warn(messages("database_connect_fail")) + return False + except Exception: + logger.warn(messages("database_connect_fail")) + return False return False - return False def submit_report_to_db(event): @@ -94,17 +138,44 @@ def submit_report_to_db(event): Returns: return True if submitted otherwise False """ - log.verbose_info(messages("inserting_report_db")) + logger.verbose_info(messages("inserting_report_db")) session = create_connection() - session.add( - Report( - date=event["date"], - scan_unique_id=event["scan_id"], - report_path_filename=event["options"]["report_path_filename"], - options=json.dumps(event["options"]), + + if isinstance(session, tuple): + connection, cursor = session + + try: + cursor.execute("BEGIN") + cursor.execute( + """ + INSERT INTO reports (date, scan_unique_id, report_path_filename, options) + VALUES (?, ?, ?, ?) 
+ """, + ( + str(event["date"]), + event["scan_id"], + event["options"]["report_path_filename"], + json.dumps(event["options"]), + ), + ) + return send_submit_query(session) + except Exception: + cursor.execute("ROLLBACK") + logger.warn("Could not insert report...") + return False + finally: + cursor.close() + connection.close() + else: + session.add( + Report( + date=event["date"], + scan_unique_id=event["scan_id"], + report_path_filename=event["options"]["report_path_filename"], + options=json.dumps(event["options"]), + ) ) - ) - return send_submit_query(session) + return send_submit_query(session) def remove_old_logs(options): @@ -119,19 +190,50 @@ def remove_old_logs(options): True if success otherwise False """ session = create_connection() - session.query(HostsLog).filter( - HostsLog.target == options["target"], - HostsLog.module_name == options["module_name"], - HostsLog.scan_unique_id != options["scan_id"], - HostsLog.scan_unique_id != options["scan_compare_id"], - # Don't remove old logs if they are to be used for the scan reports - ).delete(synchronize_session=False) - return send_submit_query(session) + if isinstance(session, tuple): + connection, cursor = session + + try: + cursor.execute("BEGIN") + cursor.execute( + """ + DELETE FROM scan_events + WHERE target = ? + AND module_name = ? + AND scan_unique_id != ? + AND scan_unique_id != ? + """, + ( + options["target"], + options["module_name"], + options["scan_id"], + options["scan_compare_id"], + ), + ) + return send_submit_query(session) + except Exception: + cursor.execute("ROLLBACK") + logger.warn("Could not remove old logs...") + return False + finally: + cursor.close() + connection.close() + else: + session.query(HostsLog).filter( + HostsLog.target == options["target"], + HostsLog.module_name == options["module_name"], + HostsLog.scan_unique_id != options["scan_id"], + HostsLog.scan_unique_id != options["scan_compare_id"], + # Don't remove old logs if they are to be used for the scan reports + ).delete(synchronize_session=False) + return send_submit_query(session) def submit_logs_to_db(log): """ - this function created to submit new events into database + this function created to submit new events into database. + This requires a little more robust handling in case of + APSW in order to avoid database lock issues. Args: log: log event in JSON type @@ -139,28 +241,83 @@ def submit_logs_to_db(log): Returns: True if success otherwise False """ + if isinstance(log, dict): session = create_connection() - session.add( - HostsLog( - target=log["target"], - date=log["date"], - module_name=log["module_name"], - scan_unique_id=log["scan_id"], - port=json.dumps(log["port"]), - event=json.dumps(log["event"]), - json_event=json.dumps(log["json_event"]), + if isinstance(session, tuple): + connection, cursor = session + try: + for _ in range(Config.settings.max_retries): + try: + if not connection.in_transaction: + connection.execute("BEGIN") + cursor.execute( + """ + INSERT INTO scan_events (target, date, module_name, scan_unique_id, port, event, json_event) + VALUES (?, ?, ?, ?, ?, ?, ?) + """, + ( + log["target"], + str(log["date"]), + log["module_name"], + log["scan_id"], + json.dumps(log["port"]), + json.dumps(log["event"]), + json.dumps(log["json_event"]), + ), + ) + return send_submit_query(session) + + except apsw.BusyError as e: + if "database is locked" in str(e).lower(): + logger.warn( + f"[Retry {_ + 1}/{Config.settings.max_retries}] Database is locked. Retrying..." 
+ ) + if connection.in_transaction: + connection.execute("ROLLBACK") + time.sleep(Config.settings.retry_delay) + continue + else: + if connection.in_transaction: + connection.execute("ROLLBACK") + return False + except Exception: + try: + if connection.in_transaction: + connection.execute("ROLLBACK") + except Exception: + pass + return False + # All retires exhausted but we want to continue operation + logger.warn("All retries exhausted. Skipping this log.") + return True + finally: + cursor.close() + connection.close() + + else: + session.add( + HostsLog( + target=log["target"], + date=log["date"], + module_name=log["module_name"], + scan_unique_id=log["scan_id"], + port=json.dumps(log["port"]), + event=json.dumps(log["event"]), + json_event=json.dumps(log["json_event"]), + ) ) - ) - return send_submit_query(session) + return send_submit_query(session) else: - log.warn(messages("invalid_json_type_to_db").format(log)) + logger.warn(messages("invalid_json_type_to_db").format(log)) return False def submit_temp_logs_to_db(log): """ - this function created to submit new events into database + this function created to submit new events into database. + This requires a little more robust handling in case of + APSW in order to avoid database lock issues. Args: log: log event in JSON type @@ -170,21 +327,79 @@ def submit_temp_logs_to_db(log): """ if isinstance(log, dict): session = create_connection() - session.add( - TempEvents( - target=log["target"], - date=log["date"], - module_name=log["module_name"], - scan_unique_id=log["scan_id"], - event_name=log["event_name"], - port=json.dumps(log["port"]), - event=json.dumps(log["event"]), - data=json.dumps(log["data"]), + if isinstance(session, tuple): + connection, cursor = session + + try: + for _ in range(Config.settings.max_retries): + try: + if not connection.in_transaction: + cursor.execute("BEGIN") + cursor.execute( + """ + INSERT INTO temp_events (target, date, module_name, scan_unique_id, event_name, port, event, data) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) + """, + ( + log["target"], + str(log["date"]), + log["module_name"], + log["scan_id"], + log["event_name"], + json.dumps(log["port"]), + json.dumps(log["event"]), + json.dumps(log["data"]), + ), + ) + return send_submit_query(session) + except apsw.BusyError as e: + if "database is locked" in str(e).lower(): + logger.warn( + f"[Retry {_ + 1}/{Config.settings.max_retries}] Database is locked. Retrying..." + ) + try: + if connection.in_transaction: + connection.execute("ROLLBACK") + except Exception: + pass + time.sleep(Config.settings.retry_delay) + continue + else: + try: + if connection.in_transaction: + connection.execute("ROLLBACK") + except Exception: + pass + return False + except Exception: + try: + if connection.in_transaction: + connection.execute("ROLLBACK") + except Exception: + pass + return False + # All retires exhausted but we want to continue operation + logger.warn("All retries exhausted. 
Skipping this log.") + return True + finally: + cursor.close() + connection.close() + else: + session.add( + TempEvents( + target=log["target"], + date=log["date"], + module_name=log["module_name"], + scan_unique_id=log["scan_id"], + event_name=log["event_name"], + port=json.dumps(log["port"]), + event=json.dumps(log["event"]), + data=json.dumps(log["data"]), + ) ) - ) - return send_submit_query(session) + return send_submit_query(session) else: - log.warn(messages("invalid_json_type_to_db").format(log)) + logger.warn(messages("invalid_json_type_to_db").format(log)) return False @@ -202,25 +417,42 @@ def find_temp_events(target, module_name, scan_id, event_name): an array with JSON events or an empty array """ session = create_connection() - try: - for _ in range(1, 100): - try: - return ( - session.query(TempEvents) - .filter( - TempEvents.target == target, - TempEvents.module_name == module_name, - TempEvents.scan_unique_id == scan_id, - TempEvents.event_name == event_name, - ) - .first() - ) - except Exception: - time.sleep(0.1) - except Exception: - log.warn(messages("database_connect_fail")) - return False - return False + if isinstance(session, tuple): + connection, cursor = session + try: + cursor.execute( + """ + SELECT event + FROM temp_events + WHERE target = ? AND module_name = ? AND scan_unique_id = ? AND event_name = ? + LIMIT 1 + """, + (target, module_name, scan_id, event_name), + ) + + row = cursor.fetchone() + cursor.close() + connection.close() + if row: + return row[0] + return [] + except Exception: + logger.warn(messages("database_connect_fail")) + return [] + return [] + else: + result = ( + session.query(TempEvents) + .filter( + TempEvents.target == target, + TempEvents.module_name == module_name, + TempEvents.scan_unique_id == scan_id, + TempEvents.event_name == event_name, + ) + .first() + ) + + return result.event if result else [] def find_events(target, module_name, scan_id): @@ -236,15 +468,38 @@ def find_events(target, module_name, scan_id): an array with JSON events or an empty array """ session = create_connection() - return ( - session.query(HostsLog) - .filter( - HostsLog.target == target, - HostsLog.module_name == module_name, - HostsLog.scan_unique_id == scan_id, - ) - .all() - ) + if isinstance(session, tuple): + connection, cursor = session + + try: + cursor.execute( + """ + SELECT json_event FROM scan_events + WHERE target = ? AND module_name = ? and scan_unique_id = ? 
+ """, + (target, module_name, scan_id), + ) + + rows = cursor.fetchall() + cursor.close() + connection.close() + if rows: + return [json.dumps((json.loads(row[0]))) for row in rows] + return [] + except Exception: + logger.warn("Database query failed...") + return [] + else: + return [ + row.json_event + for row in session.query(HostsLog) + .filter( + HostsLog.target == target, + HostsLog.module_name == module_name, + HostsLog.scan_unique_id == scan_id, + ) + .all() + ] def select_reports(page): @@ -261,22 +516,56 @@ def select_reports(page): """ selected = [] session = create_connection() - try: - search_data = ( - session.query(Report).order_by(Report.id.desc()).offset((page * 10) - 10).limit(10) - ) - for data in search_data: - tmp = { - "id": data.id, - "date": data.date, - "scan_id": data.scan_unique_id, - "report_path_filename": data.report_path_filename, - "options": json.loads(data.options), - } - selected.append(tmp) - except Exception: - return structure(status="error", msg="database error!") - return selected + if isinstance(session, tuple): + connection, cursor = session + offset = (page - 1) * 10 + + try: + cursor.execute( + """ + SELECT id, date, scan_unique_id, report_path_filename, options + FROM reports + ORDER BY id DESC + LIMIT 10 OFFSET ? + """, + (offset,), + ) + + rows = cursor.fetchall() + + cursor.close() + connection.close() + for row in rows: + tmp = { + "id": row[0], + "date": str(row[1]), + "scan_id": row[2], + "report_path_filename": row[3], + "options": json.loads(row[4]), + } + selected.append(tmp) + return selected + + except Exception: + logger.warn("Could not retrieve report...") + return structure(status="error", msg="database error!") + else: + try: + search_data = ( + session.query(Report).order_by(Report.id.desc()).offset((page * 10) - 10).limit(10) + ) + for data in search_data: + tmp = { + "id": data.id, + "date": data.date, + "scan_id": data.scan_unique_id, + "report_path_filename": data.report_path_filename, + "options": json.loads(data.options), + } + selected.append(tmp) + except Exception: + return structure(status="error", msg="database error!") + return selected def get_scan_result(id): @@ -290,9 +579,38 @@ def get_scan_result(id): result file content (TEXT, HTML, JSON) if success otherwise and error in JSON type. """ session = create_connection() - filename = session.query(Report).filter_by(id=id).first().report_path_filename + if isinstance(session, tuple): + connection, cursor = session + cursor.execute( + """ + SELECT report_path_filename from reports + WHERE id = ? 
+ """, + (id,), + ) + + row = cursor.fetchone() + cursor.close() + connection.close() + if row: + filename = row[0] + try: + return filename, open(str(filename), "rb").read() + except IOError as e: + logger.error(f"Failed to read report file: {e}") + return None + else: + return structure(status="error", msg="database error!") + else: + report = session.query(Report).filter_by(id=id).first() + if not report: + return None - return filename, open(str(filename), "rb").read() + try: + return report.report_path_filename, open(str(report.report_path_filename), "rb").read() + except IOError as e: + logger.error(f"Failed to read report file: {e}") + return None def last_host_logs(page): @@ -307,42 +625,116 @@ def last_host_logs(page): an array of events in JSON type if success otherwise an error in JSON type """ session = create_connection() - hosts = [ - { - "target": host.target, - "info": { - "module_name": [ - _.module_name - for _ in session.query(HostsLog) + if isinstance(session, tuple): + connection, cursor = session + try: + cursor.execute( + """ + SELECT DISTINCT target + FROM scan_events + ORDER BY id DESC + LIMIT 10 OFFSET ? + """, + [(page - 1) * 10], + ) + targets = cursor.fetchall() + + if not targets: + return structure(status="finished", msg="No more search results") + + hosts = [] + + for (target,) in targets: + cursor.execute( + """ + SELECT DISTINCT module_name + FROM scan_events + WHERE target = ? + """, + [target], + ) + module_names = [row[0] for row in cursor.fetchall()] + + cursor.execute( + """ + SELECT date + FROM scan_events + WHERE target = ? + ORDER BY id DESC + LIMIT 1 + """, + [target], + ) + latest_date = cursor.fetchone() + latest_date = latest_date[0] if latest_date else None + + cursor.execute( + """ + SELECT event + FROM scan_events + WHERE target = ? + """, + [target], + ) + events = [row[0] for row in cursor.fetchall()] + + hosts.append( + { + "target": target, + "info": { + "module_name": module_names, + "date": latest_date, + "events": events, + }, + } + ) + cursor.close() + connection.close() + return hosts + + except Exception: + logger.warn("Database query failed...") + return structure(status="error", msg="Database error!") + + else: + hosts = [ + { + "target": host.target, + "info": { + "module_name": [ + _.module_name + for _ in session.query(HostsLog) + .filter(HostsLog.target == host.target) + .group_by(HostsLog.module_name) + .all() + ], + "date": session.query(HostsLog) .filter(HostsLog.target == host.target) - .group_by(HostsLog.module_name) - .all() - ], - "date": session.query(HostsLog) - .filter(HostsLog.target == host.target) - .order_by(HostsLog.id.desc()) - .first() - .date, - # "options": [ # unnecessary data? - # _.options for _ in session.query(HostsLog).filter( - # HostsLog.target == host.target - # ).all() - # ], - "events": [ - _.event - for _ in session.query(HostsLog).filter(HostsLog.target == host.target).all() - ], - }, - } - for host in session.query(HostsLog) - .group_by(HostsLog.target) - .order_by(HostsLog.id.desc()) - .offset((page * 10) - 10) - .limit(10) - ] - if len(hosts) == 0: - return structure(status="finished", msg="No more search results") - return hosts + .order_by(HostsLog.id.desc()) + .first() + .date, + # "options": [ # unnecessary data? 
+ # _.options for _ in session.query(HostsLog).filter( + # HostsLog.target == host.target + # ).all() + # ], + "events": [ + _.event + for _ in session.query(HostsLog) + .filter(HostsLog.target == host.target) + .all() + ], + }, + } + for host in session.query(HostsLog) + .group_by(HostsLog.target) + .order_by(HostsLog.id.desc()) + .offset((page * 10) - 10) + .limit(10) + ] + if len(hosts) == 0: + return structure(status="finished", msg="No more search results") + return hosts def get_logs_by_scan_id(scan_id): @@ -356,18 +748,48 @@ def get_logs_by_scan_id(scan_id): an array with JSON events or an empty array """ session = create_connection() - return [ - { - "scan_id": scan_id, - "target": log.target, - "module_name": log.module_name, - "date": str(log.date), - "port": json.loads(log.port), - "event": json.loads(log.event), - "json_event": log.json_event, - } - for log in session.query(HostsLog).filter(HostsLog.scan_unique_id == scan_id).all() - ] + + if isinstance(session, tuple): + connection, cursor = session + + cursor.execute( + """ + SELECT scan_unique_id, target, module_name, date, port, event, json_event + from scan_events + WHERE scan_unique_id = ? + """, + (scan_id,), # We have to put this as an indexed element + ) + rows = cursor.fetchall() + + cursor.close() + connection.close() + return [ + { + "scan_id": row[0], + "target": row[1], + "module_name": row[2], + "date": str(row[3]), + "port": json.loads(row[4]), + "event": json.loads(row[5]), + "json_event": json.loads(row[6]) if row[6] else {}, + } + for row in rows + ] + + else: + return [ + { + "scan_id": scan_id, + "target": log.target, + "module_name": log.module_name, + "date": str(log.date), + "port": json.loads(log.port), + "event": json.loads(log.event), + "json_event": log.json_event, + } + for log in session.query(HostsLog).filter(HostsLog.scan_unique_id == scan_id).all() + ] def get_options_by_scan_id(scan_id): @@ -379,10 +801,27 @@ def get_options_by_scan_id(scan_id): an array with a dict with stored options or an empty array """ session = create_connection() - return [ - {"options": log.options} - for log in session.query(Report).filter(Report.scan_unique_id == scan_id).all() - ] + if isinstance(session, tuple): + connection, cursor = session + + cursor.execute( + """ + SELECT options from reports + WHERE scan_unique_id = ? + """, + (scan_id,), + ) + rows = cursor.fetchall() + cursor.close() + connection.close() + if rows: + return [{"options": row[0]} for row in rows] + + else: + return [ + {"options": log.options} + for log in session.query(Report).filter(Report.scan_unique_id == scan_id).all() + ] def logs_to_report_json(target): @@ -397,18 +836,46 @@ def logs_to_report_json(target): """ try: session = create_connection() - return_logs = [] - logs = session.query(HostsLog).filter(HostsLog.target == target) - for log in logs: - data = { - "scan_id": log.scan_unique_id, - "target": log.target, - "port": json.loads(log.port), - "event": json.loads(log.event), - "json_event": json.loads(log.json_event), - } - return_logs.append(data) - return return_logs + if isinstance(session, tuple): + connection, cursor = session + return_logs = [] + + cursor.execute( + """ + SELECT scan_unique_id, target, port, event, json_event + FROM scan_events WHERE target = ? 
+ """, + (target,), + ) + rows = cursor.fetchall() + cursor.close() + connection.close() + if rows: + for log in rows: + data = { + "scan_id": log[0], + "target": log[1], + "port": json.loads(log[2]), + "event": json.loads(log[3]), + "json_event": json.loads(log[4]), + } + return_logs.append(data) + return return_logs + + else: + return_logs = [] + logs = session.query(HostsLog).filter(HostsLog.target == target) + for log in logs: + data = { + "scan_id": log.scan_unique_id, + "target": log.target, + "port": json.loads(log.port), + "event": json.loads(log.event), + "json_event": json.loads(log.json_event), + } + return_logs.append(data) + return return_logs + except Exception: return [] @@ -427,45 +894,100 @@ def logs_to_report_html(target): from nettacker.lib.html_log import log_data session = create_connection() - logs = [ - { - "date": log.date, - "target": log.target, - "module_name": log.module_name, - "scan_id": log.scan_unique_id, - "port": log.port, - "event": log.event, - "json_event": log.json_event, - } - for log in session.query(HostsLog).filter(HostsLog.target == target).all() - ] - html_graph = build_graph("d3_tree_v2_graph", logs) - - html_content = log_data.table_title.format( - html_graph, - log_data.css_1, - "date", - "target", - "module_name", - "scan_id", - "port", - "event", - "json_event", - ) - for event in logs: - html_content += log_data.table_items.format( - event["date"], - event["target"], - event["module_name"], - event["scan_id"], - event["port"], - event["event"], - event["json_event"], + if isinstance(session, tuple): + connection, cursor = session + cursor.execute( + """ + SELECT date, target, module_name, scan_unique_id, port, event, json_event + FROM scan_events + WHERE target = ? + """, + (target,), + ) + + rows = cursor.fetchall() + cursor.close() + connection.close() + logs = [ + { + "date": log[0], + "target": log[1], + "module_name": log[2], + "scan_id": log[3], + "port": log[4], + "event": log[5], + "json_event": log[6], + } + for log in rows + ] + + html_graph = build_graph("d3_tree_v2_graph", logs) + + html_content = log_data.table_title.format( + html_graph, + log_data.css_1, + "date", + "target", + "module_name", + "scan_id", + "port", + "event", + "json_event", + ) + for event in logs: + html_content += log_data.table_items.format( + event["date"], + event["target"], + event["module_name"], + event["scan_id"], + event["port"], + event["event"], + event["json_event"], + ) + html_content += ( + log_data.table_end + '" ) - html_content += ( - log_data.table_end + '" - ) - return html_content + return html_content + else: + logs = [ + { + "date": log.date, + "target": log.target, + "module_name": log.module_name, + "scan_id": log.scan_unique_id, + "port": log.port, + "event": log.event, + "json_event": log.json_event, + } + for log in session.query(HostsLog).filter(HostsLog.target == target).all() + ] + html_graph = build_graph("d3_tree_v2_graph", logs) + + html_content = log_data.table_title.format( + html_graph, + log_data.css_1, + "date", + "target", + "module_name", + "scan_id", + "port", + "event", + "json_event", + ) + for event in logs: + html_content += log_data.table_items.format( + event["date"], + event["target"], + event["module_name"], + event["scan_id"], + event["port"], + event["event"], + event["json_event"], + ) + html_content += ( + log_data.table_end + '" + ) + return html_content def search_logs(page, query): @@ -480,72 +1002,154 @@ def search_logs(page, query): Returns: an array with JSON structure of founded events or an empty 
array """ - session = create_connection() selected = [] - try: - for host in ( - session.query(HostsLog) - .filter( - (HostsLog.target.like("%" + str(query) + "%")) - | (HostsLog.date.like("%" + str(query) + "%")) - | (HostsLog.module_name.like("%" + str(query) + "%")) - | (HostsLog.port.like("%" + str(query) + "%")) - | (HostsLog.event.like("%" + str(query) + "%")) - | (HostsLog.scan_unique_id.like("%" + str(query) + "%")) + session = create_connection() + if isinstance(session, tuple): + connection, cursor = session + try: + # Fetch targets matching the query + cursor.execute( + """ + SELECT DISTINCT target FROM scan_events + WHERE target LIKE ? OR date LIKE ? OR module_name LIKE ? + OR port LIKE ? OR event LIKE ? OR scan_unique_id LIKE ? + ORDER BY id DESC + LIMIT 10 OFFSET ? + """, + ( + f"%{query}%", + f"%{query}%", + f"%{query}%", + f"%{query}%", + f"%{query}%", + f"%{query}%", + (page * 10) - 10, + ), ) - .group_by(HostsLog.target) - .order_by(HostsLog.id.desc()) - .offset((page * 10) - 10) - .limit(10) - ): - for data in ( + targets = cursor.fetchall() + for target_row in targets: + target = target_row[0] + # Fetch data for each target grouped by key fields + cursor.execute( + """ + SELECT date, module_name, port, event, json_event FROM scan_events + WHERE target = ? + GROUP BY module_name, port, scan_unique_id, event + ORDER BY id DESC + """, + (target,), + ) + results = cursor.fetchall() + + tmp = { + "target": target, + "info": { + "module_name": [], + "port": [], + "date": [], + "event": [], + "json_event": [], + }, + } + + for data in results: + date, module_name, port, event, json_event = data + if module_name not in tmp["info"]["module_name"]: + tmp["info"]["module_name"].append(module_name) + if date not in tmp["info"]["date"]: + tmp["info"]["date"].append(date) + parsed_port = json.loads(port) + if parsed_port not in tmp["info"]["port"]: + tmp["info"]["port"].append(parsed_port) + parsed_event = json.loads(event) + if parsed_event not in tmp["info"]["event"]: + tmp["info"]["event"].append(parsed_event) + parsed_json_event = json.loads(json_event) + if parsed_json_event not in tmp["info"]["json_event"]: + tmp["info"]["json_event"].append(parsed_json_event) + + selected.append(tmp) + cursor.close() + connection.close() + + except Exception: + try: + cursor.close() + connection.close() + except Exception: + pass + return structure(status="error", msg="database error!") + + if len(selected) == 0: + return structure(status="finished", msg="No more search results") + return selected + else: + try: + for host in ( session.query(HostsLog) - .filter(HostsLog.target == str(host.target)) - .group_by( - HostsLog.module_name, - HostsLog.port, - HostsLog.scan_unique_id, - HostsLog.event, + .filter( + (HostsLog.target.like("%" + str(query) + "%")) + | (HostsLog.date.like("%" + str(query) + "%")) + | (HostsLog.module_name.like("%" + str(query) + "%")) + | (HostsLog.port.like("%" + str(query) + "%")) + | (HostsLog.event.like("%" + str(query) + "%")) + | (HostsLog.scan_unique_id.like("%" + str(query) + "%")) ) + .group_by(HostsLog.target) .order_by(HostsLog.id.desc()) - .all() + .offset((page * 10) - 10) + .limit(10) ): - n = 0 - capture = None - for selected_data in selected: - if selected_data["target"] == host.target: - capture = n - n += 1 - if capture is None: - tmp = { - "target": data.target, - "info": { - "module_name": [], - "port": [], - "date": [], - "event": [], - "json_event": [], - }, - } - selected.append(tmp) + for data in ( + session.query(HostsLog) + 
.filter(HostsLog.target == str(host.target)) + .group_by( + HostsLog.module_name, + HostsLog.port, + HostsLog.scan_unique_id, + HostsLog.event, + ) + .order_by(HostsLog.id.desc()) + .all() + ): n = 0 + capture = None for selected_data in selected: if selected_data["target"] == host.target: capture = n n += 1 - if data.target == selected[capture]["target"]: - if data.module_name not in selected[capture]["info"]["module_name"]: - selected[capture]["info"]["module_name"].append(data.module_name) - if data.date not in selected[capture]["info"]["date"]: - selected[capture]["info"]["date"].append(data.date) - if data.port not in selected[capture]["info"]["port"]: - selected[capture]["info"]["port"].append(json.loads(data.port)) - if data.event not in selected[capture]["info"]["event"]: - selected[capture]["info"]["event"].append(json.loads(data.event)) - if data.json_event not in selected[capture]["info"]["json_event"]: - selected[capture]["info"]["json_event"].append(json.loads(data.json_event)) - except Exception: - return structure(status="error", msg="database error!") - if len(selected) == 0: - return structure(status="finished", msg="No more search results") - return selected + if capture is None: + tmp = { + "target": data.target, + "info": { + "module_name": [], + "port": [], + "date": [], + "event": [], + "json_event": [], + }, + } + selected.append(tmp) + n = 0 + for selected_data in selected: + if selected_data["target"] == host.target: + capture = n + n += 1 + if data.target == selected[capture]["target"]: + if data.module_name not in selected[capture]["info"]["module_name"]: + selected[capture]["info"]["module_name"].append(data.module_name) + if data.date not in selected[capture]["info"]["date"]: + selected[capture]["info"]["date"].append(data.date) + if data.port not in selected[capture]["info"]["port"]: + selected[capture]["info"]["port"].append(json.loads(data.port)) + if data.event not in selected[capture]["info"]["event"]: + selected[capture]["info"]["event"].append(json.loads(data.event)) + if data.json_event not in selected[capture]["info"]["json_event"]: + selected[capture]["info"]["json_event"].append( + json.loads(data.json_event) + ) + except Exception: + return structure(status="error", msg="database error!") + if len(selected) == 0: + return structure(status="finished", msg="No more search results") + return selected diff --git a/poetry.lock b/poetry.lock index 0dae0b991..3b553d8a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -140,6 +140,67 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "apsw" +version = "3.50.0.0" +description = "Another Python SQLite Wrapper" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "apsw-3.50.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6824df28649514c0efa401ec93d23f44a984a089a6e5d404df90ecd657ea290"}, + {file = "apsw-3.50.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1fd2574eb5cbd63603f37a106d41288c3c6d5eb432278c0fe625014d4c15176"}, + {file = "apsw-3.50.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f7a41dacb3011db2bb0b8b099c1cf7e926590ae6bacb59c0c849dd30d4046db"}, + {file = "apsw-3.50.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2568626790104dafb707c40e4b7c2abe41ba555d4590a7d94460cedee6d7ae"}, + {file = "apsw-3.50.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d176e03b54441b0d7e20d435b655dbf358bbfb15d6b17dd5a8432f04ce9f9bf1"}, + {file = "apsw-3.50.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a242a4b5000b2f1c43374b5a7998e8a87202d3b556eb56f269fbac014d2c294e"}, + {file = "apsw-3.50.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9ee61e4c87d23916e8af2256a99df814f8c8367ce51b26628a6e6cb85f956923"}, + {file = "apsw-3.50.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8f33e04eeab64804defdbf1614b586d9e1d87769a09b7f79cd68c961142682a9"}, + {file = "apsw-3.50.0.0-cp310-cp310-win32.whl", hash = "sha256:174dc62afdbf75800b8579ad536e2e189f6b4e1b92ae2e3dbb9d5f583260d6c5"}, + {file = "apsw-3.50.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5c2fef8008376d42603050b1b9772c61545ede1e8dca3824c948eaafc3e7b2ef"}, + {file = "apsw-3.50.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b05b9d510de3371ec748e9cd1e906bf14ef61f1cd88775358bf3e7a508bac93"}, + {file = "apsw-3.50.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4abac5dd66bdae85def74f78d66c6d28ed9a1e535b31af38a4d474a6095a444"}, + {file = "apsw-3.50.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7167a3b10c0065eebba1320f333b825a0faff9defc355af3d3519272e7ccb931"}, + {file = "apsw-3.50.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50495f4108084ee24a904c37b902d57530ac4f19cd0918c9af3595febd1bd205"}, + {file = "apsw-3.50.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:130878423946e1595d3cc4aa1f202a0bec4ab64826a9526abb9bbc4c28ed61f9"}, + {file = "apsw-3.50.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f843cf8306ebc1117000c2c09d5abd71b53d040212a01d6e4d0f6891ce666a21"}, + {file = "apsw-3.50.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:91b4d6f1964772b21904833a57971ea01a7149dbaa91792a60d2878c58dfbb1c"}, + {file = "apsw-3.50.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:358ff92550b24cb48a5c2d728ff54ac5627c97d93b632ff718b3d89bd9e63544"}, + {file = "apsw-3.50.0.0-cp311-cp311-win32.whl", hash = "sha256:5649e4ef077bd84ef521a09342048c9b86b17d3bec2a0d26e1e1e28be7fa6772"}, + {file = "apsw-3.50.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:6fb61cffb7aa1a29dfd18179aa9a4eea951c467750b4742e6bf6c69fdaee326c"}, + {file = "apsw-3.50.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0e9f74022b707e4b3e159dc7f29fd03b6f3a526544d71486e1a76ee14d15d940"}, + {file = 
"apsw-3.50.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:118a06db94c7460bd1b9311cb50298b9b7ebb079f71f3a934e79fc5106981255"}, + {file = "apsw-3.50.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c412050878e9dc70b1ba27da8756a18d6106f13428d185b8d05652c450152d8"}, + {file = "apsw-3.50.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a77ac6987547ee5a64a477c9d0ba54f89c13068d4661567fc9b8a46f3d6c8a"}, + {file = "apsw-3.50.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1d91b61490c6746cf60782be15437727023a221f528dd8d834bf37925670fc8"}, + {file = "apsw-3.50.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:908e01da909c2f4a24733e37c34ecfdb62ad6d06edcd0a924a9f397a9d878195"}, + {file = "apsw-3.50.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5a379742e773be63b56ee32273eb2a67f63d2076747f967f59a4c35a6f7a0eee"}, + {file = "apsw-3.50.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:014da137aa6f1f1bf31e38b300f68baaa3eb600ddd27aedd9cfbb7fa25d5a3ac"}, + {file = "apsw-3.50.0.0-cp312-cp312-win32.whl", hash = "sha256:b80661bc26d68150ad1ee9438f535a6bd1a287b22ceb06e39f4a560691d61348"}, + {file = "apsw-3.50.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:29e244db28833b0657ba212a598733a2fc3be0b8daea36d389241a91833fdb5c"}, + {file = "apsw-3.50.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42246a2bd568f7e87f63d4468cced6243914841e61f985ace2c8d903b97bb253"}, + {file = "apsw-3.50.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b79d5913faf188e3968febfe7c0b112290f5f4e8fe0dd100ffb2eda063ef1495"}, + {file = "apsw-3.50.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b70e6599c1aa56558eb1058446d9d313a38042040d137c6f01919d18aac4922"}, + {file = "apsw-3.50.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320b621e96783af02a4276afca2635ae56ead6d2b4581ffb17e244beb3fc53bb"}, + {file = "apsw-3.50.0.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b93976b86019e7283c435ded6e6dbe02e46b0838335cafa3d5a1a75a375b663"}, + {file = "apsw-3.50.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a352e97278f8bb135e4015cadf628a5c06515daee8d7b9f51db3160464ee2e99"}, + {file = "apsw-3.50.0.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:74b8610fdca4dec899f732be64d4723b36305a8d08e3d27a20b9c930a4c28fca"}, + {file = "apsw-3.50.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bee4f3596ee14061fab19381762ee23d4b741ecdf70ab9c2ab917aeb01571f0a"}, + {file = "apsw-3.50.0.0-cp313-cp313-win32.whl", hash = "sha256:83830608741210fe229d4c5eb78df6de44eae43f1e76a1e85a4b24150b5b9c3e"}, + {file = "apsw-3.50.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:d4b9191395483171bff456b63639d8b25f1c6124867d60b66699b4594c7ee46e"}, + {file = "apsw-3.50.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c775641dc754f4ac27d4d8141d21ce90427883e7bfb5ffa9ff83986a7dc190f"}, + {file = "apsw-3.50.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c12306a7e9f3a3542523cf1ad39de41d1b42fcffb9378cb22e43c6b449deb9ae"}, + {file = "apsw-3.50.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1f8e943a4e3fea6d39b404900f25196a5461e256c0af56a63233bb068f80a67"}, + {file = "apsw-3.50.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b66321b5412e85401a4752e0e1a279aba97ca11459037e5c9e4d7437b642802"}, + {file = 
"apsw-3.50.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20aa18ab216043f3bcf1ea88a4e10500cb197a6ad21e06d3a05fe40282f66020"}, + {file = "apsw-3.50.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e54327aec77bbab8cb9b97b75c660fa1e96181cfa6fe80f34ee45f370ba27b4d"}, + {file = "apsw-3.50.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:676dedd3cabea78e85a636fc4608c9b2e471b78e6dc21a5b8e9c3c99d3bfc0bc"}, + {file = "apsw-3.50.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c22e40b34a67737feae75cd719cdb3cda08c403965acd082d1fc830e9fec031d"}, + {file = "apsw-3.50.0.0-cp39-cp39-win32.whl", hash = "sha256:21c815c0edcfa18177eb2f4e0d90a3dff1bf5f5ff03b7a7c23e64e071e4ac49c"}, + {file = "apsw-3.50.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:4015a5daeba0df446e26ca88d33414e5262c88c6763ac51a5a94ebf48fb2ebcd"}, + {file = "apsw-3.50.0.0.tar.gz", hash = "sha256:104540af8231b23d01240a341d66fe94fac56bab707fdc159c35e42d354035d0"}, +] + [[package]] name = "argparse" version = "1.4.0" @@ -973,7 +1034,7 @@ description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version < \"3.10\"" +markers = "python_version == \"3.9\"" files = [ {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, @@ -2023,7 +2084,7 @@ files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -markers = {dev = "python_version < \"3.10\""} +markers = {dev = "python_version == \"3.9\""} [[package]] name = "urllib3" @@ -2254,4 +2315,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9, <3.13" -content-hash = "0e1731401cd6acfc4d45ede5e18668530aae6a6b2e359d7dc8d8d635635a1257" +content-hash = "d2681b890fa92a4a75406de2521b46047b72668bfb9fd54884454f1caa497191" diff --git a/pyproject.toml b/pyproject.toml index e6939b9b5..c9c3aaac9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,7 +65,7 @@ zipp = "^3.19.1" uvloop = "^0.21.0" pymysql = "^1.1.1" impacket = "^0.11.0" - +apsw = "^3.50.0.0" [tool.poetry.group.dev.dependencies] ipython = "^8.16.1" ruff = ">=0.2.1,<0.13.0" @@ -94,6 +94,9 @@ profile = "black" addopts = "--cov=nettacker --cov-config=pyproject.toml --cov-report term --cov-report xml --dist loadscope --no-cov-on-fail --numprocesses auto" asyncio_default_fixture_loop_scope = "function" testpaths = ["tests"] +markers = [ + "asyncio: mark test as async" +] [tool.ruff] line-length = 99 diff --git a/report.html b/report.html new file mode 100644 index 000000000..0a446dffa --- /dev/null +++ b/report.html @@ -0,0 +1 @@ +/*css*/
datetargetmodule_nameportlogsjson_eventnowx
1
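The `apsw` dependency added to `pyproject.toml` above is what makes the optional APSW backend importable. As a minimal standalone sketch (not part of the patch), assuming the `apsw` package is installed and using a placeholder database path, this is the same WAL plus busy-timeout setup that the new `create_connection()` performs when `use_apsw_for_sqlite` is enabled:

```python
# Minimal sketch mirroring the APSW branch of create_connection();
# the database path below is an illustrative placeholder, not the real default.
import apsw

db_path = "/tmp/nettacker-example.db"  # assumed path for this example only

connection = apsw.Connection(db_path)
connection.setbusytimeout(3000)  # wait up to 3 seconds while the database is locked

cursor = connection.cursor()
cursor.execute("PRAGMA journal_mode=WAL")    # write-ahead logging helps concurrent access
cursor.execute("PRAGMA synchronous=NORMAL")  # fewer fsyncs; a common pairing with WAL

# APSW does not open implicit transactions, so these statements commit immediately.
cursor.execute("CREATE TABLE IF NOT EXISTS demo (id INTEGER PRIMARY KEY, note TEXT)")
cursor.execute("INSERT INTO demo (note) VALUES (?)", ("hello",))
print(list(cursor.execute("SELECT note FROM demo")))

connection.close()
```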
diff --git a/tests/core/test_exclude_ports.py b/tests/core/test_exclude_ports.py index dc5b26de4..9b30ea04d 100644 --- a/tests/core/test_exclude_ports.py +++ b/tests/core/test_exclude_ports.py @@ -60,7 +60,7 @@ def test_load_with_service_discovery( mock_loader.return_value = mock_loader_inst mock_find_events.return_value = [ - MagicMock(json_event='{"port": 80, "response": {"conditions_results": {"http": {}}}}') + json.dumps({"port": 80, "response": {"conditions_results": {"http": {}}}}) ] module = Module("test_module", options, **module_args) @@ -94,11 +94,9 @@ def test_sort_loops(mock_loader, mock_find_events, options, module_args): } mock_loader.return_value = mock_loader_inst - mock_event = MagicMock() - mock_event.json_event = json.dumps( - {"port": 80, "response": {"conditions_results": {"http": True}}} - ) - mock_find_events.return_value = [mock_event] + mock_find_events.return_value = [ + json.dumps({"port": 80, "response": {"conditions_results": {"http": True}}}) + ] module = Module("test_module", options, **module_args) module.libraries = ["http"] @@ -119,11 +117,9 @@ def test_start_unsupported_library(mock_loader, mock_find_events, options, modul } mock_loader.return_value = mock_loader_inst - mock_event = MagicMock() - mock_event.json_event = json.dumps( - {"port": 1234, "response": {"conditions_results": {"unsupported_lib": True}}} - ) - mock_find_events.return_value = [mock_event] + mock_find_events.return_value = [ + json.dumps({"port": 1234, "response": {"conditions_results": {"unsupported_lib": True}}}) + ] module = Module("test_module", options, **module_args) module.libraries = ["http"] @@ -179,11 +175,9 @@ def test_sort_loops_behavior(mock_loader_cls, mock_find_events, mock_parse, opti # This one is painful mock_loader_cls.side_effect = template_loader_side_effect - mock_event = MagicMock() - mock_event.json_event = json.dumps( - {"port": 80, "response": {"conditions_results": {"http": True}}} - ) - mock_find_events.return_value = [mock_event] + mock_find_events.return_value = [ + json.dumps({"port": 80, "response": {"conditions_results": {"http": True}}}) + ] module = Module("test_module", options, **module_args) module.libraries = ["http"] @@ -307,12 +301,8 @@ def loader_side_effect_specific(name, inputs): mock_loader_cls.side_effect = loader_side_effect_specific mock_find_events.return_value = [ - MagicMock( - json_event=json.dumps({"port": 80, "response": {"conditions_results": {"http": {}}}}) - ), - MagicMock( - json_event=json.dumps({"port": 443, "response": {"conditions_results": {"http": {}}}}) - ), + json.dumps({"port": 80, "response": {"conditions_results": {"http": {}}}}), + json.dumps({"port": 443, "response": {"conditions_results": {"http": {}}}}), ] module = Module("test_module", options, **module_args) diff --git a/tests/database/test_db.py b/tests/database/test_db.py new file mode 100644 index 000000000..49da9c175 --- /dev/null +++ b/tests/database/test_db.py @@ -0,0 +1,1290 @@ +import json +from datetime import datetime +from unittest.mock import Mock, patch, MagicMock, call, mock_open + +import apsw + +from nettacker.api.helpers import structure +from nettacker.database.db import ( + db_inputs, + create_connection, + send_submit_query, + submit_report_to_db, + remove_old_logs, + submit_logs_to_db, + submit_temp_logs_to_db, + find_temp_events, + find_events, + select_reports, + get_scan_result, + last_host_logs, + get_logs_by_scan_id, + get_options_by_scan_id, + logs_to_report_json, + logs_to_report_html, + search_logs, +) + + +class TestDatabase: + def 
setup_method(self): + self.sample_event = { + "date": "2024-01-01 10:00:00", + "scan_id": "test_scan_123", + "options": {"report_path_filename": "/tmp/test_report.json", "target": "192.168.1.1"}, + } + + self.sample_log = { + "target": "192.168.1.1", + "date": datetime(2024, 1, 1, 10, 0, 0), + "module_name": "port_scan", + "scan_id": "test_scan_123", + "port": {"port": 80, "protocol": "tcp"}, + "event": {"status": "open"}, + "json_event": {"service": "http"}, + } + + self.sample_log_temp = { + "target": "192.168.1.1", + "date": "2024-01-01", + "module_name": "mod", + "scan_id": "scan123", + "event_name": "eventABC", + "port": {"port": 443}, + "event": {"status": "open"}, + "data": {"info": "some data"}, + } + + # For search_logs + self.page = 1 + self.query = "test" + + self.target = "192.168.1.1" + self.module = "port_scan" + self.scan_id = "scan_123" + self.event_name = "event_abc" + + # ------------------------------------------------------- + # Tests for db_inputs + # ------------------------------------------------------- + + @patch("nettacker.database.db.Config") + def test_db_inputs_postgres(self, mock_config): + mock_config.db.as_dict.return_value = { + "username": "user", + "password": "pass", + "host": "localhost", + "port": "5432", + "name": "testdb", + "ssl_mode": "require", + } + + result = db_inputs("postgres") + expected = "postgresql+psycopg2://user:pass@localhost:5432/testdb?sslmode=require" + assert result == expected + + @patch("nettacker.database.db.Config") + def test_db_inputs_mysql(self, mock_config): + mock_config.db.as_dict.return_value = { + "username": "user", + "password": "pass", + "host": "localhost", + "port": "3306", + "name": "testdb", + "ssl_mode": "disable", + "journal_mode": "WAL", + "synchronous_mode": "NORMAL", + } + + result = db_inputs("mysql") + expected = "mysql+pymysql://user:pass@localhost:3306/testdb" + assert result == expected + + # ------------------------------------------------------- + # tests for create_connection + # ------------------------------------------------------- + + @patch("nettacker.database.db.apsw.Connection") + @patch("nettacker.database.db.Config") + @patch("nettacker.database.db.config") + def test_create_connection_sqlite( + self, mock_config_instance, mock_config_class, mock_apsw_conn + ): + mock_config_class.db.engine = "sqlite:///test.db" + mock_config_class.db.journal_mode = "WAL" + mock_config_class.db.synchronous_mode = "NORMAL" + mock_config_instance.db.as_dict.return_value = {"name": "/tmp/test.db"} + mock_config_instance.settings.timeout = 30 + + mock_connection = Mock() + mock_cursor = Mock() + mock_connection.cursor.return_value = mock_cursor + mock_apsw_conn.return_value = mock_connection + + result = create_connection() + + mock_apsw_conn.assert_called_once_with("/tmp/test.db") + mock_connection.setbusytimeout.assert_called_once_with(3000) + mock_cursor.execute.assert_any_call("PRAGMA journal_mode=WAL") + mock_cursor.execute.assert_any_call("PRAGMA synchronous=NORMAL") + + assert result == (mock_connection, mock_cursor) + + @patch("nettacker.database.db.create_engine") + @patch("nettacker.database.db.sessionmaker") + @patch("nettacker.database.db.Config") + def test_create_connection_mysql(self, mock_config, mock_sessionmaker, mock_create_engine): + mock_config.db.engine = "mysql" + mock_session_class = Mock() + mock_session = Mock() + mock_session_class.return_value = mock_session + mock_sessionmaker.return_value = mock_session_class + + with patch("nettacker.database.db.db_inputs", 
return_value="mysql://test"): + result = create_connection() + + mock_create_engine.assert_called_once() + mock_sessionmaker.assert_called_once() + assert result == mock_session + + # ------------------------------------------------------- + # tests for send_submit_query + # ------------------------------------------------------- + + def test_send_submit_query_sqlite_success(self): + """Test send_submit_query with SQLite connection - success case""" + mock_connection = Mock() + mock_cursor = Mock() + session = (mock_connection, mock_cursor) + + result = send_submit_query(session) + + mock_connection.execute.assert_called_once_with("COMMIT") + mock_connection.close.assert_called_once() + assert result is True + + def test_send_submit_query_sqlite_retry_then_success(self): + """Test send_submit_query with SQLite connection - retry then success""" + mock_connection = Mock() + mock_cursor = Mock() + session = (mock_connection, mock_cursor) + + # First call fails, second succeeds, third rollback + mock_connection.execute.side_effect = [Exception("Lock error"), None, None] + + with patch("time.sleep"): + result = send_submit_query(session) + + assert mock_connection.execute.call_count == 3 + mock_connection.execute.assert_any_call("ROLLBACK") + mock_connection.execute.assert_any_call("COMMIT") + assert result is True + + @patch("nettacker.database.db.messages", return_value="mocked fail message") + @patch("nettacker.database.db.logger.warn") + def test_send_submit_query_sqlite_failure(self, mock_warn, mock_messages): + def sqlite_execute_side_effect(query): + if query == "COMMIT": + raise Exception("Simulated commit failure") + elif query == "ROLLBACK": + return None + return None + + mock_connection = Mock() + mock_cursor = Mock() + mock_connection.execute.side_effect = sqlite_execute_side_effect + + session = (mock_connection, mock_cursor) + + result = send_submit_query(session) + + assert result is False + mock_warn.assert_called_with("mocked fail message") + + def test_send_submit_query_sqlalchemy_success(self): + mock_session = Mock() + + result = send_submit_query(mock_session) + + mock_session.commit.assert_called_once() + assert result is True + + @patch("nettacker.database.db.messages", return_value="mocked fail message") + @patch("nettacker.database.db.logger.warn") + def test_send_submit_query_sqlalchemy_failure(self, mock_warn, mock_messages): + mock_session = Mock() + mock_session.commit.side_effect = [Exception("fail")] * 100 + + result = send_submit_query(mock_session) + + assert result is False + assert mock_session.commit.call_count >= 99 + mock_warn.assert_called_with("mocked fail message") + + # ------------------------------------------------------- + # tests for submit_report_to_db + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + @patch("nettacker.database.db.send_submit_query") + def test_submit_report_to_db_sqlite(self, mock_send_submit, mock_create_conn): + """Test submit_report_to_db with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + mock_send_submit.return_value = True + + result = submit_report_to_db(self.sample_event) + + mock_cursor.execute.assert_any_call("BEGIN") + mock_cursor.execute.assert_any_call( + """ + INSERT INTO reports (date, scan_unique_id, report_path_filename, options) + VALUES (?, ?, ?, ?) 
+ """, + ( + "2024-01-01 10:00:00", + "test_scan_123", + "/tmp/test_report.json", + json.dumps(self.sample_event["options"]), + ), + ) + assert result is True + + @patch("nettacker.database.db.create_connection") + @patch("nettacker.database.db.send_submit_query") + @patch("nettacker.database.db.Report") + def test_submit_report_to_db_sqlalchemy(self, mock_report, mock_send_submit, mock_create_conn): + """Test submit_report_to_db with SQLAlchemy""" + mock_session = Mock() + mock_create_conn.return_value = mock_session + mock_send_submit.return_value = True + mock_report_instance = Mock() + mock_report.return_value = mock_report_instance + + result = submit_report_to_db(self.sample_event) + + mock_session.add.assert_called_once_with(mock_report_instance) + mock_send_submit.assert_called_once_with(mock_session) + assert result is True + + # ------------------------------------------------------- + # tests for remove_old_logs + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + @patch("nettacker.database.db.send_submit_query") + def test_remove_old_logs_sqlite(self, mock_send_submit, mock_create_conn): + """Test remove_old_logs with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + mock_send_submit.return_value = True + + options = { + "target": "192.168.1.1", + "module_name": "port_scan", + "scan_id": "current_scan", + "scan_compare_id": "compare_scan", + } + + result = remove_old_logs(options) + + mock_cursor.execute.assert_any_call("BEGIN") + mock_cursor.execute.assert_any_call( + """ + DELETE FROM scan_events + WHERE target = ? + AND module_name = ? + AND scan_unique_id != ? + AND scan_unique_id != ? + """, + ("192.168.1.1", "port_scan", "current_scan", "compare_scan"), + ) + assert result is True + + @patch("nettacker.database.db.send_submit_query", return_value=True) + @patch("nettacker.database.db.create_connection") + def test_remove_old_logs_sqlalchemy(self, mock_create_conn, mock_send_submit): + """Test SQLAlchemy path of remove_old_logs""" + + # Create a mock SQLAlchemy session + mock_session = MagicMock() + mock_query = mock_session.query.return_value + mock_filter = mock_query.filter.return_value + + mock_create_conn.return_value = mock_session + + options = { + "target": "192.168.1.1", + "module_name": "port_scan", + "scan_id": "scan_001", + "scan_compare_id": "scan_002", + } + + result = remove_old_logs(options) + + # Assert that delete was called + mock_filter.delete.assert_called_once_with(synchronize_session=False) + + # Assert send_submit_query was called with the session + mock_send_submit.assert_called_once_with(mock_session) + + # Assert final result + assert result is True + + # ------------------------------------------------------- + # tests for submit_logs_to_db + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + @patch("nettacker.database.db.send_submit_query") + @patch("nettacker.database.db.Config") + def test_submit_logs_to_db_sqlite_success( + self, mock_config, mock_send_submit, mock_create_conn + ): + """Test submit_logs_to_db with SQLite - success case""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + mock_send_submit.return_value = True + mock_connection.in_transaction = False + mock_config.settings.max_retries = 3 + + result = submit_logs_to_db(self.sample_log) + + 
mock_connection.execute.assert_called_with("BEGIN") + mock_cursor.execute.assert_called_with( + """ + INSERT INTO scan_events (target, date, module_name, scan_unique_id, port, event, json_event) + VALUES (?, ?, ?, ?, ?, ?, ?) + """, + ( + "192.168.1.1", + str(self.sample_log["date"]), + "port_scan", + "test_scan_123", + json.dumps({"port": 80, "protocol": "tcp"}), + json.dumps({"status": "open"}), + json.dumps({"service": "http"}), + ), + ) + assert result is True + + @patch("nettacker.database.db.messages", return_value="invalid log") + @patch("nettacker.database.db.logger.warn") + def test_log_not_dict(self, mock_warn, mock_messages): + result = submit_logs_to_db("notadict") + assert result is False + mock_warn.assert_called_once_with("invalid log") + + @patch("nettacker.database.db.send_submit_query", return_value=True) + @patch("nettacker.database.db.create_connection") + def test_sqlite_happy_path(self, mock_create_conn, mock_submit): + mock_conn = Mock() + mock_cursor = Mock() + mock_conn.in_transaction = False + mock_create_conn.return_value = (mock_conn, mock_cursor) + + log = { + "target": "1.1.1.1", + "date": "2024-01-01", + "module_name": "mod", + "scan_id": "abc", + "port": {"p": 80}, + "event": {"e": "open"}, + "json_event": {"j": "data"}, + } + + result = submit_logs_to_db(log) + assert result is True + mock_conn.execute.assert_any_call("BEGIN") + + @patch("nettacker.database.db.Config.settings.retry_delay", 0) + @patch("nettacker.database.db.Config.settings.max_retries", 1) + @patch("nettacker.database.db.logger.warn") + @patch("nettacker.database.db.create_connection") + def test_apsw_busy_error(self, mock_create_conn, mock_warn): + mock_conn = Mock() + mock_cursor = Mock() + mock_conn.in_transaction = True + mock_cursor.execute.side_effect = apsw.BusyError("database is locked") + mock_create_conn.return_value = (mock_conn, mock_cursor) + + log = { + "target": "1.1.1.1", + "date": "2024-01-01", + "module_name": "mod", + "scan_id": "abc", + "port": {"p": 80}, + "event": {"e": "open"}, + "json_event": {"j": "data"}, + } + + submit_logs_to_db(log) + mock_warn.assert_has_calls( + [ + call("[Retry 1/1] Database is locked. Retrying..."), + call("All retries exhausted. 
Skipping this log."), + ] + ) + + @patch("nettacker.database.db.create_connection") + def test_sqlite_operational_error(self, mock_create_conn): + mock_conn = Mock() + mock_cursor = Mock() + mock_conn.in_transaction = True + mock_cursor.execute.side_effect = apsw.BusyError("other error") + mock_create_conn.return_value = (mock_conn, mock_cursor) + + log = { + "target": "1.1.1.1", + "date": "2024-01-01", + "module_name": "mod", + "scan_id": "abc", + "port": {"p": 80}, + "event": {"e": "open"}, + "json_event": {"j": "data"}, + } + + result = submit_logs_to_db(log) + assert result is False + + @patch("nettacker.database.db.create_connection") + def test_sqlite_generic_exception(self, mock_create_conn): + mock_conn = Mock() + mock_cursor = Mock() + mock_conn.in_transaction = True + mock_cursor.execute.side_effect = Exception("generic") + mock_create_conn.return_value = (mock_conn, mock_cursor) + mock_cursor.execute.side_effect = [Exception("generic"), None] + + log = { + "target": "1.1.1.1", + "date": "2024-01-01", + "module_name": "mod", + "scan_id": "abc", + "port": {"p": 80}, + "event": {"e": "open"}, + "json_event": {"j": "data"}, + } + result = submit_logs_to_db(log) + assert result is False + + @patch("nettacker.database.db.send_submit_query", return_value=True) + @patch("nettacker.database.db.create_connection") + def test_sqlalchemy_path(self, mock_create_conn, mock_submit): + mock_session = Mock() + mock_create_conn.return_value = mock_session + + log = { + "target": "1.1.1.1", + "date": "2024-01-01", + "module_name": "mod", + "scan_id": "abc", + "port": {"p": 80}, + "event": {"e": "open"}, + "json_event": {"j": "data"}, + } + result = submit_logs_to_db(log) + assert result is True + mock_session.add.assert_called() + + @patch("nettacker.database.db.send_submit_query", return_value=False) + @patch("nettacker.database.db.create_connection") + def test_sqlalchemy_submit_fail(self, mock_create_conn, mock_submit): + mock_session = Mock() + mock_create_conn.return_value = mock_session + + log = { + "target": "1.1.1.1", + "date": "2024-01-01", + "module_name": "mod", + "scan_id": "abc", + "port": {"p": 80}, + "event": {"e": "open"}, + "json_event": {"j": "data"}, + } + result = submit_logs_to_db(log) + assert result is False + + # ------------------------------------------------------- + # tests for submit_temp_logs_to_db + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + @patch("nettacker.database.db.send_submit_query") + @patch("nettacker.database.db.Config") + def test_submit_temp_logs_to_db_sqlite_success( + self, mock_config, mock_send_submit, mock_create_conn + ): + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + mock_send_submit.return_value = True + mock_connection.in_transaction = False + mock_config.settings.max_retries = 3 + + result = submit_temp_logs_to_db(self.sample_log_temp) + + mock_cursor.execute.assert_any_call("BEGIN") + sql, params = mock_cursor.execute.call_args[0] + assert "INSERT INTO temp_events" in sql.strip() + assert params == ( + "192.168.1.1", + "2024-01-01", + "mod", + "scan123", + "eventABC", + json.dumps({"port": 443}), + json.dumps({"status": "open"}), + json.dumps({"info": "some data"}), + ) + + assert result is True + + @patch("nettacker.database.db.messages", return_value="invalid log") + @patch("nettacker.database.db.logger.warn") + def test_temp_log_not_dict(self, mock_warn, mock_messages): + result = 
submit_temp_logs_to_db("notadict") + assert result is False + mock_warn.assert_called_once_with("invalid log") + + @patch("nettacker.database.db.Config.settings.retry_delay", 0) + @patch("nettacker.database.db.Config.settings.max_retries", 1) + @patch("nettacker.database.db.logger.warn") + @patch("nettacker.database.db.create_connection") + def test_temp_log_busy_error(self, mock_create_conn, mock_warn): + mock_conn = Mock() + mock_cursor = Mock() + mock_conn.in_transaction = True + mock_cursor.execute.side_effect = apsw.BusyError("database is locked") + mock_create_conn.return_value = (mock_conn, mock_cursor) + + result = submit_temp_logs_to_db(self.sample_log_temp) + mock_warn.assert_has_calls( + [ + call("[Retry 1/1] Database is locked. Retrying..."), + call("All retries exhausted. Skipping this log."), + ] + ) + assert result is True # we're continuing operation hence it returns True + + @patch("nettacker.database.db.create_connection") + def test_temp_log_operational_error(self, mock_create_conn): + mock_conn = Mock() + mock_cursor = Mock() + mock_conn.in_transaction = True + mock_cursor.execute.side_effect = apsw.BusyError("some other error") + mock_create_conn.return_value = (mock_conn, mock_cursor) + + result = submit_temp_logs_to_db(self.sample_log_temp) + assert result is False + + @patch("nettacker.database.db.create_connection") + def test_temp_log_generic_exception(self, mock_create_conn): + mock_conn = Mock() + mock_cursor = Mock() + mock_conn.in_transaction = True + mock_cursor.execute.side_effect = Exception("unexpected") + mock_create_conn.return_value = (mock_conn, mock_cursor) + + result = submit_temp_logs_to_db(self.sample_log_temp) + assert result is False + + @patch("nettacker.database.db.TempEvents") + @patch("nettacker.database.db.send_submit_query", return_value=True) + @patch("nettacker.database.db.create_connection") + def test_temp_log_sqlalchemy_path(self, mock_create_conn, mock_send, mock_temp): + mock_session = Mock() + mock_create_conn.return_value = mock_session + + result = submit_temp_logs_to_db(self.sample_log_temp) + + mock_session.add.assert_called() + mock_send.assert_called_with(mock_session) + assert result is True + + @patch("nettacker.database.db.create_connection") + def test_submit_temp_logs_to_db_sqlite(self, mock_create_conn): + """Test submit_temp_logs_to_db with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + mock_connection.in_transaction = False + + with patch("nettacker.database.db.send_submit_query", return_value=True): + with patch("nettacker.database.db.Config") as mock_config: + mock_config.settings.max_retries = 3 + + temp_log = { + "target": "192.168.1.1", + "date": datetime(2024, 1, 1), + "module_name": "test_module", + "scan_id": "scan_123", + "event_name": "test_event", + "port": {"port": 80}, + "event": {"status": "test"}, + "data": {"info": "test_data"}, + } + + result = submit_temp_logs_to_db(temp_log) + + mock_cursor.execute.assert_any_call("BEGIN") + mock_cursor.execute.assert_any_call( + """ + INSERT INTO temp_events (target, date, module_name, scan_unique_id, event_name, port, event, data) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) 
+ """, + ( + "192.168.1.1", + str(temp_log["date"]), + "test_module", + "scan_123", + "test_event", + json.dumps({"port": 80}), + json.dumps({"status": "test"}), + json.dumps({"info": "test_data"}), + ), + ) + assert result is True + + # ------------------------------------------------------- + # tests for find_temp_events + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + def test_sqlite_successful_lookup(self, mock_create_conn): + mock_conn = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_conn, mock_cursor) + + mock_cursor.fetchone.return_value = ('{"status": "open"}',) + + result = find_temp_events(self.target, self.module, self.scan_id, self.event_name) + assert result == '{"status": "open"}' + mock_cursor.execute.assert_called_once() + mock_cursor.close.assert_called_once() + + @patch("nettacker.database.db.create_connection") + def test_sqlite_no_result(self, mock_create_conn): + mock_conn = Mock() + mock_cursor = Mock() + mock_cursor.fetchone.return_value = None + mock_create_conn.return_value = (mock_conn, mock_cursor) + + result = find_temp_events(self.target, self.module, self.scan_id, self.event_name) + assert result == [] + + @patch("nettacker.database.db.create_connection") + def test_sqlite_exception_and_retry(self, mock_create_conn): + mock_conn = Mock() + mock_cursor = Mock() + mock_cursor.execute.side_effect = Exception("fail") + mock_create_conn.return_value = (mock_conn, mock_cursor) + + with patch("time.sleep"): # Skip delay + result = find_temp_events(self.target, self.module, self.scan_id, self.event_name) + assert result == [] + + @patch("nettacker.database.db.logger.warn") + @patch("nettacker.database.db.messages", return_value="database fail") + @patch("nettacker.database.db.create_connection") + def test_sqlite_outer_exception(self, mock_create_conn, mock_messages, mock_warn): + mock_conn = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_conn, mock_cursor) + mock_cursor.execute.side_effect = Exception("fail") + mock_cursor.close.side_effect = Exception("cursor close fail") + + with patch("time.sleep"), patch("builtins.range", side_effect=Exception("loop fail")): + result = find_temp_events(self.target, self.module, self.scan_id, self.event_name) + assert result == [] + mock_warn.assert_called_once_with("database fail") + + @patch("nettacker.database.db.create_connection") + def test_sqlalchemy_successful_lookup(self, mock_create_conn): + mock_session = MagicMock() + query_mock = MagicMock() + filter_mock = MagicMock() + + fake_result = MagicMock() + fake_result.event = {"foo": "bar"} + + mock_session.query.return_value = query_mock + query_mock.filter.return_value = filter_mock + filter_mock.first.return_value = fake_result + + mock_create_conn.return_value = mock_session + + result = find_temp_events(self.target, self.module, self.scan_id, self.event_name) + assert result == {"foo": "bar"} + + @patch("nettacker.database.db.create_connection") + def test_sqlalchemy_no_result(self, mock_create_conn): + mock_session = MagicMock() + mock_session.query().filter().first.return_value = None + mock_create_conn.return_value = mock_session + + result = find_temp_events(self.target, self.module, self.scan_id, self.event_name) + if result == []: + result = None + assert result is None + + @patch("nettacker.database.db.create_connection") + def test_find_temp_events_sqlite(self, mock_create_conn): + """Test find_temp_events with SQLite""" + mock_connection = Mock() + 
mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.fetchone.return_value = ('{"test": "data"}',) + + result = find_temp_events("192.168.1.1", "port_scan", "scan_123", "event_1") + + called_query, called_params = mock_cursor.execute.call_args[0] + + expected_query = """ + SELECT event + FROM temp_events + WHERE target = ? AND module_name = ? AND scan_unique_id = ? AND event_name = ? + LIMIT 1 + """ + + # Normalize whitespace (collapse multiple spaces/newlines into one space) + def normalize(sql: str) -> str: + return " ".join(sql.split()) + + assert normalize(called_query) == normalize(expected_query) + assert called_params == ("192.168.1.1", "port_scan", "scan_123", "event_1") + assert result == '{"test": "data"}' + + # ------------------------------------------------------- + # tests for find_events + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + def test_find_events_sqlite(self, mock_create_conn): + """Test find_events with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.fetchall.return_value = [('{"event1": "data1"}',), ('{"event2": "data2"}',)] + + result = find_events("192.168.1.1", "port_scan", "scan_123") + + mock_cursor.execute.assert_called_with( + """ + SELECT json_event FROM scan_events + WHERE target = ? AND module_name = ? and scan_unique_id = ? + """, + ("192.168.1.1", "port_scan", "scan_123"), + ) + expected = ['{"event1": "data1"}', '{"event2": "data2"}'] + assert result == expected + + @patch("nettacker.database.db.logger.warn") + @patch("nettacker.database.db.create_connection") + def test_find_events_sqlite_exception(self, mock_create_conn, mock_warn): + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.execute.side_effect = Exception("DB error") + result = find_events("192.168.1.1", "http", "scan_123") + + assert result == [] + mock_warn.assert_called_once_with("Database query failed...") + + @patch("nettacker.database.db.create_connection") + def test_find_events_sqlalchemy(self, mock_create_conn): + mock_session = Mock() + mock_create_conn.return_value = mock_session + + mock_row1 = Mock() + mock_row2 = Mock() + mock_row1.json_event = '{"event": "scan started"}' + mock_row2.json_event = '{"event": "port open"}' + mock_session.query.return_value.filter.return_value.all.return_value = [ + mock_row1, + mock_row2, + ] + + result = find_events("192.168.1.1", "http", "scan_123") + assert result == ['{"event": "scan started"}', '{"event": "port open"}'] + + mock_session.query.assert_called_once() + mock_session.query.return_value.filter.return_value.all.assert_called_once() + + # ------------------------------------------------------- + # tests for select_reports + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + def test_select_reports_sqlite(self, mock_create_conn): + """Test select_reports with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.fetchall.return_value = [ + (1, "2024-01-01", "scan_123", "/tmp/report.json", '{"target": "192.168.1.1"}') + ] + + result = select_reports(self.page) + + mock_cursor.execute.assert_called_with( + """ + SELECT id, date, scan_unique_id, report_path_filename, options + FROM reports + ORDER BY id 
DESC + LIMIT 10 OFFSET ? + """, + (0,), + ) + + expected = [ + { + "id": 1, + "date": "2024-01-01", + "scan_id": "scan_123", + "report_path_filename": "/tmp/report.json", + "options": {"target": "192.168.1.1"}, + } + ] + assert result == expected + + @patch("nettacker.database.db.logger.warn") + @patch("nettacker.database.db.create_connection") + def test_select_reports_sqlite_exception(self, mock_create_conn, mock_warn): + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + mock_cursor.query.side_effect = Exception("DB Error") + + result = select_reports(self.page) + assert result == structure(status="error", msg="database error!") + mock_warn.assert_called_once_with("Could not retrieve report...") + + @patch("nettacker.database.db.create_connection") + def test_select_reports_sqlalchemy(self, mock_create_conn): + mock_session = Mock() + mock_create_conn.return_value = mock_session + + mock_report = Mock() + mock_report.id = 1 + mock_report.date = "2024-01-01" + mock_report.scan_unique_id = "scan_123" + mock_report.report_path_filename = "/tmp/report.json" + mock_report.options = json.dumps({"target": "192.168.1.1"}) + + mock_session.query.return_value.order_by.return_value.offset.return_value.limit.return_value = [ + mock_report + ] + result = select_reports(self.page) + + assert result == [ + { + "id": 1, + "date": "2024-01-01", + "scan_id": "scan_123", + "report_path_filename": "/tmp/report.json", + "options": {"target": "192.168.1.1"}, + } + ] + + @patch("nettacker.database.db.create_connection") + def test_select_reports_sqlalchemy_exception(self, mock_create_conn): + mock_session = Mock() + mock_create_conn.return_value = mock_session + mock_session.query.side_effect = Exception("DB Error") + result = select_reports(self.page) + assert result == structure(status="error", msg="database error!") + + # ------------------------------------------------------- + # tests for get_scan_result + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + @patch("builtins.open", create=True) + def test_get_scan_result_sqlite(self, mock_open, mock_create_conn): + """Test get_scan_result with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.fetchone.return_value = ("/tmp/report.json",) + mock_file = Mock() + mock_file.read.return_value = b'{"result": "data"}' + mock_open.return_value = mock_file + + result = get_scan_result(1) + + mock_cursor.execute.assert_called_with( + """ + SELECT report_path_filename from reports + WHERE id = ? 
+ """, + (1,), + ) + + filename, content = result + assert filename == "/tmp/report.json" + assert content == b'{"result": "data"}' + + @patch("nettacker.database.db.create_connection") + @patch("builtins.open", new_callable=mock_open, read_data=b"mock file content") + def test_get_scan_result_sqlalchemy(self, mock_open_builtin, mock_create_conn): + mock_session = Mock() + mock_create_conn.return_value = mock_session + + mock_report = Mock() + mock_report.report_path_filename = "/tmp/mock_report.json" + + mock_session.query.return_value.filter_by.return_value.first.return_value = mock_report + + filename, content = get_scan_result(1) + assert filename == "/tmp/mock_report.json" + assert content == b"mock file content" + + mock_open_builtin.assert_called_once_with("/tmp/mock_report.json", "rb") + + # ------------------------------------------------------- + # tests for last_host_logs + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + def test_last_host_logs_sqlite(self, mock_create_conn): + """Test last_host_logs with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + # Mock the sequence of database calls + mock_cursor.fetchall.side_effect = [ + [(self.target,)], # targets + [("port_scan",)], # module_names for target + [("port_scan",), ("vuln_scan",)], # events for target + ] + mock_cursor.fetchone.return_value = ("2024-01-01",) # latest_date + + result = last_host_logs(1) + + # Verify the structure of the result + assert len(result) == 1 + assert result[0]["target"] == "192.168.1.1" + assert "info" in result[0] + + # ------------------------------------------------------- + # tests for get_logs_by_scan_id + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + def test_get_logs_by_scan_id_sqlite(self, mock_create_conn): + """Test get_logs_by_scan_id with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.fetchall.return_value = [ + ( + "scan_123", + "192.168.1.1", + "port_scan", + "2024-01-01", + '{"port": 80}', + '{"status": "open"}', + '{"service": "http"}', + ) + ] + + result = get_logs_by_scan_id("scan_123") + + mock_cursor.execute.assert_called_with( + """ + SELECT scan_unique_id, target, module_name, date, port, event, json_event + from scan_events + WHERE scan_unique_id = ? + """, + ("scan_123",), + ) + + expected = [ + { + "scan_id": "scan_123", + "target": "192.168.1.1", + "module_name": "port_scan", + "date": "2024-01-01", + "port": {"port": 80}, + "event": {"status": "open"}, + "json_event": {"service": "http"}, + } + ] + assert result == expected + + # ------------------------------------------------------- + # tests for get_options_by_scan_id + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + def test_get_options_by_scan_id_sqlite(self, mock_create_conn): + """Test get_options_by_scan_id with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.fetchall.return_value = [('{"target": "192.168.1.1"}',)] + + result = get_options_by_scan_id("scan_123") + + mock_cursor.execute.assert_called_with( + """ + SELECT options from reports + WHERE scan_unique_id = ? 
+ """, + ("scan_123",), + ) + + expected = [{"options": '{"target": "192.168.1.1"}'}] + assert result == expected + + # ------------------------------------------------------- + # tests for logs_to_report_json + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + def test_logs_to_report_json_sqlite(self, mock_create_conn): + """Test logs_to_report_json with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.fetchall.return_value = [ + ( + "scan_123", + "192.168.1.1", + '{"port": 80}', + '{"status": "open"}', + '{"service": "http"}', + ) + ] + + result = logs_to_report_json("192.168.1.1") + + mock_cursor.execute.assert_called_with( + """ + SELECT scan_unique_id, target, port, event, json_event + FROM scan_events WHERE target = ? + """, + ("192.168.1.1",), + ) + + expected = [ + { + "scan_id": "scan_123", + "target": "192.168.1.1", + "port": {"port": 80}, + "event": {"status": "open"}, + "json_event": {"service": "http"}, + } + ] + assert result == expected + + # ------------------------------------------------------- + # tests for logs_to_report_html + # ------------------------------------------------------- + + @patch("nettacker.lib.html_log.log_data.table_title", "{}{}") + @patch("nettacker.lib.html_log.log_data.css_1", "css_content") + @patch("nettacker.lib.html_log.log_data.table_items", "...") + @patch("nettacker.lib.html_log.log_data.table_end", "") + @patch("nettacker.core.graph.build_graph") + @patch("nettacker.database.db.create_connection") + @patch("nettacker.database.db.messages") + def test_logs_to_report_html_sqlite(self, mock_messages, mock_create_conn, mock_build_graph): + """Test logs_to_report_html with SQLite""" + + # Setup mock database connection and cursor + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + # Simulated query result from the logs table + mock_cursor.fetchall.return_value = [ + ( + "2024-01-01", + "192.168.1.1", + "port_scan", + "scan_123", + '{"port": 80}', + '{"status": "open"}', + '{"service": "http"}', + ) + ] + + # Simulated return values for graph and messages + mock_build_graph.return_value = "graph_html" + mock_messages.return_value = "Generated by Nettacker" + + # Call the function + result = logs_to_report_html("192.168.1.1") + + # Assertions + assert isinstance(result, str) + assert "graph_html" in result + assert "" in result + assert "Generated by Nettacker" in result + + @patch("nettacker.lib.html_log.log_data.table_title", "{}{}") + @patch("nettacker.lib.html_log.log_data.css_1", "css_content") + @patch( + "nettacker.lib.html_log.log_data.table_items", + "{0}{1}{2}{3}{4}{5}{6}", + ) + @patch("nettacker.lib.html_log.log_data.table_end", "") + @patch("nettacker.core.graph.build_graph") + @patch("nettacker.database.db.create_connection") + @patch("nettacker.database.db.messages", return_value="Generated by Nettacker") + def test_logs_to_report_html_sqlalchemy( + self, mock_messages, mock_create_conn, mock_build_graph + ): + """Test logs_to_report_html with SQLAlchemy fallback""" + + # Simulate SQLAlchemy session + mock_session = MagicMock() + mock_create_conn.return_value = mock_session + + # Fake log row (SQLAlchemy object with attributes) + fake_log = MagicMock() + fake_log.date = "2024-01-01" + fake_log.target = "192.168.1.1" + fake_log.module_name = "port_scan" + fake_log.scan_unique_id = "scan_123" + fake_log.port = 
'{"port": 80}' + fake_log.event = '{"status": "open"}' + fake_log.json_event = '{"service": "http"}' + + # SQLAlchemy .query().filter().all() returns a list of logs + mock_session.query().filter().all.return_value = [fake_log] + + # Graph output + mock_build_graph.return_value = "graph_html" + + # Call the function + result = logs_to_report_html("192.168.1.1") + + # Assertions + assert isinstance(result, str) + assert "graph_html" in result + assert "" in result + assert "Generated by Nettacker" in result + assert "192.168.1.1" in result + assert "scan_123" in result + + # ------------------------------------------------------- + # tests for search_logs + # ------------------------------------------------------- + + @patch("nettacker.database.db.create_connection") + def test_search_logs_sqlite(self, mock_create_conn): + """Test search_logs with SQLite""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + # Mock the sequence of calls for search + mock_cursor.fetchall.side_effect = [ + [("192.168.1.1",)], # targets matching query + [ + ( + "2024-01-01", + "port_scan", + '{"port": 80}', + '{"status": "open"}', + '{"service": "http"}', + ) + ], # results for target + ] + + result = search_logs(1, "192.168") + + # Verify search query structure + search_call = mock_cursor.execute.call_args_list[0] + assert "%192.168%" in search_call[0][1] + + assert len(result) == 1 + assert result[0]["target"] == "192.168.1.1" + + @patch("nettacker.database.db.create_connection") + @patch("nettacker.database.db.structure") + def test_search_logs_no_results(self, mock_structure, mock_create_conn): + """Test search_logs with no results""" + mock_connection = Mock() + mock_cursor = Mock() + mock_create_conn.return_value = (mock_connection, mock_cursor) + + mock_cursor.fetchall.return_value = [] + mock_structure.return_value = {"status": "finished", "msg": "No more search results"} + + result = search_logs(1, "nonexistent") + + mock_structure.assert_called_with(status="finished", msg="No more search results") + assert result == {"status": "finished", "msg": "No more search results"} + + @patch("nettacker.database.db.create_connection") + def test_sqlite_path_exception(self, mock_create_conn): + mock_conn = Mock() + mock_cursor = Mock() + mock_cursor.execute.side_effect = Exception("db error") + mock_create_conn.return_value = (mock_conn, mock_cursor) + + result = search_logs(self.page, self.query) + assert result["status"] == "error" + assert "database error" in result["msg"] + + @patch("nettacker.database.db.create_connection") + def test_sqlalchemy_path_success(self, mock_create_conn): + mock_session = MagicMock() + mock_create_conn.return_value = mock_session + + host_mock = MagicMock() + host_mock.target = "192.168.1.1" + mock_session.query().filter().group_by().order_by().offset().limit().__iter__.return_value = [ + host_mock + ] + + data_mock = MagicMock() + data_mock.target = "192.168.1.1" + data_mock.module_name = "mod" + data_mock.date = "2024-01-01" + data_mock.port = json.dumps({"port": 80}) + data_mock.event = json.dumps({"event": "open"}) + data_mock.json_event = json.dumps({"svc": "http"}) + mock_session.query().filter().group_by().order_by().all.return_value = [data_mock] + + result = search_logs(self.page, self.query) + assert result[0]["target"] == "192.168.1.1" + assert "mod" in result[0]["info"]["module_name"] + + @patch("nettacker.database.db.create_connection") + def test_sqlalchemy_path_exception(self, mock_create_conn): + 
mock_session = MagicMock()
+        mock_create_conn.return_value = mock_session
+        mock_session.query().filter().group_by().order_by().offset().limit.side_effect = Exception(
+            "boom"
+        )
+
+        result = search_logs(self.page, self.query)
+        assert result["status"] == "error"
+        assert "database error" in result["msg"]
+
+    @patch("nettacker.database.db.create_connection")
+    def test_sqlalchemy_path_no_results(self, mock_create_conn):
+        mock_session = MagicMock()
+        mock_create_conn.return_value = mock_session
+        mock_session.query().filter().group_by().order_by().offset().limit().__iter__.return_value = []
+
+        result = search_logs(self.page, self.query)
+        assert result["status"] == "finished"
+        assert result["msg"] == "No more search results"
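
A minimal way to exercise the new database test module on its own (a sketch, assuming `pytest` is available in your development environment; adjust the path or options to your setup):

```
# Run only the new database tests with verbose output
pytest tests/database/test_db.py -v
```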