diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features.yaml b/modules/sc-mesh-secure-deployment/src/2_0/features.yaml new file mode 100644 index 000000000..d31ad199a --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features.yaml @@ -0,0 +1,7 @@ +# By Defualt the features are disabled. To enable please type true + +PHY: false +RSS: false +IDS: false +jamming: false + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/SP_CRA_v7.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/SP_CRA_v7.py index e2a92ba68..ba53ca9f5 100644 --- a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/SP_CRA_v7.py +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/SP_CRA_v7.py @@ -2,131 +2,276 @@ import threading import random import pickle -#import scipy.io -from mat4py import loadmat import time import logging import numpy as np import os -from getmac import get_mac_address # <-- Import the get_mac_address function +import queue +from getmac import get_mac_address +from mat4py import loadmat class PHYCRA: def __init__(self): - logging.basicConfig(level=logging.INFO) - logging.getLogger().setLevel(logging.INFO) + # Initialization code + log_filename = '/tmp/server_log.log' # Log file location + txt_log_filename = '/tmp/server_log.txt' + # Clear server log at the start of each session + if os.path.exists(log_filename): + os.remove(log_filename) + if os.path.exists(txt_log_filename): + os.remove(txt_log_filename) + + + logging.basicConfig(level=logging.INFO, filename=log_filename, filemode='w', + format='%(asctime)s - %(levelname)s - %(message)s') + logging.info("Server initialized") # Server setup - DataServer = loadmat('ACF_Table.mat') - self.acf = np.array(DataServer['y_C']).transpose() - #self.acf = DataServer['y_C'].transpose() - self.SERVER = self.get_server_ip() - self.BROADCAST_PORT = 5051 - self.PORT = 5050 - self.ADDR = (self.SERVER, self.PORT) - 
self.FORMAT = 'utf-8' + # Determine the directory in which this script is located + script_dir = os.path.dirname(os.path.realpath(__file__)) + + # Use the script's directory to construct the path to the .mat file + mat_file_path = os.path.join(script_dir, 'ACF_Table.mat') + try: + DataServer = loadmat(mat_file_path) + self.acf = np.array(DataServer['y_C']).transpose() + self.SERVER = self.get_server_ip() + self.BROADCAST_PORT = 5051 + self.PORT = 5050 + self.ADDR = (self.SERVER, self.PORT) + self.FORMAT = 'utf-8' + logging.info("Server setup completed successfully") + except Exception as e: + logging.error("Error during server setup: %s", e) + # Client setup - DataClient = loadmat('ACF_Table.mat') - self.acf_client = np.array(DataClient['y_C']).transpose() - #self.acf_client = DataClient['y_C'].transpose() - - # Clear the server log at the start of each session - if os.path.exists("server_log.txt"): - os.remove("server_log.txt") - - # Start server and client functionalities - server_thread = threading.Thread(target=self.server_start) - server_thread.start() - time.sleep(2) - listen_thread = threading.Thread(target=self.listen_for_broadcast) - listen_thread.start() - - # SERVER FUNCTIONS + try: + DataClient = loadmat(mat_file_path) + self.acf_client = np.array(DataClient['y_C']).transpose() + logging.info("Client setup completed successfully") + except Exception as e: + logging.error("Error during client setup: %s", e) + + # Initialize threads but do not start them + self.server_thread = None + self.listen_thread = None + self.broadcast_thread = None + self.debug = False # Default value + self.results_queue = {'Pass': [], 'Fail': []} + #self.all_attempts = set() # Set to track all unique attempts + + # Create an event for graceful shutdown + self.stop_event = threading.Event() + + def start(self): + """Starts the server and client functionalities in separate threads.""" + try: + self.server_thread = threading.Thread(target=self.server_start) + 
self.server_thread.start() + logging.info("Server thread started") + + self.listen_thread = threading.Thread(target=self.listen_for_broadcast) + self.listen_thread.start() + logging.info("Broadcast listening thread started") + except Exception as e: + logging.error("Error during server/client threads initialization: %s", e) + + def stop(self): + """Stops all running threads and closes sockets.""" + self.stop_event.set() + + if self.server_thread and self.server_thread.is_alive(): + self.server_thread.join() + if self.listen_thread and self.listen_thread.is_alive(): + self.listen_thread.join() + if self.broadcast_thread and self.broadcast_thread.is_alive(): + self.broadcast_thread.join() + + if hasattr(self, 'server') and self.server: + self.server.close() + if hasattr(self, 'listen_sock') and self.listen_sock: + self.listen_sock.close() + logging.info("PHYCRA stopped successfully") + + def log_authentication(self, node_ip, mac_address, result): timestamp = time.strftime("%Y-%m-%d %H:%M:%S") log_entry = f"{timestamp}\t{node_ip}\t{mac_address}\t{result}\n" - with open("server_log.txt", "a") as log_file: + with open("/tmp/server_log.txt", "a") as log_file: log_file.write(log_entry) + def display_table(self): - print("+---------------------+---------------+-------------------+---------------------------+") - print("| Time | Node IP | MAC Address | Authentication Result |") - print("+---------------------+---------------+-------------------+---------------------------+") - with open("server_log.txt", "r") as log_file: - for line in log_file: - timestamp, node_ip, mac_address, result = line.strip().split("\t") - formatted_line = f"| {timestamp:<19} | {node_ip:<12} | {mac_address:<15} | {result:<23} |" - print(formatted_line) - print("+---------------------+---------------+-------------------+---------------------------+") + if self.debug: + logging.info("Displaying authentication table") + 
print("+---------------------+---------------+-------------------+---------------------------+") + print("| Time | Node IP | MAC Address | Authentication Result |") + print("+---------------------+---------------+-------------------+---------------------------+") + log_file_path = "/tmp/server_log.txt" + if os.path.exists(log_file_path): + with open(log_file_path, "r") as log_file: + for line in log_file: + timestamp, node_ip, mac_address, result = line.strip().split("\t") + formatted_line = f"| {timestamp:<19} | {node_ip:<12} | {mac_address:<15} | {result:<23} |" + print(formatted_line) + else: + print("No entries found.") + print("+---------------------+---------------+-------------------+---------------------------+") + else: + logging.info("Debug mode is off, skipping table display") + + def handle_client(self, conn, addr): - print(f"Connection request received from {addr}") - index = random.randint(0, len(self.acf) - 1) - acf_tx = pickle.dumps(self.acf[index]) - conn.send(acf_tx) - - # Receive and convert rx_index to int - rx_index_length = int.from_bytes(conn.recv(2), 'big') - rx_index = int(conn.recv(rx_index_length).decode(self.FORMAT)) - - # Then receive MAC address - mac_address_length = int.from_bytes(conn.recv(2), 'big') - mac_address = conn.recv(mac_address_length).decode(self.FORMAT) - - if rx_index == index: - print("Node is authenticated") - self.log_authentication(addr[0], mac_address, "Success") - else: - print('Access denied') - self.log_authentication(addr[0], mac_address, "Access denied") - print("\nUpdated Table:") - self.display_table() - conn.close() + """ + Handles the client connection, sends a challenge (ACF value), + and verifies the response from the client to authenticate. + It also logs the authentication result. 
+ """ + try: + # Sending a random ACF value as a challenge to the client + index = random.randint(0, len(self.acf) - 1) + acf_tx = pickle.dumps(self.acf[index]) + conn.sendall(acf_tx) + # Receive and verify the index from the client + rx_index_length_bytes = conn.recv(2) + rx_index_length = int.from_bytes(rx_index_length_bytes, 'big') + rx_index_bytes = conn.recv(rx_index_length) + rx_index = int(rx_index_bytes.decode(self.FORMAT)) + # Then receive the MAC address from the client + mac_address_length_bytes = conn.recv(2) + mac_address_length = int.from_bytes(mac_address_length_bytes, 'big') + mac_address_bytes = conn.recv(mac_address_length) + mac_address = mac_address_bytes.decode(self.FORMAT) + # Authenticate the client based on the index and MAC address received + if rx_index == index: + print("PASS: Authentication successful") + logging.info("Authentication successful for %s", addr) + self.log_authentication(addr[0], mac_address, "Success") + self.results_queue['Pass'].append((addr[0], mac_address)) + else: + print('FAIL: Authentication failed') + logging.warning("Authentication failed for %s", addr) + self.log_authentication(addr[0], mac_address, "Access denied") + self.results_queue['Fail'].append((addr[0], mac_address)) + + print("\nUpdated Table:") + self.display_table() + except Exception as e: + logging.error("Error during client handling: %s", e) + finally: + conn.close() + + def get_result(self): + """ + Retrieve and clear the latest results from the results queue. 
+ """ + current_results = {k: list(v) for k, v in self.results_queue.items()} + self.results_queue = {'Pass': [], 'Fail': []} # Reset for next batch + return current_results + + def server_start(self): - server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - server.bind(self.ADDR) - server.listen() - broadcast_thread = threading.Thread(target=self.broadcast_status) - broadcast_thread.start() - while True: - conn, addr = server.accept() - thread = threading.Thread(target=self.handle_client, args=(conn, addr)) - thread.start() + try: + self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.server.bind(self.ADDR) + self.server.listen() + self.server.settimeout(1) + logging.info("Server started and listening") + except Exception as e: + logging.error("Error during server binding/listening: %s", e) + return + + try: + self.broadcast_thread = threading.Thread(target=self.broadcast_status) + self.broadcast_thread.start() + logging.info("Broadcast thread started") + while not self.stop_event.is_set(): # Check if the stop event is set + try: + conn, addr = self.server.accept() + except socket.timeout: + continue + thread = threading.Thread(target=self.handle_client, args=(conn, addr)) + thread.start() + except Exception as e: + logging.error("Error during server operations: %s", e) + finally: + self.server.close() + logging.info("Server shutdown") + def broadcast_status(self): - broadcast_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - broadcast_sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) - while True: - msg = "SERVER_AVAILABLE" - broadcast_sock.sendto(msg.encode(), ('', self.BROADCAST_PORT)) - time.sleep(60) + try: + broadcast_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + broadcast_sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + while not self.stop_event.is_set(): # Check if the stop event is set + msg = "SERVER_AVAILABLE" + broadcast_sock.sendto(msg.encode(), ('', self.BROADCAST_PORT)) + 
time.sleep(20) + except Exception as e: + logging.error("Error during broadcast: %s", e) + finally: + broadcast_sock.close() + logging.info("Broadcast stopped") + # CLIENT FUNCTIONS def get_mac_address(self): return get_mac_address() + def connect_to_server(self, server_ip): - client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - client.connect((server_ip, self.PORT)) - acf_rx = pickle.loads(client.recv(4096)) - index = str(np.where((self.acf_client == acf_rx).all(axis=1))[0][0]) - client.send(len(index).to_bytes(2, 'big')) # Send length of rx_index first - client.send(index.encode(self.FORMAT)) # Send rx_index - mac_address = self.get_mac_address() - client.send(len(mac_address).to_bytes(2, 'big')) # Send length of MAC address - client.send(mac_address.encode(self.FORMAT)) # Then send MAC address - client.close() - + """ + Connects to the server, receives a challenge (ACF value), + calculates the index of the received ACF value in the local acf_client table, + sends back this index and the MAC address to the server for authentication. 
+ """ + try: + client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + client.connect((server_ip, self.PORT)) + # Receive the ACF challenge from the server + acf_rx = pickle.loads(client.recv(4096)) + # Calculate the index of the received ACF + index = str(np.where((self.acf_client == acf_rx).all(axis=1))[0][0]) + # Send the calculated index back to the server + index_bytes = index.encode(self.FORMAT) + client.sendall(len(index_bytes).to_bytes(2, 'big')) # Send length of rx_index first + client.sendall(index_bytes) # Send rx_index + # Retrieve the local MAC address and send it to the server + mac_address = self.get_mac_address() + mac_address_bytes = mac_address.encode(self.FORMAT) + client.sendall(len(mac_address_bytes).to_bytes(2, 'big')) # Send length of MAC address first + client.sendall(mac_address_bytes) # Then send MAC address + logging.info("Successfully sent index and MAC address to the server") + except Exception as e: + logging.error("Error during connection to server: %s", e) + finally: + client.close() + + def listen_for_broadcast(self): - listen_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - listen_sock.bind(('', self.BROADCAST_PORT)) - while True: - data, addr = listen_sock.recvfrom(1024) - if data.decode() == "SERVER_AVAILABLE" and addr[0] != self.SERVER: - self.connect_to_server(addr[0]) + try: + self.listen_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + self.listen_sock.bind(('', self.BROADCAST_PORT)) + self.listen_sock.settimeout(1) + logging.info("Listening for broadcasts") + while not self.stop_event.is_set(): + try: + data, addr = self.listen_sock.recvfrom(1024) + except socket.timeout: + continue + if data.decode() == "SERVER_AVAILABLE" and addr[0] != self.SERVER: + self.connect_to_server(addr[0]) + except Exception as e: + logging.error("Error during broadcast reception: %s", e) + finally: + self.listen_sock.close() + logging.info("Stopped listening for broadcasts") + # Common Functions def get_server_ip(self): @@ 
-140,5 +285,3 @@ def get_server_ip(self): s.close() return IP -if __name__ == "__main__": - phycra = PHYCRA() diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/functional_regression_testcases.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/functional_regression_testcases.py new file mode 100644 index 000000000..dfdc66c8e --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/functional_regression_testcases.py @@ -0,0 +1,143 @@ +import pytest +import socket +from unittest.mock import patch, MagicMock, mock_open +from SP_CRA_mod import PHYCRA +import numpy as np + +@pytest.fixture +def phycra_instance(): + with patch('SP_CRA_mod.socket.socket'): + instance = PHYCRA() + instance.acf = MagicMock() + instance.acf_client = MagicMock() + yield instance + # Ensure proper cleanup + instance.server.close() + if hasattr(instance, 'shutdown_server'): + instance.shutdown_server() + instance.listen_sock.close() + +# Test for __init__ method +def test_init(phycra_instance): + assert phycra_instance.SERVER is not None + assert phycra_instance.BROADCAST_PORT == 5051 + assert phycra_instance.PORT == 5050 + +# Test for log_authentication method +def test_log_authentication(phycra_instance): + with patch('builtins.open', mock_open()) as mock_file: + phycra_instance.log_authentication('127.0.0.1', '00:00:00:00:00:00', 'Success') + mock_file.assert_called_with('server_log.txt', 'a') + mock_file().write.assert_called() + +# Test for display_table method +def test_display_table(phycra_instance, capsys): + with patch('builtins.open', mock_open(read_data='2023-11-03 12:00:00\t127.0.0.1\t00:00:00:00:00:00\tSuccess\n')): + phycra_instance.display_table() + captured = capsys.readouterr() + assert '127.0.0.1' in captured.out + assert '00:00:00:00:00:00' in captured.out + assert 'Success' in captured.out + +# Test for server_start method +def test_server_start(phycra_instance): + with 
patch('SP_CRA_mod.socket.socket') as mock_socket, \ + patch('SP_CRA_mod.threading.Thread') as mock_thread: + phycra_instance.server_start() + mock_socket.assert_called_with(socket.AF_INET, socket.SOCK_STREAM) + mock_thread.assert_called() + +# Test for broadcast_status method +def test_broadcast_status(phycra_instance): + with patch('SP_CRA_mod.socket.socket') as mock_socket: + phycra_instance.broadcast_status() + mock_socket.assert_called_with(socket.AF_INET, socket.SOCK_DGRAM) + +# Test for get_mac_address method +def test_get_mac_address(phycra_instance): + with patch('SP_CRA_mod.get_mac_address') as mock_get_mac: + mock_get_mac.return_value = '00:00:00:00:00:00' + assert phycra_instance.get_mac_address() == '00:00:00:00:00:00' + +# Test for connect_to_server method +def test_connect_to_server(phycra_instance): + with patch('SP_CRA_mod.socket.socket') as mock_socket, \ + patch('SP_CRA_mod.pickle.loads') as mock_pickle_loads: + mock_socket.return_value.recv.return_value = b'pickle_data' + mock_pickle_loads.return_value = np.array([1, 2, 3]) + phycra_instance.acf_client = np.array([[1, 2, 3]]) + phycra_instance.connect_to_server('127.0.0.1') + mock_socket.assert_called_with(socket.AF_INET, socket.SOCK_STREAM) + +# Test for listen_for_broadcast method +def test_listen_for_broadcast(phycra_instance): + with patch('SP_CRA_mod.socket.socket') as mock_socket, \ + patch('SP_CRA_mod.PHYCRA.connect_to_server') as mock_connect: + phycra_instance.listen_for_broadcast() + mock_socket.assert_called_with(socket.AF_INET, socket.SOCK_DGRAM) + mock_connect.assert_not_called() + +# Test for get_server_ip method +def test_get_server_ip(phycra_instance): + with patch('SP_CRA_mod.socket.socket') as mock_socket: + mock_socket.return_value.getsockname.return_value = ('127.0.0.1', 0) + assert phycra_instance.get_server_ip() == '127.0.0.1' + +@patch('SP_CRA_mod.socket.socket') +def test_handle_client_success(mock_socket, phycra_instance): + # Set up + conn = MagicMock() + addr = 
('192.168.1.1', 5050) + phycra_instance.acf = ['dummy_acf'] + random_index = 0 + + with patch('SP_CRA_mod.random.randint', return_value=random_index), \ + patch('SP_CRA_mod.pickle.dumps', return_value=b'encoded_acf'), \ + patch('SP_CRA_mod.PHYCRA.log_authentication') as mock_log_auth: + + conn.recv.side_effect = [ + (2).to_bytes(2, 'big'), # rx_index_length + str(random_index).encode(phycra_instance.FORMAT), # rx_index + (2).to_bytes(2, 'big'), # mac_address_length + 'AA:BB:CC:DD:EE:FF'.encode(phycra_instance.FORMAT) # mac_address + ] + + # Invoke + phycra_instance.handle_client(conn, addr) + + # Check + conn.sendall.assert_called_with(b'encoded_acf') + mock_log_auth.assert_called_with(addr[0], 'AA:BB:CC:DD:EE:FF', "Success") + +@patch('SP_CRA_mod.socket.socket') +def test_handle_client_fail(mock_socket, phycra_instance): + # Set up + conn = MagicMock() + addr = ('192.168.1.1', 5050) + phycra_instance.acf = ['dummy_acf'] + random_index = 0 + wrong_index = 1 # Different index to simulate failure + + with patch('SP_CRA_mod.random.randint', return_value=random_index), \ + patch('SP_CRA_mod.pickle.dumps', return_value=b'encoded_acf'), \ + patch('SP_CRA_mod.PHYCRA.log_authentication') as mock_log_auth: + + conn.recv.side_effect = [ + (2).to_bytes(2, 'big'), # rx_index_length + str(wrong_index).encode(phycra_instance.FORMAT), # rx_index + (2).to_bytes(2, 'big'), # mac_address_length + 'AA:BB:CC:DD:EE:FF'.encode(phycra_instance.FORMAT) # mac_address + ] + + # Invoke + phycra_instance.handle_client(conn, addr) + + # Check + conn.sendall.assert_called_with(b'encoded_acf') + mock_log_auth.assert_called_with(addr[0], 'AA:BB:CC:DD:EE:FF', "Access denied") + +# Main function to run the tests +if __name__ == '__main__': + options = ['-v', '-rA'] + pytest.main(options) + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/performance_testcases.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/performance_testcases.py new 
file mode 100644 index 000000000..725fc62c5 --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/performance_testcases.py @@ -0,0 +1,66 @@ +import threading +import time +import psutil +from SP_CRA_v7 import PHYCRA # SP_CRA_v7 is the main script/implementation + +class PerformanceTest: + def __init__(self, num_clients): + self.num_clients = num_clients + self.phycra_instance = PHYCRA() + self.client_threads = [] + self.start_time = None + self.end_time = None + self.cpu_usage = [] + self.memory_usage = [] + self.network_usage_start = psutil.net_io_counters() + self.network_usage_end = None + + def run_test(self): + self.start_time = time.time() + for _ in range(self.num_clients): + client_thread = threading.Thread(target=self.simulate_client) + self.client_threads.append(client_thread) + client_thread.start() + + # Monitor system resources in a separate thread + monitor_thread = threading.Thread(target=self.monitor_resources) + monitor_thread.start() + + for thread in self.client_threads: + thread.join() + + self.network_usage_end = psutil.net_io_counters() + self.end_time = time.time() + monitor_thread.join() + self.phycra_instance.stop_event.set() + self.phycra_instance.server_thread.join() + self.phycra_instance.listen_thread.join() + self.phycra_instance.broadcast_thread.join() + self.display_results() + + def simulate_client(self): + self.phycra_instance.connect_to_server(self.phycra_instance.SERVER) + + def monitor_resources(self): + while any(thread.is_alive() for thread in self.client_threads): + self.cpu_usage.append(psutil.cpu_percent(interval=1)) + self.memory_usage.append(psutil.virtual_memory().percent) + + def display_results(self): + total_time = self.end_time - self.start_time + avg_cpu_usage = sum(self.cpu_usage) / len(self.cpu_usage) + avg_memory_usage = sum(self.memory_usage) / len(self.memory_usage) + sent_bytes = self.network_usage_end.bytes_sent - self.network_usage_start.bytes_sent + recv_bytes = 
self.network_usage_end.bytes_recv - self.network_usage_start.bytes_recv + print(f"Total time for {self.num_clients} clients: {total_time:.2f} seconds") + print(f"Average time per client: {total_time / self.num_clients:.2f} seconds") + print(f"Average CPU usage during the test: {avg_cpu_usage:.2f}%") + print(f"Average memory usage during the test: {avg_memory_usage:.2f}%") + print(f"Total data sent: {sent_bytes} bytes") + print(f"Total data received: {recv_bytes} bytes") + +if __name__ == "__main__": + num_clients = 10 # Change this to the number of simulated clients to be tested + performance_test = PerformanceTest(num_clients) + performance_test.run_test() + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/security_testcases_file1.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/security_testcases_file1.py new file mode 100644 index 000000000..74b1684d0 --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/security_testcases_file1.py @@ -0,0 +1,42 @@ +import pytest +from unittest.mock import MagicMock +import numpy as np +from SP_CRA_v7 import PHYCRA + +@pytest.fixture +def phycra_instance(): + phycra = PHYCRA() + phycra.acf = np.array([[1, 2, 3], [4, 5, 6]]) # Simplified ACF for testing + phycra.acf_client = phycra.acf # Ensure client and server ACF are the same + return phycra + +def test_handle_client_success(phycra_instance): + conn = MagicMock() + addr = ("127.0.0.1", 5050) + phycra_instance.handle_client(conn, addr) + assert conn.close.called + +def test_handle_client_failure(phycra_instance): + # Simulate a client sending a wrong index + conn = MagicMock() + conn.recv.side_effect = [b'\x00\x01', b'1', b'\x00\x0c', b'fake-mac-address'] + addr = ("127.0.0.1", 5050) + phycra_instance.handle_client(conn, addr) + assert conn.close.called + +def test_handle_client_exception(phycra_instance): + # Simulate an exception during client handling + conn = MagicMock() + 
conn.recv.side_effect = Exception("Test Exception") + addr = ("127.0.0.1", 5050) + phycra_instance.handle_client(conn, addr) + assert conn.close.called + +def test_handle_client_disconnection(phycra_instance): + # Simulate a client disconnecting unexpectedly + conn = MagicMock() + conn.recv.side_effect = [b'\x00\x01', b'0', b'\x00\x0c', ConnectionResetError("Client disconnected")] + addr = ("127.0.0.1", 5050) + phycra_instance.handle_client(conn, addr) + assert conn.close.called + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/security_testcases_file2.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/security_testcases_file2.py new file mode 100644 index 000000000..127df6518 --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/PHY_CRA_scripts/security_testcases_file2.py @@ -0,0 +1,39 @@ +import pytest +import socket +from unittest.mock import MagicMock, patch, call +from SP_CRA_v7 import PHYCRA + +@pytest.fixture +def phycra_instance(): + instance = PHYCRA() + instance.server_thread = MagicMock() + instance.listen_thread = MagicMock() + instance.broadcast_thread = MagicMock() + instance.stop_event = MagicMock() + instance.server = MagicMock() + # Mock the broadcast_sock if it doesn't exist in PHYCRA + instance.broadcast_sock = MagicMock() + return instance + +# Mocking incorrect index transmission +def test_incorrect_index_transmission(phycra_instance): + with patch('socket.socket'): + conn = MagicMock() + phycra_instance.acf = MagicMock() + # Simulate receiving a malformed packet + conn.recv.return_value = b'\x00\x00\x00' + phycra_instance.handle_client(conn, ('127.0.0.1', 5050)) + conn.send.assert_not_called() # Assuming it shouldn't send anything on error + +# Basic security test cases +def test_replay_attack_prevention(phycra_instance): + with patch('socket.socket'): + conn = MagicMock() + addr = ('127.0.0.1', 5050) + phycra_instance.acf = MagicMock() + 
phycra_instance.acf.check_validity = MagicMock(return_value=False) + phycra_instance.handle_client(conn, addr) + conn.send.assert_not_called() # Assuming it shouldn't send anything if replay attack detected + + + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_RSS_Auth.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_RSS_Auth.py new file mode 100755 index 000000000..f5f921eca --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_RSS_Auth.py @@ -0,0 +1,181 @@ +import csv +import threading +import subprocess +import time +from datetime import datetime +from tabulate import tabulate +import os + +class RSS_Auth: + def __init__(self): + self.default_macs = [] + self.current_macs = [] + self.rssi_avg = [] + self.updated_rssi_avg = [] + self.threshold = 4 + self.process = None + self.results = {'Pass': [], 'Fail': []} + # Hardcoded script and CSV file paths + self.start_script = "./features/PHY/RSS_auth/F_rssi_capture.py" + self.mac_address_file = "/tmp/mac_addresses.csv" + self.rssi_file = "/tmp/output_processed.csv" + + def start(self): + """Starts the RSSI capturing process in a separate thread.""" + # Starts the RSSI capturing script as a subprocess + self.process = subprocess.Popen(["python3", self.start_script], stdout=subprocess.PIPE) + + # Start the run method in a separate thread + self.thread = threading.Thread(target=self.run) + self.thread.start() + + def stop(self): + """Stops the RSSI capturing process.""" + if self.thread and self.thread.is_alive(): + # Set a flag here to stop the run method loop if necessary + self.thread.join() + if self.process: + self.process.terminate() + self.process.wait() + self.process = None + + def start_rssi_capture(self): + """Starts the RSSI capturing script as a subprocess.""" + self.process = subprocess.Popen(["python3", self.start_script], stdout=subprocess.PIPE) + + def stop_rssi_capture(self): + """Stops the RSSI capturing script.""" + 
if self.process: + self.process.terminate() + self.process = None + + def log_authentication(self, mac, result): + log_file_path = './authentication_log.csv' + """Log the authentication result to a CSV file.""" + try: + with open('authentication_log.csv', mode='a', newline='') as file: + writer = csv.writer(file) + # Write headers if file is empty + if file.tell() == 0: + writer.writerow(['Date/Time', 'MAC Address', 'Result']) + # Write the authentication result + writer.writerow([datetime.now().strftime('%Y-%m-%d %H:%M:%S'), mac, result]) + except Exception as e: + print(f"An error occurred while logging: {e}") + + def load_mac_addresses(self): + macs = [] + try: + with open(self.mac_address_file, 'r') as csv_file: + reader = csv.reader(csv_file) + next(reader) # Skip header + for row in reader: + if row: # Skip empty rows + macs.append(row) + except FileNotFoundError: + print(f"File {self.mac_address_file} not found.") + except Exception as e: + print(f"An error occurred: {e}") + return macs + + def print_macs(self, macs, description): + # Printing MAC addresses for demonstration + print(f"\n{description}") + print(tabulate(macs, headers=['MAC Addresses'])) + + def load_rssi_values(self): + rssi_values = [] + try: + with open(self.rssi_file, 'r') as csv_file: + reader = csv.reader(csv_file) + for row in reader: + rssi_values.append(row) + except FileNotFoundError: + print(f"File {self.rssi_file} not found.") + except Exception as e: + print(f"An error occurred: {e}") + return rssi_values + + def calculate_average_rssi(self, rssi_values): + averages = [] + for row in rssi_values: + values = [float(val) for val in row if self.is_float(val)] + if values: + averages.append(sum(values) / len(values)) + return averages + + @staticmethod + def is_float(value): + try: + float(value) + return True + except ValueError: + return False + + def authenticate_nodes(self): + additional_macs = set(tuple(mac) for mac in self.current_macs) - set(tuple(mac) for mac in 
self.default_macs) + missing_macs = set(tuple(mac) for mac in self.default_macs) - set(tuple(mac) for mac in self.current_macs) + validated_nodes = set() + intruder_nodes = set() + + for i, mac in enumerate(self.current_macs): + mac_tuple = tuple(mac) # Convert list to tuple for comparison + if mac_tuple in additional_macs: + print(f"New node detected with MAC {mac}, Checks needed.") + self.log_authentication('-'.join(mac), 'Fail') + intruder_nodes.add(mac_tuple) + elif mac_tuple in missing_macs: + print(f"Node with MAC {mac} has left the network.") + self.log_authentication('-'.join(mac), 'Left') + else: + if abs(self.updated_rssi_avg[i] - self.rssi_avg[i]) <= self.threshold: + validated_nodes.add(mac_tuple) + self.log_authentication('-'.join(mac), 'Pass') + else: + print(f"Node with MAC {mac} has different RSS: Checks required.") + self.log_authentication('-'.join(mac), 'Fail') + intruder_nodes.add(mac_tuple) + + # Update results without unpacking tuple + self.results['Pass'] = [list(mac) for mac in validated_nodes] + self.results['Fail'] = [list(mac) for mac in intruder_nodes] + return validated_nodes, intruder_nodes + + + def get_result(self): + """ + Returns the latest results of the RSS authentication process. 
+ """ + # Return a copy of the results to avoid unintentional modifications + return self.results.copy() + + + def run(self): + self.default_macs = self.load_mac_addresses(self.mac_address_file) + self.print_macs(self.default_macs, "Default MAC Addresses:") + default_rssi_values = self.load_rssi_values(self.rssi_file) + self.rssi_avg = self.calculate_average_rssi(default_rssi_values) + + while True: # Continuous monitoring loop + self.current_macs = self.load_mac_addresses(self.mac_address_file) + self.print_macs(self.current_macs, "Updated MAC Addresses:") + updated_rssi_values = self.load_rssi_values(self.rssi_file) + self.updated_rssi_avg = self.calculate_average_rssi(updated_rssi_values) + + validated_nodes, intruder_nodes = self.authenticate_nodes() + print("Validated Nodes:", validated_nodes) + print("Intruder Nodes:", intruder_nodes) + + time.sleep(5) # The sleep time can be adjusted as needed + +# The 'if __name__ == "__main__":' block is omitted as it's not needed for module use + + +#if __name__ == "__main__": + # Example usage + # rss_auth = RSS_Auth("F_RSSI_Capture_v1.py", "mac_addresses.csv", "output_processed.csv") + #rss_auth.start() + # Assume the script runs for a certain period or until a condition is met + #time.sleep(60) # Example: run for 60 seconds + #rss_auth.stop() + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_mon_rssi.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_mon_rssi.py new file mode 100644 index 000000000..450dcf361 --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_mon_rssi.py @@ -0,0 +1,79 @@ +import argparse +import subprocess +import sys +import csv +import datetime +import time +from time import sleep +from threading import Thread +from getmac import get_mac_address +from netaddr import * +import yaml +import os + +rssi_values = [] + +def get_mac_oui(): + mac = EUI(get_mac_address(interface)) + oui = mac.oui + 
print(oui.registration().address) + return oui + +def get_rssi(): + global interface + cmd = "iw dev " + interface + " station dump | grep 'signal:' | awk '{print $2}'" + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True) + rssi = proc.communicate()[0].decode('utf-8') + return rssi + +def log_rssi(): + global rssi_mon_interval + global debug + flag_file_path = '/tmp/run_flag.txt' # Path to the flag file + + fn_suffix = datetime.datetime.now().strftime('%m_%d_%Y_%H_%M_%S') + log_file_path = './' + log_file_name = 'rssi' + fn_suffix + '.csv' + header_written = False + + while os.path.exists(flag_file_path): # Check if the flag file exists + rssi_sta = get_rssi() + if debug: + print(rssi_sta) + + with open(log_file_path + log_file_name, 'a', newline='') as csvfile: + writer = csv.writer(csvfile) + if not header_written: + writer.writerow(['Timestamp', 'RSSI']) # Write the header + header_written = True + writer.writerow([time.time(), rssi_sta]) + + sleep(rssi_mon_interval) + + print("RSSI capturing stopped.") + +if __name__ == '__main__': + # Construct the argument parser + phy_cfg = argparse.ArgumentParser() + + # Add the arguments to the parser + phy_cfg.add_argument("-r", "--rssi_period", required=True, help="RSSI monitoring period Ex: 5 (equals to 5 sec)") + phy_cfg.add_argument("-i", "--interface", required=True) + args = phy_cfg.parse_args() + + # Get the physical parameter monitoring configuration + print('> Loading yaml conf... 
') + conf = yaml.safe_load(open("phy_param.conf", 'r')) + debug = conf['debug'] + rssi_mon_interval = conf['rssi_poll_interval'] + interface = conf['interface'] + capture_rssi = conf['rssi'] + + # Populate args + rssi_mon_interval = int(args.rssi_period) + interface = args.interface + + # Capture RSSI if enabled in config file + if capture_rssi: + Thread(target=log_rssi).start() + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_rssi_capture.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_rssi_capture.py new file mode 100644 index 000000000..3751876e6 --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/F_rssi_capture.py @@ -0,0 +1,77 @@ +import subprocess +import time +import csv +import os +import glob + +def execute_script(script_name, args=None): + """Execute a script with optional arguments.""" + if args is None: + args = [] + subprocess.Popen(["python3", script_name] + args) + +def find_latest_csv(pattern): + """Find the latest CSV file matching a given pattern.""" + csv_files = glob.glob('/tmp/' + pattern) + if not csv_files: + print("No CSV file found.") + return None + return max(csv_files, key=os.path.getmtime) + +def process_csv(input_file, output_file): + """Process CSV file and write to output.""" + + input_path = os.path.join('./', input_file) + output_path = os.path.join('./', output_file) + with open(input_path, 'r') as csv_in, open(output_path, 'w', newline='') as csv_out: + reader = csv.reader(csv_in) + writer = csv.writer(csv_out) + next(reader) # Skip the header row + + transposed_rows = list(map(list, zip(*(extract_values(row) for row in reader)))) + writer.writerows(transposed_rows) + +def extract_values(row): + """Extract and process values from a row.""" + values_str = row[1] + values_list = [value.strip() for value in values_str.split('\n') if value.strip()] + return [int(value) for value in values_list if is_valid_number(value)] + +def 
is_valid_number(value): + """Check if a string represents a valid number.""" + return value.isdigit() or (value.startswith('-') and value[1:].isdigit()) + +def create_flag_file(): + """Create a flag file to signal the scripts to continue.""" + open('/tmp/run_flag.txt', 'w').close() + +def delete_flag_file(): + """Delete the flag file to signal the scripts to stop.""" + if os.path.exists('/tmp/run_flag.txt'): + os.remove('/tmp/run_flag.txt') + +def main(): + create_flag_file() + + # Start mon_rssi.py for continuous RSSI monitoring + execute_script("F_mon_rssi.py", ["-i", "wlp1s0", "-r", "1"]) + + try: + while True: + # Periodically execute bat_mac_list_v1.py + execute_script("bat_mac_list_v1.py") + time.sleep(10) # Adjust the sleep time as needed for periodic execution + + # Periodically check and process new CSV files + latest_csv_file = find_latest_csv('rssi*.csv') + if latest_csv_file: + process_csv(latest_csv_file, 'output_processed.csv') + time.sleep(5) # Adjust the sleep time as needed for processing + except KeyboardInterrupt: + print("Stopping scripts and processing...") + finally: + delete_flag_file() + +if __name__ == "__main__": + main() + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/bat_mac_list_v1.py b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/bat_mac_list_v1.py new file mode 100644 index 000000000..dbecd5b19 --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/bat_mac_list_v1.py @@ -0,0 +1,49 @@ +import subprocess +import re +import csv + +def get_mac_addresses(): + try: + # Execute the 'batctl n' command and decode the output + output = subprocess.check_output(['batctl', 'n']).decode('utf-8') + except subprocess.CalledProcessError as e: + print(f"Error executing batctl command: {e}") + return [] + except Exception as e: + print(f"An unexpected error occurred: {e}") + return [] + + lines = output.split('\n') + mac_addresses = [] + + for line in lines: + # Search for 
MAC addresses in each line + match = re.search(r'\s+(\w{2}:\w{2}:\w{2}:\w{2}:\w{2}:\w{2})\s+', line) + if match: + mac_address = match.group(1) + mac_addresses.append(mac_address) + + return mac_addresses + +def save_to_csv(mac_addresses, filename): + try: + full_path = filename # Assuming full path is provided + with open(full_path, 'w', newline='') as csvfile: + writer = csv.writer(csvfile) + writer.writerow(['MAC Address']) + for mac_address in mac_addresses: + writer.writerow([mac_address]) + except IOError as e: + print(f"Error writing to file: {e}") + +# Retrieve MAC addresses and save them to a CSV file +try: + mac_addresses = get_mac_addresses() + if mac_addresses: + save_to_csv(mac_addresses, './mac_addresses.csv') + else: + print("No MAC addresses found.") +except Exception as e: + print(f"An error occurred: {e}") + + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/phy_param.conf b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/phy_param.conf new file mode 100644 index 000000000..a1ae0764e --- /dev/null +++ b/modules/sc-mesh-secure-deployment/src/2_0/features/PHY/RSS_auth/phy_param.conf @@ -0,0 +1,20 @@ +--- +debug: True +#Supported formats are "nexmom", "esp", "ath" +csi: False +csi_format: +rssi: True +rssi_avg: False +rssi_poll_interval: 5 +#csi or rssi monitoring interface +interface: "wlan1" +#source mac addr of the rx frame +mac_addr_filter: "00:30:1a:4f:8d:2c" +#monitoring channel +channel: 36 +#bandwidth of the monitoring link, ex: 20/40/80 +bandwidth: 80 +# serial port config is only applicable for ESP CSI format +serial_port: "ttyUSB0" +csi_max_records: 5 + diff --git a/modules/sc-mesh-secure-deployment/src/2_0/main.py b/modules/sc-mesh-secure-deployment/src/2_0/main.py old mode 100644 new mode 100755 index e69de29bb..924e39942 --- a/modules/sc-mesh-secure-deployment/src/2_0/main.py +++ b/modules/sc-mesh-secure-deployment/src/2_0/main.py @@ -0,0 +1,105 @@ +import yaml +import threading +import time + 
# Import classes for features
from features.PHY.PHY_CRA_scripts.SP_CRA_mainDE1 import PHYCRA
from features.PHY.RSS_auth.F_RSS_Auth import RSS_Auth
# from features.IDS.IDS import IDS
# from features.jamming.jamming import Jamming


def launch_PHY():
    """Start the PHY-layer challenge/response (PHYCRA) sensor and return it."""
    phycra = PHYCRA()
    phycra.start()
    return phycra


def launch_RSS():
    """Start the RSS authentication sensor and return it."""
    rss_authen = RSS_Auth()
    rss_authen.start()
    return rss_authen


def launch_IDS():
    """Placeholder for launching the IDS feature.

    Returns None until the IDS feature is implemented; initialize() must
    not register that None as a sensor.
    """
    return None


def launch_jamming():
    """Placeholder for launching the jamming-detection feature (no-op)."""
    pass


def stop_PHY(phycra):
    """Stop a running PHYCRA sensor."""
    phycra.stop()


def stop_RSS(rss_authn):
    """Stop a running RSS authentication sensor."""
    rss_authn.stop()


def stop_jamming(jamming):
    """Stop a running jamming-detection sensor."""
    jamming.stop()


def stop_IDS(ids):
    """Stop a running IDS sensor."""
    ids.stop()


def launch_decision_engine(sensors):
    """Periodically poll every registered sensor and hand the results to the
    (future) decision engine.

    sensors maps sensor_name -> sensor object exposing get_result().
    Runs forever; the polling period is the trailing sleep.
    """
    collected_data = {}
    # decision_engine = DecisionEngine(sensors)

    while True:
        print("Executing decision engine with collected data: ")
        for sensor_name, sensor in sensors.items():
            sensor_data = sensor.get_result()
            # Default to empty pass/fail lists when a sensor has no new data.
            collected_data[sensor_name] = (
                sensor_data if sensor_data else {'Pass': [], 'Fail': []}
            )

        # Report each sensor's own data once (the original printed the whole
        # collected_data dict once per sensor).
        for sensor_name, sensor_data in collected_data.items():
            print(sensor_name)
            print(sensor_data)

        # Process data with the decision engine
        # decisions = decision_engine.make_decision(collected_data)

        # Place holder to call quarantine/ MBA if necessary

        time.sleep(40)  # Polling period; can be adjusted


def readfile():
    """Load features.yaml and return it as a dict, or None on a parse error."""
    with open("features.yaml", "r") as stream:
        try:
            return yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
            return None


def initialize(feature):
    """Launch the named feature and, when it publishes results, register it
    in the module-level `sensors` dict consumed by the decision engine.
    """
    if feature == 'PHY':
        sensors[feature] = launch_PHY()
    elif feature == 'RSS':
        sensors[feature] = launch_RSS()
    elif feature == 'IDS':
        ids = launch_IDS()
        # launch_IDS() is still a placeholder returning None; registering
        # None would crash the decision engine's get_result() call.
        if ids is not None:
            sensors[feature] = ids
    elif feature == 'jamming':
        launch_jamming()


if __name__ == "__main__":
    features = readfile()
    sensors = {}  # sensor_name -> sensor object polled by the decision engine

    # Start every feature enabled in features.yaml; guard against a missing
    # or unparsable file, where readfile() returns None.
    for feature in (features or {}):
        if features[feature]:
            initialize(feature)

    # Call decision engine
    launch_decision_engine(sensors)