From 4252b718d23d0d6ccf5893686acf9146ab3d0afe Mon Sep 17 00:00:00 2001
From: Advaith Rao
Date: Sun, 26 Nov 2023 01:17:46 -0500
Subject: [PATCH] Moved loss function comparison back to logit vs onehot labels

---
 detector/modeler.py | 44 ++++++++++++++++++++++----------------------
 1 file changed, 22 insertions(+), 22 deletions(-)

diff --git a/detector/modeler.py b/detector/modeler.py
index cb00c73..19b55c9 100644
--- a/detector/modeler.py
+++ b/detector/modeler.py
@@ -153,17 +153,17 @@ def train(
             # Forward pass
             outputs = self.model(b_input_ids, attention_mask=b_input_mask, labels=b_labels)
             logits = outputs.logits
+
+            # sigmoid_output = torch.sigmoid(logits[:, 1])
 
-            sigmoid_output = torch.sigmoid(logits[:, 1])
-
-            # Thresholding to convert probabilities to binary values (0 or 1)
-            binary_output = (sigmoid_output > 0.5)
+            # # Thresholding to convert probabilities to binary values (0 or 1)
+            # binary_output = (sigmoid_output > 0.5)
 
             # # Convert labels to one-hot encoding
-            # b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
+            b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
 
             # Calculate the loss using the weighted loss function
-            loss = loss_function(binary_output, b_labels.float())
+            loss = loss_function(logits, b_labels_one_hot)
             total_train_loss += loss.item()
 
             # Backward pass
@@ -197,16 +197,16 @@ def train(
             # loss = outputs[0]
             logits = outputs.logits
 
-            sigmoid_output = torch.sigmoid(logits[:, 1])
+            # sigmoid_output = torch.sigmoid(logits[:, 1])
 
-            # Thresholding to convert probabilities to binary values (0 or 1)
-            binary_output = (sigmoid_output > 0.5)
+            # # Thresholding to convert probabilities to binary values (0 or 1)
+            # binary_output = (sigmoid_output > 0.5)
 
             # # Convert labels to one-hot encoding
-            # b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
+            b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
 
             # Calculate the loss using the weighted loss function
-            loss = loss_function(binary_output, b_labels.float())
+            loss = loss_function(logits, b_labels_one_hot)
             total_eval_loss += loss.item()
             logits = logits.detach().cpu().numpy()
             label_ids = b_labels.detach().cpu().numpy()
@@ -460,17 +460,17 @@ def train(
             # Forward pass
             outputs = self.model(b_input_ids, attention_mask=b_input_mask, labels=b_labels)
             logits = outputs.logits
+
+            # sigmoid_output = torch.sigmoid(logits[:, 1])
 
-            sigmoid_output = torch.sigmoid(logits[:, 1])
-
-            # Thresholding to convert probabilities to binary values (0 or 1)
-            binary_output = (sigmoid_output > 0.5)
+            # # Thresholding to convert probabilities to binary values (0 or 1)
+            # binary_output = (sigmoid_output > 0.5)
 
             # # Convert labels to one-hot encoding
-            # b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
+            b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
 
             # Calculate the loss using the weighted loss function
-            loss = loss_function(binary_output, b_labels.float())
+            loss = loss_function(logits, b_labels_one_hot)
 
             total_train_loss += loss.item()
 
@@ -504,16 +504,16 @@ def train(
             outputs = self.model(b_input_ids, attention_mask=b_input_mask, labels=b_labels)
             logits = outputs.logits
 
-            sigmoid_output = torch.sigmoid(logits[:, 1])
+            # sigmoid_output = torch.sigmoid(logits[:, 1])
 
-            # Thresholding to convert probabilities to binary values (0 or 1)
-            binary_output = (sigmoid_output > 0.5)
+            # # Thresholding to convert probabilities to binary values (0 or 1)
+            # binary_output = (sigmoid_output > 0.5)
 
             # # Convert labels to one-hot encoding
-            # b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
+            b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
 
             # Calculate the loss using the weighted loss function
-            loss = loss_function(binary_output, b_labels.float())
+            loss = loss_function(logits, b_labels_one_hot)
             total_eval_loss += loss.item()
             logits = logits.detach().cpu().numpy()
             label_ids = b_labels.detach().cpu().numpy()
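
All four hunks make the same swap: the loss is computed on the raw logits against one-hot
float labels instead of on thresholded binary outputs. Below is a minimal sketch of why that
matters, assuming loss_function is a weighted torch.nn.BCEWithLogitsLoss (the patch only
refers to it as "the weighted loss function"; its real definition lives elsewhere in
modeler.py and may differ) and using illustrative shapes:

    # Sketch only, not part of the patch: loss on raw logits vs. one-hot labels.
    import torch
    import torch.nn.functional as F

    batch_size, num_classes = 4, 2
    logits = torch.randn(batch_size, num_classes, requires_grad=True)  # stand-in for outputs.logits
    b_labels = torch.randint(0, num_classes, (batch_size,))            # integer class labels

    # Assumed weighted loss; the actual loss_function in modeler.py may be configured differently.
    loss_function = torch.nn.BCEWithLogitsLoss(pos_weight=torch.ones(num_classes))

    # Patched behaviour: one-hot float targets, loss computed on logits,
    # so gradients flow back into the model parameters.
    b_labels_one_hot = F.one_hot(b_labels, num_classes=num_classes).float()
    loss = loss_function(logits, b_labels_one_hot)
    loss.backward()
    print(loss.item(), logits.grad is not None)  # finite loss, True

    # Reverted behaviour: thresholding yields a boolean tensor with no grad_fn,
    # so nothing can backpropagate, and its dtype/shape no longer match the targets.
    # sigmoid_output = torch.sigmoid(logits[:, 1])
    # binary_output = (sigmoid_output > 0.5)
    # loss = loss_function(binary_output, b_labels.float())

Thresholding with ">" produces a boolean tensor that is detached from the computation graph,
so the earlier version could not backpropagate through the loss; computing the loss on the
logits keeps the graph intact and lets the class weights act on the raw scores.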