Skip to content

Commit

Permalink
Moved loss function comparison back to logits vs. one-hot labels
Browse files Browse the repository at this point in the history
  • Loading branch information
advaithsrao committed Nov 26, 2023
1 parent f6b00b1 commit 4252b71
Showing 1 changed file with 22 additions and 22 deletions.
44 changes: 22 additions & 22 deletions detector/modeler.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,17 +153,17 @@ def train(
# Forward pass
outputs = self.model(b_input_ids, attention_mask=b_input_mask, labels=b_labels)
logits = outputs.logits

# sigmoid_output = torch.sigmoid(logits[:, 1])

sigmoid_output = torch.sigmoid(logits[:, 1])

# Thresholding to convert probabilities to binary values (0 or 1)
binary_output = (sigmoid_output > 0.5)
# # Thresholding to convert probabilities to binary values (0 or 1)
# binary_output = (sigmoid_output > 0.5)

# # Convert labels to one-hot encoding
# b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()

# Calculate the loss using the weighted loss function
loss = loss_function(binary_output, b_labels.float())
loss = loss_function(logits, b_labels_one_hot)
total_train_loss += loss.item()

# Backward pass
Expand Down Expand Up @@ -197,16 +197,16 @@ def train(
# loss = outputs[0]
logits = outputs.logits

sigmoid_output = torch.sigmoid(logits[:, 1])
# sigmoid_output = torch.sigmoid(logits[:, 1])

# Thresholding to convert probabilities to binary values (0 or 1)
binary_output = (sigmoid_output > 0.5)
# # Thresholding to convert probabilities to binary values (0 or 1)
# binary_output = (sigmoid_output > 0.5)

# # Convert labels to one-hot encoding
# b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()

# Calculate the loss using the weighted loss function
loss = loss_function(binary_output, b_labels.float())
loss = loss_function(logits, b_labels_one_hot)
total_eval_loss += loss.item()
logits = logits.detach().cpu().numpy()
label_ids = b_labels.detach().cpu().numpy()
Expand Down Expand Up @@ -460,17 +460,17 @@ def train(
# Forward pass
outputs = self.model(b_input_ids, attention_mask=b_input_mask, labels=b_labels)
logits = outputs.logits

# sigmoid_output = torch.sigmoid(logits[:, 1])

sigmoid_output = torch.sigmoid(logits[:, 1])

# Thresholding to convert probabilities to binary values (0 or 1)
binary_output = (sigmoid_output > 0.5)
# # Thresholding to convert probabilities to binary values (0 or 1)
# binary_output = (sigmoid_output > 0.5)

# # Convert labels to one-hot encoding
# b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()

# Calculate the loss using the weighted loss function
loss = loss_function(binary_output, b_labels.float())
loss = loss_function(logits, b_labels_one_hot)

total_train_loss += loss.item()

Expand Down Expand Up @@ -504,16 +504,16 @@ def train(
outputs = self.model(b_input_ids, attention_mask=b_input_mask, labels=b_labels)
logits = outputs.logits

sigmoid_output = torch.sigmoid(logits[:, 1])
# sigmoid_output = torch.sigmoid(logits[:, 1])

# Thresholding to convert probabilities to binary values (0 or 1)
binary_output = (sigmoid_output > 0.5)
# # Thresholding to convert probabilities to binary values (0 or 1)
# binary_output = (sigmoid_output > 0.5)

# # Convert labels to one-hot encoding
# b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()
b_labels_one_hot = F.one_hot(b_labels, num_classes=2).float()

# Calculate the loss using the weighted loss function
loss = loss_function(binary_output, b_labels.float())
loss = loss_function(logits, b_labels_one_hot)
total_eval_loss += loss.item()
logits = logits.detach().cpu().numpy()
label_ids = b_labels.detach().cpu().numpy()
Expand Down

0 comments on commit 4252b71

Please sign in to comment.