Rename customized reset_parameters to init_weights (Fix #123)
xpai committed Nov 5, 2024
1 parent 7b98a49 commit e0d5495
Showing 8 changed files with 23 additions and 19 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,12 @@
 [Doing] Add support for saving pb file, exporting embeddings
 [Doing] Add support of multi-gpu training
 
+**FuxiCTR v2.3.4, 2024-11-05**
++ [Fix] Fix OOV token update ([#119](https://github.com/reczoo/FuxiCTR/issues/119))
++ [Fix] Speed up parquet dataset reading ([#121](https://github.com/reczoo/FuxiCTR/issues/121))
++ [Fix] Fix the issue that add_loss() does not work after being renamed to compute_loss() ([#122](https://github.com/reczoo/FuxiCTR/issues/122))
++ [Fix] Rename customized reset_parameters to init_weights ([#123](https://github.com/reczoo/FuxiCTR/issues/123))
+
 **FuxiCTR v2.3.3, 2024-10-14**
 + [Feature] Add EulerNet and DCNv3 models
 + [Feature] Add support to parquet as input, like csv format
6 changes: 3 additions & 3 deletions fuxictr/pytorch/layers/embeddings/feature_embedding.py
@@ -112,7 +112,7 @@ def __init__(self,
                                 padding_idx=padding_idx)
             elif feature_spec["type"] == "embedding":
                 self.embedding_layers[feature] = nn.Identity()
-        self.reset_parameters()
+        self.init_weights()
 
     def get_feature_encoder(self, encoder):
         try:
@@ -127,13 +127,13 @@ def get_feature_encoder(self, encoder):
         except:
             raise ValueError("feature_encoder={} is not supported.".format(encoder))
 
-    def reset_parameters(self):
+    def init_weights(self):
         embedding_initializer = get_initializer(self.embedding_initializer)
         for k, v in self.embedding_layers.items():
             if "share_embedding" in self._feature_map.features[k]:
                 continue
             if type(v) == PretrainedEmbedding: # skip pretrained
-                v.reset_parameters(embedding_initializer)
+                v.init_weights(embedding_initializer)
             elif type(v) == nn.Embedding:
                 if v.padding_idx is not None:
                     embedding_initializer(v.weight[1:, :]) # set padding_idx to zero
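
The last context line above shows how the padding row is kept at zero: the initializer runs only on the slice from row 1 onward. A minimal sketch of that slicing idiom, assuming a plain normal initializer in place of the one returned by get_initializer, and made-up table sizes:

import torch
import torch.nn as nn

# hypothetical sizes for illustration
emb = nn.Embedding(num_embeddings=10, embedding_dim=4, padding_idx=0)

# initialize all rows except the padding row, as init_weights() does
nn.init.normal_(emb.weight[1:, :], std=1e-4)

assert (emb.weight[0] == 0).all()  # padding embedding remains zero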
2 changes: 1 addition & 1 deletion fuxictr/pytorch/layers/embeddings/pretrained_embedding.py
@@ -62,7 +62,7 @@ def __init__(self,
         if pretrain_usage == "concat":
             self.proj = nn.Linear(pretrain_dim + embedding_dim, embedding_dim, bias=False)
 
-    def reset_parameters(self, embedding_initializer):
+    def init_weights(self, embedding_initializer):
         if self.pretrain_usage in ["sum", "concat"]:
             nn.init.zeros_(self.id_embedding.weight) # set oov token embeddings to zeros
             embedding_initializer(self.id_embedding.weight[1:self.oov_idx, :])
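
For pretrained embeddings the order is inverted: the whole id table is zeroed first, which leaves the OOV row (and padding row 0) at zero, and only rows 1 through oov_idx - 1 are then re-initialized. A sketch of that order of operations, with a made-up oov_idx and a stand-in initializer:

import torch
import torch.nn as nn

oov_idx = 5                          # hypothetical: ids >= 5 map to the OOV bucket
id_embedding = nn.Embedding(6, 4)    # rows 0..5; row 5 holds the OOV token

nn.init.zeros_(id_embedding.weight)  # OOV and padding rows end up zero
nn.init.normal_(id_embedding.weight[1:oov_idx, :], std=1e-4)  # re-init in-vocab rows only

assert (id_embedding.weight[oov_idx] == 0).all()  # OOV embedding stays zero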
8 changes: 4 additions & 4 deletions fuxictr/pytorch/layers/interactions/bilinear_interaction.py
@@ -33,9 +33,9 @@ def __init__(self, num_fields, embedding_dim, bilinear_type="field_interaction")
             self.bilinear_W = nn.Parameter(torch.Tensor(self.interact_dim, embedding_dim, embedding_dim))
         else:
             raise NotImplementedError
-        self.reset_parameters()
+        self.init_weights()
 
-    def reset_parameters(self):
+    def init_weights(self):
         nn.init.xavier_normal_(self.bilinear_W)
 
     def forward(self, feature_emb):
@@ -70,9 +70,9 @@ def __init__(self, num_fields, embedding_dim, bilinear_type="field_interaction")
         else:
             raise NotImplementedError
         self.triu_index = nn.Parameter(torch.triu_indices(num_fields, num_fields, offset=1), requires_grad=False)
-        self.reset_parameters()
+        self.init_weights()
 
-    def reset_parameters(self):
+    def init_weights(self):
         nn.init.xavier_normal_(self.bilinear_W)
 
     def forward(self, feature_emb):
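
Both bilinear modules now expose the same init_weights() hook, which Xavier-initializes the pairwise interaction tensor. A small sketch of the field_interaction case with made-up sizes (one embedding_dim x embedding_dim matrix per field pair):

import torch
import torch.nn as nn

num_fields, embedding_dim = 4, 3
interact_dim = num_fields * (num_fields - 1) // 2   # 6 field pairs

bilinear_W = nn.Parameter(torch.Tensor(interact_dim, embedding_dim, embedding_dim))
nn.init.xavier_normal_(bilinear_W)   # what init_weights() runs

print(bilinear_W.shape)  # torch.Size([6, 3, 3])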
14 changes: 7 additions & 7 deletions fuxictr/pytorch/models/rank_model.py
@@ -92,18 +92,18 @@ def compute_loss(self, return_dict, y_true):
         return loss
 
     def reset_parameters(self):
-        def reset_default_params(m):
+        def default_reset_params(m):
             # initialize nn.Linear/nn.Conv1d layers by default
             if type(m) in [nn.Linear, nn.Conv1d]:
                 nn.init.xavier_normal_(m.weight)
                 if m.bias is not None:
                     m.bias.data.fill_(0)
-        def reset_custom_params(m):
-            # initialize layers with customized reset_parameters
-            if hasattr(m, 'reset_custom_params'):
-                m.reset_custom_params()
-        self.apply(reset_default_params)
-        self.apply(reset_custom_params)
+        def custom_reset_params(m):
+            # initialize layers with customized init_weights()
+            if hasattr(m, 'init_weights'):
+                m.init_weights()
+        self.apply(default_reset_params)
+        self.apply(custom_reset_params)
 
     def get_inputs(self, inputs, feature_source=None):
         X_dict = dict()
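
The hunk above is the heart of the change: reset_parameters() makes two passes over the module tree, first applying a generic Xavier init to nn.Linear/nn.Conv1d layers, then letting any submodule that defines init_weights() override it. A self-contained sketch of that two-pass dispatch (GateLayer and Model are illustrative stand-ins, not FuxiCTR code):

import torch.nn as nn

class GateLayer(nn.Module):
    # illustrative module with its own init_weights() hook
    def __init__(self, dim):
        super().__init__()
        self.linear = nn.Linear(dim, dim)

    def init_weights(self):
        nn.init.zeros_(self.linear.weight)  # custom scheme, overrides the default
        nn.init.ones_(self.linear.bias)

class Model(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(8, 4)
        self.gate = GateLayer(4)

    def reset_parameters(self):
        def default_reset_params(m):
            if type(m) in [nn.Linear, nn.Conv1d]:
                nn.init.xavier_normal_(m.weight)
                if m.bias is not None:
                    m.bias.data.fill_(0)

        def custom_reset_params(m):
            if hasattr(m, "init_weights"):
                m.init_weights()

        self.apply(default_reset_params)  # pass 1: generic init (hits gate.linear too)
        self.apply(custom_reset_params)   # pass 2: custom hooks win by running last

model = Model()
model.reset_parameters()
assert (model.gate.linear.weight == 0).all()  # the init_weights() override took effect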
2 changes: 0 additions & 2 deletions model_zoo/APG/config/model_config.yaml
@@ -151,5 +151,3 @@ APG_DCNv2: # This is a config template
     seed: 2019
     monitor: {'AUC': 1, 'logloss': -1}
     monitor_mode: 'max'
-
-
2 changes: 1 addition & 1 deletion model_zoo/FinalNet/src/FinalNet.py
@@ -121,7 +121,7 @@ def __init__(self, num_fields, gate_residual="concat"):
         assert gate_residual in ["concat", "sum"]
         self.gate_residual = gate_residual
 
-    def reset_custom_params(self):
+    def init_weights(self):
         nn.init.zeros_(self.linear.weight)
         nn.init.ones_(self.linear.bias)
 
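
The FinalNet edit exists because the dispatch in rank_model.py looks the hook up by name: had the gate kept reset_custom_params, the new custom_reset_params pass would never find it, and the gate would silently be left with the generic Xavier init instead of zero weights and unit biases. A tiny sketch of the failure mode the rename avoids (OldGate is illustrative):

import torch.nn as nn

class OldGate(nn.Module):
    # illustrative module still using the stale hook name
    def __init__(self):
        super().__init__()
        self.linear = nn.Linear(4, 4)

    def reset_custom_params(self):        # never discovered by the new pass
        nn.init.zeros_(self.linear.weight)
        nn.init.ones_(self.linear.bias)

gate = OldGate()
print(hasattr(gate, "init_weights"))      # False -> custom init silently skipped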
2 changes: 1 addition & 1 deletion tests/test_torch.sh
@@ -4,7 +4,7 @@ home="$(pwd)/../model_zoo"
 echo "=== Testing AFM ===" && cd $home/AFM && python run_expid.py --expid AFM_test && \
 echo "=== Testing AFN ===" && cd $home/AFN && python run_expid.py --expid AFN_test && \
 echo "=== Testing AOANet ===" && cd $home/AOANet && python run_expid.py --expid AOANet_test && \
-echo "=== Testing APG ===" && cd $home/APG && python run_expid.py --expid APG_test && \
+echo "=== Testing APG ===" && cd $home/APG && python run_expid.py --expid APG_DeepFM_test && \
 echo "=== Testing AutoInt ===" && cd $home/AutoInt && python run_expid.py --expid AutoInt_test && \
 echo "=== Testing BST ===" && cd $home/BST && python run_expid.py --expid BST_test && \
 echo "=== Testing CCPM ===" && cd $home/CCPM && python run_expid.py --expid CCPM_test && \
