Skip to content

Commit

Permalink
Fix number of inferers considered in the reward cycle
Browse files Browse the repository at this point in the history
  • Loading branch information
fernandofcampos committed Dec 4, 2024
1 parent 69582a1 commit c091ee2
Show file tree
Hide file tree
Showing 3 changed files with 150 additions and 14 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
* [#651](https://github.com/allora-network/allora-chain/pull/651) Refactor: Fuzzer rename invariants test to fuzz test
* [#653](https://github.com/allora-network/allora-chain/pull/653) Fuzzer Bugfixes, Allow User to Set Fuzzer Transition Probability Distribution
* [#686](https://github.com/allora-network/allora-chain/pull/686) CLI query commands alignment
* [#691](https://github.com/allora-network/allora-chain/pull/691/files) Fix number of inferers considered in the reward cycle

### Deprecated

Expand Down
50 changes: 36 additions & 14 deletions x/emissions/keeper/keeper.go
Original file line number Diff line number Diff line change
Expand Up @@ -1112,19 +1112,13 @@ func (k *Keeper) AppendInference(
return errors.New("inference already submitted")
}

// Get active inferers for topic
workerAddresses, err := k.GetActiveInferersForTopic(ctx, topic.Id)
if err != nil {
return errorsmod.Wrap(err, "error getting active inferers for topic")
}
// If there are less than maxTopInferersToReward, add the current inferer
if uint64(len(workerAddresses)) < maxTopInferersToReward {
err := k.AddActiveInferer(ctx, topic.Id, inference.Inferer)
if err != nil {
return errorsmod.Wrap(err, "error adding active inferer")
}
return k.InsertInference(ctx, topic.Id, *inference)
}

// Get previous EMA score for the current inferer
previousEmaScore, err := k.GetInfererScoreEma(ctx, topic.Id, inference.Inferer)
if err != nil {
return errorsmod.Wrapf(err, "Error getting inferer score ema")
Expand All @@ -1134,21 +1128,39 @@ func (k *Keeper) AppendInference(
return types.ErrCantUpdateEmaMoreThanOncePerWindow
}

// Get lowest inferer score ema for the topic
lowestEmaScore, found, err := k.GetLowestInfererScoreEma(ctx, topic.Id)
if err != nil {
return errorsmod.Wrap(err, "error getting lowest inferer score ema")
// If there is no lowest inferer score ema, calculate it
} else if !found {
lowestEmaScore, err = GetLowestScoreFromAllInferers(ctx, k, topic.Id, workerAddresses)
if err != nil {
return errorsmod.Wrap(err, "error getting lowest score from all inferers")
}
}
// If there is no lowest inferer score ema, it means this is the first inference for the topic
if !found {
lowestEmaScore = previousEmaScore
err = k.SetLowestInfererScoreEma(ctx, topic.Id, lowestEmaScore)
if err != nil {
return errorsmod.Wrap(err, "error setting lowest inferer score ema")
}
}

// If there are fewer than maxTopInferersToReward, add the current inferer, update the lowest inferer score ema if needed, and return
if uint64(len(workerAddresses)) < maxTopInferersToReward {
// Update lowest inferer score ema if needed
if uint64(len(workerAddresses)) == 0 || lowestEmaScore.Score.Gt(previousEmaScore.Score) {
err = k.SetLowestInfererScoreEma(ctx, topic.Id, previousEmaScore)
if err != nil {
return errorsmod.Wrap(err, "error setting lowest inferer score ema")
}
}

err = k.AddActiveInferer(ctx, topic.Id, inference.Inferer)
if err != nil {
return errorsmod.Wrap(err, "error adding active inferer")
}
return k.InsertInference(ctx, topic.Id, *inference)
}

// Else ...
// Checks if the inferer's previous EMA score is greater than the lowest EMA score
if previousEmaScore.Score.Gt(lowestEmaScore.Score) {
// Update EMA score for the lowest score inferer, who is not the current inferer
err = k.CalcAndSaveInfererScoreEmaWithLastSavedTopicQuantile(
Expand All @@ -1160,6 +1172,16 @@ func (k *Keeper) AppendInference(
if err != nil {
return errorsmod.Wrap(err, "error calculating and saving inferer score ema with last saved topic quantile")
}

// Check if the inferer with lowest score is active before removing it, because remove will not fail if the inferer is not active
isActive, err := k.IsActiveInferer(ctx, topic.Id, lowestEmaScore.Address)
if err != nil {
return errorsmod.Wrap(err, "error checking if inferer is active")
}
if !isActive {
return errors.New("inferer with lowest score is not active")
}

// Remove inferer with lowest score
err = k.RemoveActiveInferer(ctx, topic.Id, lowestEmaScore.Address)
if err != nil {
Expand Down
113 changes: 113 additions & 0 deletions x/emissions/keeper/keeper_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3768,6 +3768,119 @@ func (s *KeeperTestSuite) TestAppendInference() {
s.Require().Equal(updateAttemptForWorker2.BlockHeight, updatedWorker2Score.BlockHeight, "unchanged height")
}

func getNewAddress() string {
addr := sdk.AccAddress(secp256k1.GenPrivKey().PubKey().Address())
return addr.String()
}

// TestAppendInferenceWithResetActiveWorkers checks that the active-inferer set
// and the tracked lowest inferer EMA score stay consistent across a reset of
// the active workers: after ResetActiveWorkersForTopic, a second round of
// inferences must rebuild the active set (capped at MaxTopInferersToReward),
// and the lowest-EMA-score record must move to the new lowest active scorer.
func (s *KeeperTestSuite) TestAppendInferenceWithResetActiveWorkers() {
ctx := s.ctx
k := s.emissionsKeeper
// Create a fresh topic for this test
topicId := s.CreateOneTopic(10801)
nonce := types.Nonce{BlockHeight: 10}
blockHeightInferences := int64(10)

// Set previous topic quantile inferer score ema
err := k.SetPreviousTopicQuantileInfererScoreEma(ctx, topicId, alloraMath.MustNewDecFromString("1000"))
s.Require().NoError(err)

topic, err := k.GetTopic(ctx, topicId)
s.Require().NoError(err)

// Six distinct worker addresses. worker1 intentionally gets no pre-seeded
// EMA score (see the commented-out score1 lines below).
worker1 := getNewAddress()
worker2 := getNewAddress()
worker3 := getNewAddress()
worker4 := getNewAddress()
worker5 := getNewAddress()
worker6 := getNewAddress()
// Pre-seed strictly increasing EMA scores (92..96) so the expected lowest
// scorer among the seeded workers is deterministic: worker2.
// score1 := types.Score{TopicId: topicId, BlockHeight: 2, Address: worker1, Score: alloraMath.NewDecFromInt64(91)}
score2 := types.Score{TopicId: topicId, BlockHeight: 2, Address: worker2, Score: alloraMath.NewDecFromInt64(92)}
score3 := types.Score{TopicId: topicId, BlockHeight: 2, Address: worker3, Score: alloraMath.NewDecFromInt64(93)}
score4 := types.Score{TopicId: topicId, BlockHeight: 2, Address: worker4, Score: alloraMath.NewDecFromInt64(94)}
score5 := types.Score{TopicId: topicId, BlockHeight: 2, Address: worker5, Score: alloraMath.NewDecFromInt64(95)}
score6 := types.Score{TopicId: topicId, BlockHeight: 2, Address: worker6, Score: alloraMath.NewDecFromInt64(96)}
// err = k.SetInfererScoreEma(ctx, topicId, worker1, score1)
// s.Require().NoError(err)
err = k.SetInfererScoreEma(ctx, topicId, worker2, score2)
s.Require().NoError(err)
err = k.SetInfererScoreEma(ctx, topicId, worker3, score3)
s.Require().NoError(err)
err = k.SetInfererScoreEma(ctx, topicId, worker4, score4)
s.Require().NoError(err)
err = k.SetInfererScoreEma(ctx, topicId, worker5, score5)
s.Require().NoError(err)
err = k.SetInfererScoreEma(ctx, topicId, worker6, score6)
s.Require().NoError(err)

// Ensure that the number of top inferers is capped at the max top inferers to reward
// New high-score entrant should replace earlier low-score entrant
params := types.DefaultParams()
params.MaxTopInferersToReward = 4
err = k.SetParams(ctx, params)
s.Require().NoError(err)

// Round 1: five inferers submit; with the cap at 4, one of them cannot
// remain in the active set.
allInferences := types.Inferences{
Inferences: []*types.Inference{
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker1, Value: alloraMath.MustNewDecFromString("0.11")},
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker2, Value: alloraMath.MustNewDecFromString("0.12")},
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker3, Value: alloraMath.MustNewDecFromString("0.13")},
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker4, Value: alloraMath.MustNewDecFromString("0.14")},
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker5, Value: alloraMath.MustNewDecFromString("0.15")},
},
}
for _, inference := range allInferences.Inferences {
err = k.AppendInference(ctx, topic, nonce.BlockHeight, inference, params.MaxTopInferersToReward)
s.Require().NoError(err)
}

// The active set must be capped at exactly MaxTopInferersToReward entries.
activeInferers, err := k.GetActiveInferersForTopic(ctx, topicId)
s.Require().NoError(err)
s.Require().Equal(params.MaxTopInferersToReward, uint64(len(activeInferers)))

// worker2 holds the lowest seeded EMA score (92), so the tracked lowest
// EMA score record must point at it after round 1.
lowestEmaScore, found, err := k.GetLowestInfererScoreEma(ctx, topicId)
s.Require().NoError(err)
s.Require().True(found)
s.Require().Equal(lowestEmaScore.Address, worker2)

// Reset the active worker set, as happens between reward cycles.
err = k.ResetActiveWorkersForTopic(ctx, topicId)
s.Require().NoError(err)

// After the reset the active set is empty...
activeInferers, err = k.GetActiveInferersForTopic(ctx, topicId)
s.Require().NoError(err)
s.Require().Empty(activeInferers)

// ...but the lowest-EMA-score record survives the reset unchanged.
lowestEmaScore, found, err = k.GetLowestInfererScoreEma(ctx, topicId)
s.Require().NoError(err)
s.Require().True(found)
s.Require().Equal(lowestEmaScore.Address, worker2)

// Round 2, one epoch later: worker1 drops out and worker6 (highest seeded
// score, 96) joins; again five submitters compete for 4 active slots.
blockHeightInferences = blockHeightInferences + topic.EpochLength
allInferences = types.Inferences{
Inferences: []*types.Inference{
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker2, Value: alloraMath.MustNewDecFromString("0.22")},
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker3, Value: alloraMath.MustNewDecFromString("0.23")},
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker4, Value: alloraMath.MustNewDecFromString("0.24")},
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker5, Value: alloraMath.MustNewDecFromString("0.25")},
{TopicId: topicId, BlockHeight: blockHeightInferences, Inferer: worker6, Value: alloraMath.MustNewDecFromString("0.26")},
},
}
nonce.BlockHeight++
for _, inference := range allInferences.Inferences {
err = k.AppendInference(ctx, topic, nonce.BlockHeight, inference, params.MaxTopInferersToReward)
s.Require().NoError(err)
}

// The rebuilt active set must again respect the cap.
activeInferers, err = k.GetActiveInferersForTopic(ctx, topicId)
s.Require().NoError(err)
s.Require().Equal(params.MaxTopInferersToReward, uint64(len(activeInferers)))

// The test expects worker2 (lowest score, 92) to have been displaced, so
// worker3 (93) is now the lowest scorer among the active inferers.
// NOTE(review): this depends on AppendInference's eviction logic — the
// keeper.go change in this same commit — confirm against that code.
lowestEmaScore, found, err = k.GetLowestInfererScoreEma(ctx, topicId)
s.Require().NoError(err)
s.Require().True(found)
s.Require().Equal(lowestEmaScore.Address, worker3)
}

func mockUninitializedParams() types.Params {
return types.Params{
Version: "v2",
Expand Down

0 comments on commit c091ee2

Please sign in to comment.