Skip to content

Commit

Permalink
feat: draft - response rate scoring surface
Browse files Browse the repository at this point in the history
- add SCORE_SCALING constant
- update response rate scoring logic
  • Loading branch information
Shr1ftyy committed Jan 23, 2025
1 parent 25d24c4 commit 402479d
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 3 deletions.
3 changes: 3 additions & 0 deletions storb/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@
PIECE_LENGTH_SCALING = 0.5
PIECE_LENGTH_OFFSET = 8.39

# Scaling factor (divisor) applied in the log-based response-rate score formula
SCORE_SCALING = 1

MAX_UPLOAD_SIZE = 1 * 1024 * 1024 * 1024 * 1024 # 1 TiB

# Error correction encoding parameters
Expand Down
24 changes: 21 additions & 3 deletions storb/validator/reward.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import numpy as np

from storb.constants import SCORE_SCALING


def get_response_rate_scores(
self,
Expand Down Expand Up @@ -28,9 +30,25 @@ def get_response_rate_scores(
uids.append(uid)
ret_attempts = max(miner_stats.get("retrieval_attempts", 1), 1)
store_attempts = max(miner_stats.get("store_attempts", 1), 1)
retrieval_rate = abs(miner_stats.get("retrieval_successes", 0) / ret_attempts)
store_rate = abs(miner_stats.get("store_successes", 0) / store_attempts)
weighted_rate_sum = (retrieval_rate / 2) + (store_rate / 2)
retrieval_success_rate = abs(
miner_stats.get("retrieval_successes", 0) / ret_attempts
)
store_success_rate = abs(miner_stats.get("store_successes", 0) / store_attempts)

# TODO: should we "reset" the total attempt counters once they pass some threshold?
# At the moment the log(attempts) factor makes the curve steeper as attempts grow,
# so small changes in success rate have a disproportionately large effect on the score.
retrieval_rate_score = (
(1 / SCORE_SCALING) * np.log(ret_attempts) * np.log(retrieval_success_rate) + 1
)
store_rate_score = (
(1 / SCORE_SCALING) * np.log(store_attempts) * np.log(store_success_rate) + 1
)

retrieval_rate_score = np.clip(retrieval_rate_score, 0, 1)
store_rate_score = np.clip(store_rate_score, 0, 1)

weighted_rate_sum = (retrieval_rate_score / 2) + (store_rate_score / 2)
weighted_rate_sums.append(weighted_rate_sum)

uids = np.array(uids)
Expand Down

0 comments on commit 402479d

Please sign in to comment.