Skip to content

Commit

Permalink
wip: updated weights
Browse files Browse the repository at this point in the history
  • Loading branch information
functor-flow committed Nov 20, 2024
1 parent cd1ea32 commit 407e2a1
Show file tree
Hide file tree
Showing 5 changed files with 232 additions and 111 deletions.
1 change: 0 additions & 1 deletion pallets/offworker/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
// TODO:
// make sure that not only yuma subnets work
#![cfg_attr(not(feature = "std"), no_std)]

Expand Down
69 changes: 51 additions & 18 deletions pallets/subnet_emission/src/decryption.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ use crate::{
};
use pallet_subspace::UseWeightsEncryption;
use sp_runtime::traits::Get;
use sp_std::collections::btree_map::BTreeMap;

use subnet_consensus::util::params::ModuleKey;
use types::KeylessBlockWeights;

Expand Down Expand Up @@ -136,8 +138,8 @@ impl<T: Config> Pallet<T> {
netuid
);

Self::update_decrypted_weights(netuid, valid_weights);
match Self::process_decrypted_weights(netuid) {
let weights = Self::update_decrypted_weights(netuid, valid_weights);
match Self::process_decrypted_weights(netuid, weights) {
Ok(()) => {
log::info!("decrypted weights have been processed for {netuid}")
}
Expand All @@ -150,8 +152,10 @@ impl<T: Config> Pallet<T> {

/// TODO: For this fn to work properly make sure that the decrypted weights extend their first
/// weights by the `Weights` and then "continue extending themselves"
fn process_decrypted_weights(netuid: u16) -> Result<(), &'static str> {
let weights = DecryptedWeights::<T>::get(netuid);
fn process_decrypted_weights(
netuid: u16,
weights: Option<Vec<KeylessBlockWeights>>,
) -> Result<(), &'static str> {
if let Some(weights) = weights {
// Sorts from oldest weights to newest
let mut sorted_weights = weights;
Expand Down Expand Up @@ -270,19 +274,50 @@ impl<T: Config> Pallet<T> {
Self::validate_input(uid, &uids, &values, netuid).ok()
}

/// TODO: we should be able to get rid of the `DecryptedWeights` and return the result directly
/// for processing. if we extend with the `Weights` storage, everything should work as expected
/// Updates and combines weights from storage with newly validated weights.
///
/// TODO: the first decrypted weights here need to be extended by the
/// `Weights` storage. This should not be a problem even for activity cutoff, as the last
/// updates are "cached" in the consensus parameters, therefore the consensus is able to
/// determine which of those standalone extended weights would still be valid
fn update_decrypted_weights(netuid: u16, valid_weights: Vec<KeylessBlockWeights>) {
// Extends cached weights with new weights, including blocks with empty weight vectors
DecryptedWeights::<T>::mutate(netuid, |cached| match cached {
Some(cached) => cached.extend(valid_weights),
None => *cached = Some(valid_weights),
});
/// For a given network UID, this function:
/// 1. Collects existing weights from storage
/// 2. For each validator in valid_weights, either:
/// - Overwrites their existing weights if they already set weights
/// - Adds new weights if they haven't set weights before
///
/// # Arguments
/// * `netuid` - The network UID to update weights for
/// * `valid_weights` - New validated weights in format (block_number, vec[(validator, vec[(src,
/// dst)])])
///
/// # Returns
/// * `Option<Vec<KeylessBlockWeights>>` - Combined weights organized by block number
pub fn update_decrypted_weights(
    netuid: u16,
    valid_weights: Vec<KeylessBlockWeights>,
) -> Option<Vec<KeylessBlockWeights>> {
    // Baseline weights already present in storage for this subnet, keyed by
    // validator uid. Built once up front; each block below starts from a clone
    // of this map instead of re-collecting the storage snapshot per iteration.
    let baseline: BTreeMap<u16, Vec<(u16, u16)>> = Weights::<T>::iter_prefix(netuid).collect();

    // For every decrypted block, overlay that block's validated weights on the
    // baseline: a validator present in `new_weights` overwrites its stored
    // entry, every other validator keeps the value from storage.
    let result: Vec<KeylessBlockWeights> = valid_weights
        .into_iter()
        .map(|(block, new_weights)| {
            let mut weight_map = baseline.clone();
            // BTreeMap::extend inserts each (uid, weights) pair, replacing any
            // existing entry for the same uid — same semantics as the previous
            // per-pair `insert` loop.
            weight_map.extend(new_weights);
            (block, weight_map.into_iter().collect())
        })
        .collect();

    Some(result)
}

fn rotate_decryption_node_if_needed(netuid: u16, info: SubnetDecryptionInfo<T>) {
Expand Down Expand Up @@ -431,8 +466,6 @@ impl<T: Config> Pallet<T> {
) -> u64 {
// --- Cleanup The Core ---

// Clear decrypted weights
DecryptedWeights::<T>::remove(subnet_id);
// Clear hashes & encrypted weights
let _ = WeightEncryptionData::<T>::clear_prefix(subnet_id, u32::MAX, None);
// Sum up and clear ConsensusParameters
Expand Down
5 changes: 0 additions & 5 deletions pallets/subnet_emission/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ pub mod pallet {
use pallet_subnet_emission_api::SubnetConsensus;
use pallet_subspace::{define_module_includes, define_subnet_includes, TotalStake};
use subnet_pricing::root::RootPricing;
use types::KeylessBlockWeights;

#[cfg(feature = "testnet")]
const STORAGE_VERSION: StorageVersion = StorageVersion::new(13);
Expand Down Expand Up @@ -119,7 +118,6 @@ pub mod pallet {
ConsensusParameters
},
maps: {
DecryptedWeights,
SubnetDecryptionData,
SubnetConsensusType
}
Expand Down Expand Up @@ -150,9 +148,6 @@ pub mod pallet {
pub type WeightEncryptionData<T> =
StorageDoubleMap<_, Identity, u16, Identity, u16, EncryptionMechanism>;

#[pallet::storage]
pub type DecryptedWeights<T> = StorageMap<_, Identity, u16, Vec<KeylessBlockWeights>>;

/// Decryption Node Info assigned to subnet
#[pallet::storage]
pub type SubnetDecryptionData<T> = StorageMap<_, Identity, u16, SubnetDecryptionInfo<T>>;
Expand Down
96 changes: 96 additions & 0 deletions tests/src/encryption.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use std::iter::zip;

use crate::mock::*;
use pallet_subnet_emission::Weights;
use rand::Rng;
// use rand::{rngs::OsRng, thread_rng, Rng};
// use rsa::{traits::PublicKeyParts, BigUint, Pkcs1v15Encrypt};
Expand Down Expand Up @@ -82,3 +83,98 @@ fn weights_to_blob(weights: &[(u16, u16)]) -> Vec<u8> {

encoded
}

#[test]
fn test_update_decrypted_weights() {
    new_test_ext().execute_with(|| {
        let netuid = 1u16;

        // Set up baseline weights in storage for validators 2 and 3.
        Weights::<Test>::insert(netuid, 2u16, vec![(1u16, 2u16)]);
        Weights::<Test>::insert(netuid, 3u16, vec![(2u16, 3u16)]);

        // Verify initial storage state before calling the function under test.
        let initial_storage: Vec<_> = Weights::<Test>::iter().collect();

        assert_eq!(
            initial_storage,
            vec![
                (netuid, 2u16, vec![(1u16, 2u16)]),
                (netuid, 3u16, vec![(2u16, 3u16)])
            ]
        );

        // Create new valid weights for two distinct blocks, from validators
        // that have no baseline entry (uids 5 and 6).
        let block_number_1 = 100u64;
        let block_number_2 = 200u64;
        let new_weights_1 = vec![(5u16, vec![(10u16, 20u16)])];
        let new_weights_2 = vec![(6u16, vec![(15u16, 25u16)])];

        let valid_weights = vec![
            (block_number_1, new_weights_1),
            (block_number_2, new_weights_2),
        ];

        // Call the function under test.
        let result =
            pallet_subnet_emission::Pallet::<Test>::update_decrypted_weights(netuid, valid_weights);

        // Persist the combined weights back to storage, as the caller would.
        // Borrow instead of cloning the whole result just to iterate it.
        if let Some(weights_map) = result.as_ref() {
            weights_map.iter().for_each(|(_, inner_weights)| {
                inner_weights.iter().for_each(|(uid, weights)| {
                    Weights::<Test>::set(netuid, *uid, Some(weights.clone()));
                });
            });
        }

        // Verify results
        let updated_weights = result.unwrap();

        // Check block 100: baseline uids 2 and 3 plus the new uid 5.
        let (_, block_weights_1) = &updated_weights[0];
        assert!(block_weights_1.contains(&(2u16, vec![(1u16, 2u16)])));
        assert!(block_weights_1.contains(&(3u16, vec![(2u16, 3u16)])));
        assert!(block_weights_1.contains(&(5u16, vec![(10u16, 20u16)])));
        assert_eq!(block_weights_1.len(), 3); // Only baseline + block 100 weights

        // Check block 200: baseline uids 2 and 3 plus the new uid 6.
        let (_, block_weights_2) = &updated_weights[1];
        assert!(block_weights_2.contains(&(2u16, vec![(1u16, 2u16)])));
        assert!(block_weights_2.contains(&(3u16, vec![(2u16, 3u16)])));
        assert!(block_weights_2.contains(&(6u16, vec![(15u16, 25u16)])));
        assert_eq!(block_weights_2.len(), 3); // Only baseline + block 200 weights

        // Second round: overwrite already existing weights for both blocks
        // (uid 2 in block 100, uid 3 in block 200).
        let new_weights_1 = vec![(2u16, vec![(30u16, 30u16)])];
        let new_weights_2 = vec![(3u16, vec![(40u16, 40u16)])];

        let new_valid_weights = vec![
            (block_number_1, new_weights_1),
            (block_number_2, new_weights_2),
        ];

        let result = pallet_subnet_emission::Pallet::<Test>::update_decrypted_weights(
            netuid,
            new_valid_weights,
        );

        let updated_weights = result.unwrap();

        // Check updated block 100
        let (_, block_weights_1) = &updated_weights[0];
        assert!(block_weights_1.contains(&(2u16, vec![(30u16, 30u16)]))); // New value for uid 2
        assert!(block_weights_1.contains(&(3u16, vec![(2u16, 3u16)]))); // Unchanged
        assert!(block_weights_1.contains(&(5u16, vec![(10u16, 20u16)]))); // From first update
        assert_eq!(block_weights_1.len(), 4);

        // Check updated block 200
        let (_, block_weights_2) = &updated_weights[1];
        assert!(block_weights_2.contains(&(2u16, vec![(1u16, 2u16)]))); // Unchanged
        assert!(block_weights_2.contains(&(3u16, vec![(40u16, 40u16)]))); // New value for uid 3
        assert!(block_weights_2.contains(&(6u16, vec![(15u16, 25u16)]))); // From first update
        assert_eq!(block_weights_2.len(), 4);
    });
}
Loading

0 comments on commit 407e2a1

Please sign in to comment.