From 2a2cd11216e36796742983d138cd18c5c522e900 Mon Sep 17 00:00:00 2001 From: fi3 Date: Wed, 13 Nov 2024 12:48:52 +0100 Subject: [PATCH 01/27] Update channel factory, coinbase input script handling. Coinbase signature is not part of the Sv2 protocol, some pool maybe want to use it other not. The first part of the extranonce could also be reserved for things that are not a pool signature. This pr rename the pool_signature field of the channel factory into additional_coinbase_script_data and change the type from Strgin to Vec, since can be anything. --- .../src/channel_logic/channel_factory.rs | 33 +++++++++++-------- .../v2/roles-logic-sv2/src/job_creator.rs | 18 +++++----- roles/jd-client/src/lib/downstream.rs | 2 +- .../src/lib/upstream_sv2/upstream.rs | 2 +- roles/pool/src/lib/mining_pool/mod.rs | 2 +- 5 files changed, 31 insertions(+), 26 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 943349ebd2..49161f5617 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -975,7 +975,9 @@ pub struct PoolChannelFactory { inner: ChannelFactory, job_creator: JobsCreators, pool_coinbase_outputs: Vec, - pool_signature: String, + // Additional data that the pool may want to include in the coinbase input script as first part + // of the extranonce. This can be used to put things like the pool signature. 
+ additional_coinbase_script_data: Vec, // extedned_channel_id -> SetCustomMiningJob negotiated_jobs: HashMap, BuildNoHashHasher>, } @@ -988,7 +990,7 @@ impl PoolChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: Vec, - pool_signature: String, + additional_coinbase_script_data: Vec, ) -> Self { let inner = ChannelFactory { ids, @@ -1015,7 +1017,7 @@ impl PoolChannelFactory { inner, job_creator, pool_coinbase_outputs, - pool_signature, + additional_coinbase_script_data, negotiated_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), } } @@ -1083,7 +1085,7 @@ impl PoolChannelFactory { m, true, self.pool_coinbase_outputs.clone(), - self.pool_signature.clone(), + &self.additional_coinbase_script_data, )?; self.inner.on_new_extended_mining_job(new_job) } @@ -1160,10 +1162,13 @@ impl PoolChannelFactory { if self.negotiated_jobs.contains_key(&m.channel_id) { let referenced_job = self.negotiated_jobs.get(&m.channel_id).unwrap(); let merkle_path = referenced_job.merkle_path.to_vec(); - let pool_signature = self.pool_signature.clone(); - let extended_job = - job_creator::extended_job_from_custom_job(referenced_job, pool_signature, 32) - .unwrap(); + let additional_coinbase_script_data = self.additional_coinbase_script_data.clone(); + let extended_job = job_creator::extended_job_from_custom_job( + referenced_job, + additional_coinbase_script_data.as_ref(), + 32, + ) + .unwrap(); let prev_blockhash = crate::utils::u256_to_block_hash(referenced_job.prev_hash.clone()); let bits = referenced_job.nbits; self.inner.check_target( @@ -1295,7 +1300,7 @@ pub struct ProxyExtendedChannelFactory { inner: ChannelFactory, job_creator: Option, pool_coinbase_outputs: Option>, - pool_signature: String, + additional_coinbase_script_data: String, // Id assigned to the extended channel by upstream extended_channel_id: u32, } @@ -1309,7 +1314,7 @@ impl ProxyExtendedChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: 
Option>, - pool_signature: String, + additional_coinbase_script_data: String, extended_channel_id: u32, ) -> Self { match &kind { @@ -1349,7 +1354,7 @@ impl ProxyExtendedChannelFactory { inner, job_creator, pool_coinbase_outputs, - pool_signature, + additional_coinbase_script_data, extended_channel_id, } } @@ -1444,7 +1449,7 @@ impl ProxyExtendedChannelFactory { m, true, pool_coinbase_outputs.clone(), - self.pool_signature.clone(), + self.additional_coinbase_script_data.as_ref(), )?; let id = new_job.job_id; if !new_job.is_future() && self.inner.last_prev_hash.is_some() { @@ -1862,7 +1867,7 @@ mod test { // Initialize a Channel of type Pool let out = TxOut {value: BLOCK_REWARD, script_pubkey: decode_hex("4104c6d0969c2d98a5c19ba7c36c7937c5edbd60ff2a01397c4afe54f16cd641667ea0049ba6f9e1796ba3c8e49e1b504c532ebbaaa1010c3f7d9b83a8ea7fd800e2ac").unwrap().into()}; - let pool_signature = "".to_string(); + let additional_coinbase_script_data = "".to_string(); let creator = JobsCreators::new(7); let share_per_min = 1.0; // Create an ExtendedExtranonce of len 7: @@ -1883,7 +1888,7 @@ mod test { share_per_min, channel_kind, vec![out], - pool_signature, + additional_coinbase_script_data, ); // Build a NewTemplate diff --git a/protocols/v2/roles-logic-sv2/src/job_creator.rs b/protocols/v2/roles-logic-sv2/src/job_creator.rs index 1ed6537629..6f1ce1b67a 100644 --- a/protocols/v2/roles-logic-sv2/src/job_creator.rs +++ b/protocols/v2/roles-logic-sv2/src/job_creator.rs @@ -70,7 +70,7 @@ impl JobsCreators { template: &mut NewTemplate, version_rolling_allowed: bool, mut pool_coinbase_outputs: Vec, - pool_signature: String, + additional_coinbase_script_data: &[u8], ) -> Result, Error> { let server_tx_outputs = template.coinbase_tx_outputs.to_vec(); let mut outputs = tx_outputs_to_costum_scripts(&server_tx_outputs); @@ -87,7 +87,7 @@ impl JobsCreators { new_extended_job( template, &mut pool_coinbase_outputs, - pool_signature, + additional_coinbase_script_data, next_job_id, 
version_rolling_allowed, self.extranonce_len, @@ -137,7 +137,7 @@ impl JobsCreators { pub fn extended_job_from_custom_job( referenced_job: &mining_sv2::SetCustomMiningJob, - pool_signature: String, + additional_coinbase_script_data: &[u8], extranonce_len: u8, ) -> Result, Error> { let mut outputs = @@ -158,7 +158,7 @@ pub fn extended_job_from_custom_job( new_extended_job( &mut template, &mut outputs, - pool_signature, + additional_coinbase_script_data, 0, true, extranonce_len, @@ -177,7 +177,7 @@ pub fn extended_job_from_custom_job( fn new_extended_job( new_template: &mut NewTemplate, coinbase_outputs: &mut [TxOut], - pool_signature: String, + additional_coinbase_script_data: &[u8], job_id: u32, version_rolling_allowed: bool, extranonce_len: u8, @@ -193,7 +193,7 @@ fn new_extended_job( .map_err(|_| Error::TxVersionTooBig)?; let bip34_bytes = get_bip_34_bytes(new_template, tx_version)?; - let script_prefix_len = bip34_bytes.len() + pool_signature.as_bytes().len(); + let script_prefix_len = bip34_bytes.len() + additional_coinbase_script_data.len(); let coinbase = coinbase( bip34_bytes, @@ -201,7 +201,7 @@ fn new_extended_job( new_template.coinbase_tx_locktime, new_template.coinbase_tx_input_sequence, coinbase_outputs, - pool_signature, + additional_coinbase_script_data, extranonce_len, ); @@ -327,7 +327,7 @@ fn coinbase( lock_time: u32, sequence: u32, coinbase_outputs: &[TxOut], - pool_signature: String, + additional_coinbase_script_data: &[u8], extranonce_len: u8, ) -> Transaction { // If script_prefix_len is not 0 we are not in a test enviornment and the coinbase have the 0 @@ -336,7 +336,7 @@ fn coinbase( 0 => Witness::from_vec(vec![]), _ => Witness::from_vec(vec![vec![0; 32]]), }; - bip34_bytes.extend_from_slice(pool_signature.as_bytes()); + bip34_bytes.extend_from_slice(additional_coinbase_script_data); bip34_bytes.extend_from_slice(&vec![0; extranonce_len as usize]); let tx_in = TxIn { previous_output: OutPoint::null(), diff --git 
a/roles/jd-client/src/lib/downstream.rs b/roles/jd-client/src/lib/downstream.rs index 82a8076022..59703491fa 100644 --- a/roles/jd-client/src/lib/downstream.rs +++ b/roles/jd-client/src/lib/downstream.rs @@ -492,7 +492,7 @@ impl share_per_min, kind, coinbase_outputs, - "SOLO".to_string(), + "SOLO".as_bytes().to_vec(), ); self.status.set_channel(channel_factory); diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index db580f563b..3dc7b33915 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -564,7 +564,7 @@ impl ParseUpstreamMiningMessages Result, RolesLogicError> { info!("Receive open extended mining channel success"); let ids = Arc::new(Mutex::new(roles_logic_sv2::utils::GroupId::new())); - let pool_signature = self.pool_signature.clone(); + let pool_signature = self.pool_signature.clone().into(); let prefix_len = m.extranonce_prefix.to_vec().len(); let self_len = 0; let total_len = prefix_len + m.extranonce_size as usize; diff --git a/roles/pool/src/lib/mining_pool/mod.rs b/roles/pool/src/lib/mining_pool/mod.rs index 2b179c7885..d9e426e5db 100644 --- a/roles/pool/src/lib/mining_pool/mod.rs +++ b/roles/pool/src/lib/mining_pool/mod.rs @@ -618,7 +618,7 @@ impl Pool { share_per_min, kind, pool_coinbase_outputs.expect("Invalid coinbase output in config"), - config.pool_signature.clone(), + config.pool_signature.clone().into_bytes(), ))); let pool = Arc::new(Mutex::new(Pool { downstreams: HashMap::with_hasher(BuildNoHashHasher::default()), From e06375c82b6445231d447b71d35bd4dc8ada0337 Mon Sep 17 00:00:00 2001 From: fi3 Date: Wed, 13 Nov 2024 18:30:23 +0100 Subject: [PATCH 02/27] Update channel factory, coinbase input script handling. The coinbase input script additional data should be sent as part of the extranonce_prefix and not as part of the coinbase_prefix. 
So that a JDC can see what the pool want as coinbase input script additional data without the need to observ the coinbase prefix in job constructed by the pool. --- .../src/channel_logic/channel_factory.rs | 119 +++++++++++++----- .../roles-logic-sv2/src/channel_logic/mod.rs | 6 +- .../src/channel_logic/proxy_group_channel.rs | 3 + .../v2/roles-logic-sv2/src/job_creator.rs | 32 +++-- .../v2/roles-logic-sv2/src/job_dispatcher.rs | 7 +- protocols/v2/roles-logic-sv2/src/utils.rs | 32 ++++- protocols/v2/subprotocols/mining/src/lib.rs | 26 +++- roles/mining-proxy/src/lib/upstream_mining.rs | 1 - roles/test-utils/mining-device-sv1/src/job.rs | 1 + roles/translator/src/lib/proxy/bridge.rs | 1 - 10 files changed, 159 insertions(+), 69 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 49161f5617..18f289a46a 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -224,15 +224,20 @@ impl ChannelFactory { downstream_hash_rate: f32, is_header_only: bool, id: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result, Error> { match is_header_only { - true => { - self.new_standard_channel_for_hom_downstream(request_id, downstream_hash_rate, id) - } + true => self.new_standard_channel_for_hom_downstream( + request_id, + downstream_hash_rate, + id, + additional_coinbase_script_data, + ), false => self.new_standard_channel_for_non_hom_downstream( request_id, downstream_hash_rate, id, + additional_coinbase_script_data, ), } } @@ -248,6 +253,7 @@ impl ChannelFactory { request_id: u32, hash_rate: f32, min_extranonce_size: u16, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result>, Error> { let extended_channels_group = 0; let max_extranonce_size = self.extranonces.get_range2_len() as u16; @@ -278,7 +284,10 @@ impl ChannelFactory { 
.next_extended(max_extranonce_size as usize) .unwrap(); let extranonce_prefix = extranonce - .into_prefix(self.extranonces.get_prefix_len()) + .into_prefix( + self.extranonces.get_prefix_len(), + additional_coinbase_script_data.unwrap_or(&[]), + ) .unwrap(); let success = OpenExtendedMiningChannelSuccess { request_id, @@ -344,6 +353,7 @@ impl ChannelFactory { request_id: u32, downstream_hash_rate: f32, id: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result, Error> { let hom_group_id = 0; let mut result = vec![]; @@ -384,7 +394,11 @@ impl ChannelFactory { group_channel_id: hom_group_id, }, )); - self.prepare_standard_jobs_and_p_hash(&mut result, channel_id)?; + self.prepare_standard_jobs_and_p_hash( + &mut result, + channel_id, + additional_coinbase_script_data, + )?; self.channel_to_group_id.insert(channel_id, hom_group_id); Ok(result) } @@ -396,6 +410,7 @@ impl ChannelFactory { request_id: u32, downstream_hash_rate: f32, group_id: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result, Error> { let mut result = vec![]; let channel_id = self @@ -429,6 +444,14 @@ impl ChannelFactory { self.standard_channels_for_non_hom_downstreams .insert(complete_id, standard_channel); + let extranonce = match additional_coinbase_script_data { + Some(data) => { + let mut data = data.to_vec(); + data.extend_from_slice(extranonce.as_ref()); + extranonce + } + None => extranonce, + }; // First message to be sent is OpenStandardMiningChannelSuccess result.push(Mining::OpenStandardMiningChannelSuccess( OpenStandardMiningChannelSuccess { @@ -450,6 +473,7 @@ impl ChannelFactory { &mut self, result: &mut Vec, channel_id: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result<(), Error> { // Safe cause the function is private and we always add the channel before calling this // funtion @@ -466,9 +490,10 @@ impl ChannelFactory { .map(|j| { extended_to_standard_job( &j.0, - &standard_channel.extranonce.clone().to_vec()[..], + 
standard_channel.extranonce.as_ref(), standard_channel.channel_id, Some(job_id), + additional_coinbase_script_data, ) }) .collect(); @@ -478,9 +503,10 @@ impl ChannelFactory { Some((j, _)) => Some( extended_to_standard_job( j, - &standard_channel.extranonce.clone().to_vec(), + standard_channel.extranonce.as_ref(), standard_channel.channel_id, Some(self.job_ids.next()), + additional_coinbase_script_data, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot)?, ), @@ -677,11 +703,16 @@ impl ChannelFactory { fn on_new_extended_mining_job( &mut self, m: NewExtendedMiningJob<'static>, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result, BuildNoHashHasher>, Error> { match (m.is_future(), &self.last_prev_hash) { (true, _) => { let mut result = HashMap::with_hasher(BuildNoHashHasher::default()); - self.prepare_jobs_for_downstream_on_new_extended(&mut result, &m)?; + self.prepare_jobs_for_downstream_on_new_extended( + &mut result, + &m, + additional_coinbase_script_data, + )?; let mut ids = vec![]; for complete_id in self.standard_channels_for_non_hom_downstreams.keys() { let group_id = GroupId::into_group_id(*complete_id); @@ -694,7 +725,11 @@ impl ChannelFactory { } (false, Some(_)) => { let mut result = HashMap::with_hasher(BuildNoHashHasher::default()); - self.prepare_jobs_for_downstream_on_new_extended(&mut result, &m)?; + self.prepare_jobs_for_downstream_on_new_extended( + &mut result, + &m, + additional_coinbase_script_data, + )?; // If job is not future it must always be paired with the last received prev hash let mut ids = vec![]; for complete_id in self.standard_channels_for_non_hom_downstreams.keys() { @@ -722,14 +757,16 @@ impl ChannelFactory { &mut self, result: &mut HashMap>, m: &NewExtendedMiningJob<'static>, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result<(), Error> { for (id, channel) in &self.standard_channels_for_hom_downstreams { let job_id = self.job_ids.next(); let mut standard_job = extended_to_standard_job( m, - 
&channel.extranonce.clone().to_vec()[..], + channel.extranonce.as_ref(), *id, Some(job_id), + additional_coinbase_script_data, ) .unwrap(); standard_job.channel_id = *id; @@ -769,6 +806,7 @@ impl ChannelFactory { coinbase_tx_suffix: &[u8], prev_blockhash: hash_types::BlockHash, bits: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result { debug!("Checking target for share {:?}", m); let upstream_target = match &self.kind { @@ -806,6 +844,7 @@ impl ChannelFactory { coinbase_tx_suffix, &extranonce[..], &merkle_path[..], + additional_coinbase_script_data.unwrap_or(&[]), ) .ok_or(Error::InvalidCoinbase)? .try_into() @@ -1029,8 +1068,13 @@ impl PoolChannelFactory { is_header_only: bool, id: u32, ) -> Result, Error> { - self.inner - .add_standard_channel(request_id, downstream_hash_rate, is_header_only, id) + self.inner.add_standard_channel( + request_id, + downstream_hash_rate, + is_header_only, + id, + Some(&self.additional_coinbase_script_data), + ) } /// Calls [`ChannelFactory::new_extended_channel`] pub fn new_extended_channel( @@ -1039,8 +1083,12 @@ impl PoolChannelFactory { hash_rate: f32, min_extranonce_size: u16, ) -> Result>, Error> { - self.inner - .new_extended_channel(request_id, hash_rate, min_extranonce_size) + self.inner.new_extended_channel( + request_id, + hash_rate, + min_extranonce_size, + Some(&self.additional_coinbase_script_data), + ) } /// Called when we want to replicate a channel already opened by another actor. /// is used only in the jd client from the template provider module to mock a pool. @@ -1085,9 +1133,10 @@ impl PoolChannelFactory { m, true, self.pool_coinbase_outputs.clone(), - &self.additional_coinbase_script_data, + self.additional_coinbase_script_data.len() as u8, )?; - self.inner.on_new_extended_mining_job(new_job) + self.inner + .on_new_extended_mining_job(new_job, Some(&self.additional_coinbase_script_data)) } /// Called when a `SubmitSharesStandard` message is received from the downstream. 
We check the /// shares against the channel's respective target and return `OnNewShare` to let us know if @@ -1131,6 +1180,7 @@ impl PoolChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + Some(&self.additional_coinbase_script_data), ) } None => { @@ -1162,11 +1212,10 @@ impl PoolChannelFactory { if self.negotiated_jobs.contains_key(&m.channel_id) { let referenced_job = self.negotiated_jobs.get(&m.channel_id).unwrap(); let merkle_path = referenced_job.merkle_path.to_vec(); - let additional_coinbase_script_data = self.additional_coinbase_script_data.clone(); let extended_job = job_creator::extended_job_from_custom_job( referenced_job, - additional_coinbase_script_data.as_ref(), - 32, + self.additional_coinbase_script_data.len() as u8, + self.inner.extranonces.get_len() as u8, ) .unwrap(); let prev_blockhash = crate::utils::u256_to_block_hash(referenced_job.prev_hash.clone()); @@ -1181,6 +1230,7 @@ impl PoolChannelFactory { extended_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + Some(&self.additional_coinbase_script_data), ) } else { let referenced_job = self @@ -1215,6 +1265,7 @@ impl PoolChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + Some(&self.additional_coinbase_script_data), ) } } @@ -1300,7 +1351,6 @@ pub struct ProxyExtendedChannelFactory { inner: ChannelFactory, job_creator: Option, pool_coinbase_outputs: Option>, - additional_coinbase_script_data: String, // Id assigned to the extended channel by upstream extended_channel_id: u32, } @@ -1314,7 +1364,6 @@ impl ProxyExtendedChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: Option>, - additional_coinbase_script_data: String, extended_channel_id: u32, ) -> Self { match &kind { @@ -1354,7 +1403,6 @@ impl ProxyExtendedChannelFactory { inner, job_creator, pool_coinbase_outputs, - additional_coinbase_script_data, extended_channel_id, } } @@ -1367,7 +1415,7 @@ impl ProxyExtendedChannelFactory { id: 
u32, ) -> Result, Error> { self.inner - .add_standard_channel(request_id, downstream_hash_rate, id_header_only, id) + .add_standard_channel(request_id, downstream_hash_rate, id_header_only, id, None) } /// Calls [`ChannelFactory::new_extended_channel`] pub fn new_extended_channel( @@ -1377,7 +1425,7 @@ impl ProxyExtendedChannelFactory { min_extranonce_size: u16, ) -> Result, Error> { self.inner - .new_extended_channel(request_id, hash_rate, min_extranonce_size) + .new_extended_channel(request_id, hash_rate, min_extranonce_size, None) } /// Called only when a new prev hash is received by a Template Provider when job declaration is /// used. It matches the message with a `job_id`, creates a new custom job, and calls @@ -1445,12 +1493,7 @@ impl ProxyExtendedChannelFactory { self.job_creator.as_mut(), self.pool_coinbase_outputs.as_mut(), ) { - let new_job = job_creator.on_new_template( - m, - true, - pool_coinbase_outputs.clone(), - self.additional_coinbase_script_data.as_ref(), - )?; + let new_job = job_creator.on_new_template(m, true, pool_coinbase_outputs.clone(), 0)?; let id = new_job.job_id; if !new_job.is_future() && self.inner.last_prev_hash.is_some() { let prev_hash = self.last_prev_hash().unwrap(); @@ -1473,7 +1516,7 @@ impl ProxyExtendedChannelFactory { future_job: m.future_template, }; return Ok(( - self.inner.on_new_extended_mining_job(new_job)?, + self.inner.on_new_extended_mining_job(new_job, None)?, Some(custom_mining_job), id, )); @@ -1482,7 +1525,11 @@ impl ProxyExtendedChannelFactory { .future_templates .insert(new_job.job_id, m.clone()); } - Ok((self.inner.on_new_extended_mining_job(new_job)?, None, id)) + Ok(( + self.inner.on_new_extended_mining_job(new_job, None)?, + None, + id, + )) } else { panic!("Either channel factory has no job creator or pool_coinbase_outputs are not yet set") } @@ -1551,6 +1598,7 @@ impl ProxyExtendedChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + None, ) } else { let bitcoin_target = 
[0; 32]; @@ -1577,6 +1625,7 @@ impl ProxyExtendedChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + None, ) } } @@ -1632,6 +1681,7 @@ impl ProxyExtendedChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + None, ) } else { let bitcoin_target = [0; 32]; @@ -1658,6 +1708,7 @@ impl ProxyExtendedChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + None, ) } } @@ -1690,7 +1741,7 @@ impl ProxyExtendedChannelFactory { &mut self, m: NewExtendedMiningJob<'static>, ) -> Result, BuildNoHashHasher>, Error> { - self.inner.on_new_extended_mining_job(m) + self.inner.on_new_extended_mining_job(m, None) } pub fn set_target(&mut self, new_target: &mut Target) { self.inner.kind.set_target(new_target); @@ -1888,7 +1939,7 @@ mod test { share_per_min, channel_kind, vec![out], - additional_coinbase_script_data, + additional_coinbase_script_data.into_bytes(), ); // Build a NewTemplate diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/mod.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/mod.rs index 7b5f0feed7..d66a64d69e 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/mod.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/mod.rs @@ -7,15 +7,17 @@ use std::convert::TryInto; /// convert extended to standard job by calculating the merkle root pub fn extended_to_standard_job<'a>( extended: &NewExtendedMiningJob, - coinbase_script: &[u8], + extranonce: &[u8], channel_id: u32, job_id: Option, + additional_coinbase_script_data: Option<&[u8]>, ) -> Option> { let merkle_root = crate::utils::merkle_root_from_path( extended.coinbase_tx_prefix.inner_as_ref(), extended.coinbase_tx_suffix.inner_as_ref(), - coinbase_script, + extranonce, &extended.merkle_path.inner_as_ref(), + additional_coinbase_script_data.unwrap_or(&[]), ); Some(NewMiningJob { diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/proxy_group_channel.rs 
b/protocols/v2/roles-logic-sv2/src/channel_logic/proxy_group_channel.rs index 65f4cdb3d8..cee3d160e7 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/proxy_group_channel.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/proxy_group_channel.rs @@ -115,6 +115,7 @@ impl GroupChannel { &channel.extranonce.clone().to_vec(), channel.channel_id, None, + None, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot)?; res.push(Mining::NewMiningJob(standard_job)); @@ -126,6 +127,7 @@ impl GroupChannel { &channel.extranonce.clone().to_vec(), channel.channel_id, None, + None, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot)?; @@ -192,6 +194,7 @@ impl GroupChannel { &downstream.extranonce.clone().to_vec(), downstream.channel_id, None, + None, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot) } diff --git a/protocols/v2/roles-logic-sv2/src/job_creator.rs b/protocols/v2/roles-logic-sv2/src/job_creator.rs index 6f1ce1b67a..0e65c35b35 100644 --- a/protocols/v2/roles-logic-sv2/src/job_creator.rs +++ b/protocols/v2/roles-logic-sv2/src/job_creator.rs @@ -70,7 +70,7 @@ impl JobsCreators { template: &mut NewTemplate, version_rolling_allowed: bool, mut pool_coinbase_outputs: Vec, - additional_coinbase_script_data: &[u8], + additional_coinbase_script_data_len: u8, ) -> Result, Error> { let server_tx_outputs = template.coinbase_tx_outputs.to_vec(); let mut outputs = tx_outputs_to_costum_scripts(&server_tx_outputs); @@ -87,7 +87,7 @@ impl JobsCreators { new_extended_job( template, &mut pool_coinbase_outputs, - additional_coinbase_script_data, + additional_coinbase_script_data_len, next_job_id, version_rolling_allowed, self.extranonce_len, @@ -137,7 +137,7 @@ impl JobsCreators { pub fn extended_job_from_custom_job( referenced_job: &mining_sv2::SetCustomMiningJob, - additional_coinbase_script_data: &[u8], + additional_coinbase_script_data_len: u8, extranonce_len: u8, ) -> Result, Error> { let mut outputs = @@ -158,7 +158,7 @@ pub fn extended_job_from_custom_job( new_extended_job( 
&mut template, &mut outputs, - additional_coinbase_script_data, + additional_coinbase_script_data_len, 0, true, extranonce_len, @@ -177,7 +177,7 @@ pub fn extended_job_from_custom_job( fn new_extended_job( new_template: &mut NewTemplate, coinbase_outputs: &mut [TxOut], - additional_coinbase_script_data: &[u8], + additional_coinbase_script_data_len: u8, job_id: u32, version_rolling_allowed: bool, extranonce_len: u8, @@ -193,7 +193,7 @@ fn new_extended_job( .map_err(|_| Error::TxVersionTooBig)?; let bip34_bytes = get_bip_34_bytes(new_template, tx_version)?; - let script_prefix_len = bip34_bytes.len() + additional_coinbase_script_data.len(); + let script_prefix_len = bip34_bytes.len(); let coinbase = coinbase( bip34_bytes, @@ -201,7 +201,7 @@ fn new_extended_job( new_template.coinbase_tx_locktime, new_template.coinbase_tx_input_sequence, coinbase_outputs, - additional_coinbase_script_data, + additional_coinbase_script_data_len, extranonce_len, ); @@ -327,7 +327,7 @@ fn coinbase( lock_time: u32, sequence: u32, coinbase_outputs: &[TxOut], - additional_coinbase_script_data: &[u8], + additional_coinbase_script_data_len: u8, extranonce_len: u8, ) -> Transaction { // If script_prefix_len is not 0 we are not in a test enviornment and the coinbase have the 0 @@ -336,7 +336,7 @@ fn coinbase( 0 => Witness::from_vec(vec![]), _ => Witness::from_vec(vec![vec![0; 32]]), }; - bip34_bytes.extend_from_slice(additional_coinbase_script_data); + bip34_bytes.extend_from_slice(&vec![0_u8; additional_coinbase_script_data_len as usize]); bip34_bytes.extend_from_slice(&vec![0; extranonce_len as usize]); let tx_in = TxIn { previous_output: OutPoint::null(), @@ -421,9 +421,9 @@ impl StrippedCoinbaseTx { } /// the coinbase tx prefix is the LE bytes concatenation of the tx version and all - /// of the tx inputs minus the 32 bytes after the bip34 bytes in the script + /// of the tx inputs minus the extranonce bytes after the bip34 bytes in the script /// and the last input's sequence (used as the 
first entry in the coinbase tx suffix). - /// The last 32 bytes after the bip34 bytes in the script will be used to allow extranonce + /// The last bytes after the bip34 bytes in the script will be used to allow extranonce /// space for the miner. We remove the bip141 marker and flag since it is only used for /// computing the `wtxid` and the legacy `txid` is what is used for computing the merkle root // clippy allow because we dont want to consume self @@ -557,7 +557,7 @@ pub mod tests { let mut jobs_creators = JobsCreators::new(32); let job = jobs_creators - .on_new_template(template.borrow_mut(), false, vec![out], "".to_string()) + .on_new_template(template.borrow_mut(), false, vec![out]) .unwrap(); assert_eq!( @@ -581,8 +581,7 @@ pub mod tests { assert_eq!(jobs_creators.lasts_new_template.len(), 0); - let _ = - jobs_creators.on_new_template(template.borrow_mut(), false, vec![out], "".to_string()); + let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out]); assert_eq!(jobs_creators.lasts_new_template.len(), 1); assert_eq!(jobs_creators.lasts_new_template[0], template); @@ -616,8 +615,7 @@ pub mod tests { let mut jobs_creators = JobsCreators::new(32); //Create a template - let _ = - jobs_creators.on_new_template(template.borrow_mut(), false, vec![out], "".to_string()); + let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out]); let test_id = template.template_id; // Create a SetNewPrevHash with matching template_id @@ -705,7 +703,7 @@ pub mod tests { let extranonce = &[0_u8; 32]; let path: &[binary_sv2::U256] = &[]; let stripped_merkle_root = - merkle_root_from_path(&prefix[..], &suffix[..], extranonce, path).unwrap(); + merkle_root_from_path(&prefix[..], &suffix[..], extranonce, path, &[]).unwrap(); let og_merkle_root = coinbase.txid().to_vec(); assert!( stripped_merkle_root == og_merkle_root, diff --git a/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs b/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs index 
019f9f2742..a0f4cb6172 100644 --- a/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs +++ b/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs @@ -18,8 +18,7 @@ use std::{collections::HashMap, convert::TryInto, sync::Arc}; use stratum_common::bitcoin::hashes::{sha256d, Hash, HashEngine}; -/// Used to convert an extended mining job to a standard mining job. The `extranonce` field must -/// be exactly 32 bytes. +/// Used to convert an extended mining job to a standard mining job pub fn extended_to_standard_job_for_group_channel<'a>( extended: &NewExtendedMiningJob, extranonce: &[u8], @@ -31,6 +30,7 @@ pub fn extended_to_standard_job_for_group_channel<'a>( extended.coinbase_tx_suffix.inner_as_ref(), extranonce, &extended.merkle_path.inner_as_ref(), + &[], ); Some(NewMiningJob { @@ -322,7 +322,7 @@ mod tests { template.template_id = template.template_id % u64::MAX; template.future_template = true; let extended_mining_job = jobs_creators - .on_new_template(&mut template, false, vec![out], pool_signature) + .on_new_template(&mut template, false, vec![out], pool_signature.len() as u8) .expect("Failed to create new job"); // create GroupChannelJobDispatcher @@ -381,6 +381,7 @@ mod tests { extended_mining_job.coinbase_tx_suffix.inner_as_ref(), extranonce.to_vec().as_slice(), &extended_mining_job.merkle_path.inner_as_ref(), + &[], ) .unwrap(); // Assertions diff --git a/protocols/v2/roles-logic-sv2/src/utils.rs b/protocols/v2/roles-logic-sv2/src/utils.rs index d4b6f89441..d85fef1c87 100644 --- a/protocols/v2/roles-logic-sv2/src/utils.rs +++ b/protocols/v2/roles-logic-sv2/src/utils.rs @@ -142,10 +142,16 @@ pub fn merkle_root_from_path>( coinbase_tx_suffix: &[u8], extranonce: &[u8], path: &[T], + additional_coinbase_script_data: &[u8], ) -> Option> { - let mut coinbase = - Vec::with_capacity(coinbase_tx_prefix.len() + coinbase_tx_suffix.len() + extranonce.len()); + let mut coinbase = Vec::with_capacity( + coinbase_tx_prefix.len() + + coinbase_tx_suffix.len() + + 
extranonce.len() + + additional_coinbase_script_data.len(), + ); coinbase.extend_from_slice(coinbase_tx_prefix); + coinbase.extend_from_slice(additional_coinbase_script_data); coinbase.extend_from_slice(extranonce); coinbase.extend_from_slice(coinbase_tx_suffix); let coinbase = match Transaction::deserialize(&coinbase[..]) { @@ -549,6 +555,7 @@ fn test_merkle_root_from_path() { &coinbase_bytes[30..], &coinbase_bytes[20..30], &path, + &[], ) .unwrap(); assert_eq!(expected_root, root); @@ -565,13 +572,20 @@ fn test_merkle_root_from_path() { &coinbase_bytes[30..], &coinbase_bytes[20..30], &path, + &[], ) .unwrap(); assert_eq!(coinbase_id, root); //Target None return path on serialization assert_eq!( - merkle_root_from_path(&coinbase_bytes, &coinbase_bytes, &coinbase_bytes, &path), + merkle_root_from_path( + &coinbase_bytes, + &coinbase_bytes, + &coinbase_bytes, + &path, + &[] + ), None ); } @@ -676,6 +690,7 @@ pub fn get_target( coinbase_tx_suffix, extranonce, &(merkle_path[..]), + &[], ) .unwrap() .try_into() @@ -778,9 +793,14 @@ impl<'a> From> for bitcoin::Block { let id = id.as_ref().to_vec(); path.push(id); } - let merkle_root = - merkle_root_from_path(&coinbase_pre[..], &coinbase_suf[..], &extranonce[..], &path) - .expect("Invalid coinbase"); + let merkle_root = merkle_root_from_path( + &coinbase_pre[..], + &coinbase_suf[..], + &extranonce[..], + &path, + &[], + ) + .expect("Invalid coinbase"); let merkle_root = Hash::from_inner(merkle_root.try_into().unwrap()); let prev_blockhash = u256_to_block_hash(message.prev_hash.into_static()); diff --git a/protocols/v2/subprotocols/mining/src/lib.rs b/protocols/v2/subprotocols/mining/src/lib.rs index 83be80d0be..d71d5605ad 100644 --- a/protocols/v2/subprotocols/mining/src/lib.rs +++ b/protocols/v2/subprotocols/mining/src/lib.rs @@ -106,6 +106,7 @@ //! //! This protocol explicitly expects that upstream server software is able to manage the size of //! 
the hashing space correctly for its clients and can provide new jobs quickly enough. +use alloc::vec::Vec; use binary_sv2::{B032, U256}; use core::{ cmp::{Ord, PartialOrd}, @@ -278,6 +279,12 @@ impl core::convert::TryFrom> for Extranonce { } } +impl AsRef<[u8]> for Extranonce { + fn as_ref(&self) -> &[u8] { + self.extranonce.as_ref() + } +} + impl Extranonce { pub fn new(len: usize) -> Option { if len > MAX_EXTRANONCE_LEN { @@ -312,12 +319,21 @@ impl Extranonce { /// Return only the prefix part of the extranonce /// If the required size is greater than the extranonce len it return None - pub fn into_prefix(&self, prefix_len: usize) -> Option> { + pub fn into_prefix( + &self, + prefix_len: usize, + additional_coinbase_script_data: &[u8], + ) -> Option> { if prefix_len > self.extranonce.len() { None } else { - let mut prefix = self.extranonce.clone(); - prefix.resize(prefix_len, 0); + let mut prefix = Vec::with_capacity(prefix_len + additional_coinbase_script_data.len()); + for i in 0..prefix_len { + prefix.push(self.extranonce[i]); + } + for b in additional_coinbase_script_data { + prefix.push(*b); + } // unwrap is sage as prefix_len can not be greater than 32 cause is not possible to // contruct Extranonce with the inner vecto greater than 32. 
Some(prefix.try_into().unwrap()) @@ -1103,7 +1119,7 @@ pub mod tests { fn test_extranonce_to_prefix() { let inner = vec![1, 2, 3, 4, 5, 6, 7, 8, 9]; let extranone = Extranonce { extranonce: inner }; - let prefix = extranone.into_prefix(4).unwrap(); + let prefix = extranone.into_prefix(4, &[]).unwrap(); assert!(vec![1, 2, 3, 4] == prefix.to_vec()) } @@ -1111,7 +1127,7 @@ pub mod tests { fn test_extranonce_to_prefix_not_greater_than_inner() { let inner = vec![1, 2, 3, 4, 5, 6, 7, 8, 9]; let extranone = Extranonce { extranonce: inner }; - let prefix = extranone.into_prefix(20); + let prefix = extranone.into_prefix(20, &[]); assert!(prefix.is_none()) } diff --git a/roles/mining-proxy/src/lib/upstream_mining.rs b/roles/mining-proxy/src/lib/upstream_mining.rs index dde1e4c04b..af4ea8f80a 100644 --- a/roles/mining-proxy/src/lib/upstream_mining.rs +++ b/roles/mining-proxy/src/lib/upstream_mining.rs @@ -87,7 +87,6 @@ impl ChannelKind { downstream_share_per_minute, kind, Some(vec![]), - String::from(""), up_id, ); *self = Self::Extended(Some(factory)); diff --git a/roles/test-utils/mining-device-sv1/src/job.rs b/roles/test-utils/mining-device-sv1/src/job.rs index 1d6b3d2bcd..73ef57c8dd 100644 --- a/roles/test-utils/mining-device-sv1/src/job.rs +++ b/roles/test-utils/mining-device-sv1/src/job.rs @@ -49,6 +49,7 @@ impl Job { &coinbase_tx_suffix, &extranonce, &path, + &[], ) .unwrap(); let merkle_root: [u8; 32] = merkle_root.try_into().unwrap(); diff --git a/roles/translator/src/lib/proxy/bridge.rs b/roles/translator/src/lib/proxy/bridge.rs index 1525217570..ac65140c46 100644 --- a/roles/translator/src/lib/proxy/bridge.rs +++ b/roles/translator/src/lib/proxy/bridge.rs @@ -101,7 +101,6 @@ impl Bridge { share_per_min, ExtendedChannelKind::Proxy { upstream_target }, None, - String::from(""), up_id, ), future_jobs: vec![], From b43cec0776dd2648d4f6c29b19edf5ff54d6a574 Mon Sep 17 00:00:00 2001 From: fi3 Date: Thu, 14 Nov 2024 17:35:54 +0100 Subject: [PATCH 03/27] Update pool to use 
shorter extranonce Update the pool to use an extranonce of 16 bytes rather than 32 so that there is enough space to add the additional coinbase input script data. --- roles/pool/src/lib/mining_pool/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/pool/src/lib/mining_pool/mod.rs b/roles/pool/src/lib/mining_pool/mod.rs index d9e426e5db..f274b26d4f 100644 --- a/roles/pool/src/lib/mining_pool/mod.rs +++ b/roles/pool/src/lib/mining_pool/mod.rs @@ -597,11 +597,11 @@ impl Pool { sender_message_received_signal: Sender<()>, status_tx: status::Sender, ) -> Arc> { - let extranonce_len = 32; + let extranonce_len = 16; let range_0 = std::ops::Range { start: 0, end: 0 }; - let range_1 = std::ops::Range { start: 0, end: 16 }; + let range_1 = std::ops::Range { start: 0, end: 8 }; let range_2 = std::ops::Range { - start: 16, + start: 8, end: extranonce_len, }; let ids = Arc::new(Mutex::new(roles_logic_sv2::utils::GroupId::new())); From 71d0abfe3ce745ba5738bfbb9feaaf04f7f01f97 Mon Sep 17 00:00:00 2001 From: fi3 Date: Thu, 14 Nov 2024 17:40:13 +0100 Subject: [PATCH 04/27] Update channel factory to support more active jobs in the same moment Right now the channel factory only supports one active job at a time. That means that if we receive a share for a job right after we sent downstream a new job, that share will be invalid. Now the channel factory keeps track of the last 3 jobs, so we give time to the downstream to receive the job and propagate it down before we stop accepting shares for older jobs. This is useful, and the system can be more responsive: as soon as we change the coinbase additional input script data we can send a new job downstream without worrying about invalidating miners' shares. When the pool receives a prev hash it immediately invalidates all the previous jobs, since we still want to refuse shares for stale jobs. 
The client can easly handle this situation: when a pool refuse a share it should start a timer and if do not receive a new prev hash (or already have) within n seconds it change pool. --- .../src/channel_logic/channel_factory.rs | 194 ++++++++++++++---- 1 file changed, 155 insertions(+), 39 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 18f289a46a..56ba006161 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -15,7 +15,7 @@ use mining_sv2::{ }; use nohash_hasher::BuildNoHashHasher; -use std::{collections::HashMap, convert::TryInto, sync::Arc}; +use std::{collections::{HashMap, HashSet}, convert::TryInto, sync::Arc}; use template_distribution_sv2::{NewTemplate, SetNewPrevHash as SetNewPrevHashFromTp}; use tracing::{debug, error, info, trace, warn}; @@ -210,13 +210,80 @@ struct ChannelFactory { last_prev_hash: Option<(StagedPhash, Vec)>, last_prev_hash_: Option, // (NewExtendedMiningJob,group ids that already received the job) - last_valid_job: Option<(NewExtendedMiningJob<'static>, Vec)>, + last_valid_jobs: [Option<(NewExtendedMiningJob<'static>, Vec)>;3], + // Index of the last valid job for channel_id ++ job_id + id_to_job: HashMap>, + // Used to understand which is the last added element in last_valid_jobs + added_elements: usize, kind: ExtendedChannelKind, job_ids: Id, channel_to_group_id: HashMap>, future_templates: HashMap, BuildNoHashHasher>, } +impl ChannelFactory { + fn add_valid_job(&mut self, job: NewExtendedMiningJob<'static>, group_ids: Vec) { + match self.last_valid_jobs { + [None,None,None] => { + self.id_to_job.insert(job.job_id, 0); + self.last_valid_jobs[0] = Some((job,group_ids)); + self.added_elements = 1; + }, + [Some(_),None,None] => { + self.id_to_job.insert(job.job_id, 1); + self.last_valid_jobs[1] = Some((job,group_ids)); + 
self.added_elements = 2; + }, + [Some(_),Some(_),None] => { + self.id_to_job.insert(job.job_id, 2); + self.last_valid_jobs[2] = Some((job,group_ids)); + self.added_elements = 3; + }, + [Some(_),Some(_),Some(_)] => { + let to_remove = self.added_elements % 3; + self.id_to_job.retain(|_, v| *v != to_remove as u8); + self.id_to_job.insert(job.job_id, to_remove as u8); + self.last_valid_jobs[to_remove] = Some((job,group_ids)); + }, + _ => panic!("Internal error: invalid last_valid_jobs state"), + } + } + fn get_valid_job(&self, job_id: u32) -> Option<&(NewExtendedMiningJob<'static>, Vec)> { + let index = self.id_to_job.get(&job_id)?; + self.last_valid_jobs[*index as usize].as_ref() + } + fn get_last_valid_job(&self) -> Option<&(NewExtendedMiningJob<'static>, Vec)> { + let index = self.get_last_valid_job_index()?; + self.last_valid_jobs[index as usize].as_ref() + } + fn get_mut_last_valid_job(&mut self) -> Option<&mut (NewExtendedMiningJob<'static>, Vec)> { + let index = self.get_last_valid_job_index()?; + self.last_valid_jobs[index as usize].as_mut() + } + fn get_last_valid_job_index(&self) -> Option { + match self.last_valid_jobs { + [None,None,None] => { + None + }, + [Some(_),None,None] => { + Some(0) + }, + [Some(_),Some(_),None] => { + Some(1) + }, + [Some(_),Some(_),Some(_)] => { + Some(2) + }, + _ => panic!("Internal error: invalid last_valid_jobs state"), + } + } + fn clear_valid_jobs(&mut self) { + self.last_valid_jobs = [None,None,None]; + self.id_to_job.clear(); + self.added_elements = 0; + } +} + impl ChannelFactory { pub fn add_standard_channel( &mut self, @@ -298,7 +365,7 @@ impl ChannelFactory { }; self.extended_channels.insert(channel_id, success.clone()); let mut result = vec![Mining::OpenExtendedMiningChannelSuccess(success)]; - if let Some((job, _)) = &self.last_valid_job { + if let Some((job, _)) = &self.get_last_valid_job() { let mut job = job.clone(); job.set_future(); let j_id = job.job_id; @@ -498,14 +565,15 @@ impl ChannelFactory { }) 
.collect(); + let id = self.job_ids.next(); // OPTIMIZATION the extranonce is cloned so many time but maybe is avoidable? - let last_valid_job = match &self.last_valid_job { + let last_valid_job = match self.get_last_valid_job() { Some((j, _)) => Some( extended_to_standard_job( j, standard_channel.extranonce.as_ref(), standard_channel.channel_id, - Some(self.job_ids.next()), + Some(id), additional_coinbase_script_data, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot)?, @@ -593,10 +661,12 @@ impl ChannelFactory { // This is the same thing of just check if there is a prev hash add it to result if there // is last_job add it to result and add each future job to result. // But using the pattern match is more clear how each option is handled + let last_prev_hash = self.last_prev_hash.clone(); + let is_empty = self.future_jobs.is_empty(); match ( - self.last_prev_hash.as_mut(), - self.last_valid_job.as_mut(), - self.future_jobs.is_empty(), + last_prev_hash, + self.get_mut_last_valid_job(), + is_empty, ) { // If we do not have anything just do nothing (None, None, true) => (), @@ -614,16 +684,17 @@ impl ChannelFactory { } // If we have just a prev hash we need to send it after the SetupConnectionSuccess // message - (Some((prev_h, group_id_p_hash_sent)), None, true) => { + (Some((prev_h, mut group_id_p_hash_sent)), None, true) => { if !group_id_p_hash_sent.contains(&group_id) { let prev_h = prev_h.into_set_p_hash(group_id, None); group_id_p_hash_sent.push(group_id); result.push(Mining::SetNewPrevHash(prev_h.clone())); } + self.last_prev_hash = Some((prev_h, group_id_p_hash_sent)); } // If we have a prev hash and a last valid job we need to send before the prev hash and // the the valid job - (Some((prev_h, group_id_p_hash_sent)), Some((job, group_id_job_sent)), true) => { + (Some((prev_h, mut group_id_p_hash_sent)), Some((job, group_id_job_sent)), true) => { if !group_id_p_hash_sent.contains(&group_id) { let prev_h = prev_h.into_set_p_hash(group_id, Some(job.job_id)); 
group_id_p_hash_sent.push(group_id); @@ -635,9 +706,10 @@ impl ChannelFactory { group_id_job_sent.push(group_id); result.push(Mining::NewExtendedMiningJob(job)); } + self.last_prev_hash = Some((prev_h, group_id_p_hash_sent)); } // If we have everything we need, send before the prev hash and then all the jobs - (Some((prev_h, group_id_p_hash_sent)), Some((job, group_id_job_sent)), false) => { + (Some((prev_h, mut group_id_p_hash_sent)), Some((job, group_id_job_sent)), false) => { if !group_id_p_hash_sent.contains(&group_id) { let prev_h = prev_h.into_set_p_hash(group_id, Some(job.job_id)); group_id_p_hash_sent.push(group_id); @@ -659,6 +731,7 @@ impl ChannelFactory { result.push(Mining::NewExtendedMiningJob(job)); } } + self.last_prev_hash = Some((prev_h, group_id_p_hash_sent)); } // This can not happen because we can not have a valid job without a prev hash (None, Some(_), true) => unreachable!(), @@ -674,6 +747,7 @@ impl ChannelFactory { /// job queue, we move the future job into the valid job slot and store the prev hash as the /// current prev hash to be referenced. 
fn on_new_prev_hash(&mut self, m: StagedPhash) -> Result<(), Error> { + self.clear_valid_jobs(); while let Some(mut job) = self.future_jobs.pop() { if job.0.job_id == m.job_id { let now = std::time::SystemTime::now() @@ -681,10 +755,9 @@ impl ChannelFactory { .unwrap() .as_secs() as u32; job.0.set_no_future(now); - self.last_valid_job = Some(job); + self.add_valid_job(job.0, job.1); break; } - self.last_valid_job = None; } self.future_jobs = vec![]; self.last_prev_hash_ = Some(crate::utils::u256_to_block_hash(m.prev_hash.clone())); @@ -738,7 +811,7 @@ impl ChannelFactory { ids.push(group_id) } } - self.last_valid_job = Some((m, ids)); + self.add_valid_job(m, ids); if let Some((_p_hash, _)) = &self.last_prev_hash { Ok(result) } else { @@ -1016,7 +1089,17 @@ pub struct PoolChannelFactory { pool_coinbase_outputs: Vec, // Additional data that the pool may want to include in the coinbase input script as first part // of the extranonce. This can be used to put things like the pool signature. - additional_coinbase_script_data: Vec, + // I prepend with _ cause it means that I don't want to use this value directly. + _additional_coinbase_script_data: Vec, + // This is normally set to None. When the pool change the additional_coinbase_script_data we + // set it to Some(old value). We need it cause for a short time frame we will have job that are + // supposed to use the new one and jobs that are supposed to use the old value. As soon we + // have only job that use the new value, this is set to None. We do not support more then 2 + // additional_coinbase_script_data at time. + // I prepend with _ cause it means that I don't want to use this value directly. 
+ _additional_coinbase_script_data_old: Option>, + // channel_id ++ job_id + job_ids_using_old_add_data: HashSet>, // extedned_channel_id -> SetCustomMiningJob negotiated_jobs: HashMap, BuildNoHashHasher>, } @@ -1029,7 +1112,7 @@ impl PoolChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: Vec, - additional_coinbase_script_data: Vec, + _additional_coinbase_script_data: Vec, ) -> Self { let inner = ChannelFactory { ids, @@ -1045,7 +1128,9 @@ impl PoolChannelFactory { future_jobs: Vec::new(), last_prev_hash: None, last_prev_hash_: None, - last_valid_job: None, + last_valid_jobs: [None,None,None], + id_to_job: HashMap::with_hasher(BuildNoHashHasher::default()), + added_elements: 0, kind, job_ids: Id::new(), channel_to_group_id: HashMap::with_hasher(BuildNoHashHasher::default()), @@ -1056,7 +1141,9 @@ impl PoolChannelFactory { inner, job_creator, pool_coinbase_outputs, - additional_coinbase_script_data, + _additional_coinbase_script_data, + _additional_coinbase_script_data_old: None, + job_ids_using_old_add_data: HashSet::with_hasher(BuildNoHashHasher::default()), negotiated_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), } } @@ -1073,7 +1160,7 @@ impl PoolChannelFactory { downstream_hash_rate, is_header_only, id, - Some(&self.additional_coinbase_script_data), + Some(&self.get_last_additional_coinbase_script_data()), ) } /// Calls [`ChannelFactory::new_extended_channel`] @@ -1087,7 +1174,7 @@ impl PoolChannelFactory { request_id, hash_rate, min_extranonce_size, - Some(&self.additional_coinbase_script_data), + Some(&self.get_last_additional_coinbase_script_data()), ) } /// Called when we want to replicate a channel already opened by another actor. 
@@ -1133,10 +1220,10 @@ impl PoolChannelFactory { m, true, self.pool_coinbase_outputs.clone(), - self.additional_coinbase_script_data.len() as u8, + self.get_last_additional_coinbase_script_data().len() as u8, )?; self.inner - .on_new_extended_mining_job(new_job, Some(&self.additional_coinbase_script_data)) + .on_new_extended_mining_job(new_job, Some(&self.get_last_additional_coinbase_script_data())) } /// Called when a `SubmitSharesStandard` message is received from the downstream. We check the /// shares against the channel's respective target and return `OnNewShare` to let us know if @@ -1145,12 +1232,13 @@ impl PoolChannelFactory { &mut self, m: SubmitSharesStandard, ) -> Result { + let additional_coinbase_script_data = self.get_additional_coinbase_script_data(m.channel_id,m.job_id); match self.inner.channel_to_group_id.get(&m.channel_id) { Some(g_id) => { let referenced_job = self .inner - .last_valid_job - .clone() + .get_valid_job(m.job_id) + .cloned() .ok_or(Error::ShareDoNotMatchAnyJob)? .0; let merkle_path = referenced_job.merkle_path.to_vec(); @@ -1180,7 +1268,7 @@ impl PoolChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, - Some(&self.additional_coinbase_script_data), + Some(&additional_coinbase_script_data), ) } None => { @@ -1205,6 +1293,7 @@ impl PoolChannelFactory { m: SubmitSharesExtended, ) -> Result { let target = self.job_creator.last_target(); + let additional_coinbase_script_data = self.get_additional_coinbase_script_data(m.channel_id,m.job_id); // When downstream set a custom mining job we add the job to the negotiated job // hashmap, with the extended channel id as a key. 
Whenever the pool receive a share must // first check if the channel have a negotiated job if so we can not retreive the template @@ -1214,7 +1303,7 @@ impl PoolChannelFactory { let merkle_path = referenced_job.merkle_path.to_vec(); let extended_job = job_creator::extended_job_from_custom_job( referenced_job, - self.additional_coinbase_script_data.len() as u8, + additional_coinbase_script_data.len() as u8, self.inner.extranonces.get_len() as u8, ) .unwrap(); @@ -1230,13 +1319,13 @@ impl PoolChannelFactory { extended_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, - Some(&self.additional_coinbase_script_data), + Some(&additional_coinbase_script_data), ) } else { let referenced_job = self .inner - .last_valid_job - .clone() + .get_valid_job(m.job_id) + .cloned() .ok_or(Error::ShareDoNotMatchAnyJob)? .0; let merkle_path = referenced_job.merkle_path.to_vec(); @@ -1265,7 +1354,7 @@ impl PoolChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, - Some(&self.additional_coinbase_script_data), + Some(&additional_coinbase_script_data), ) } } @@ -1342,6 +1431,31 @@ impl PoolChannelFactory { pub fn set_target(&mut self, new_target: &mut Target) { self.inner.kind.set_target(new_target); } + + // TODO ret can not be larger then 32 bytes maybe use the stack for it? + #[inline(always)] + fn get_additional_coinbase_script_data(&self, channel_id: u32, job_id: u32) -> Vec { + let id = ((channel_id as u64) << 32) | (job_id as u64); + match (self.job_ids_using_old_add_data.contains(&id), &self._additional_coinbase_script_data_old) { + (true, Some(additional_coinbase_script_data)) => additional_coinbase_script_data.clone(), + (false, _) => self._additional_coinbase_script_data.clone(), + _ => panic!("Internal error: when job_ids_using_old_add_data contains elements _additional_coinbase_script_data_old must be Some") + + } + } + // TODO ret can not be larger then 32 bytes maybe use the stack for it? 
+ #[inline(always)] + fn get_last_additional_coinbase_script_data(&self) -> Vec { + self._additional_coinbase_script_data.clone() + } + + pub fn change_additional_coinbase_script_data(&mut self, new_data: Vec) { + todo!() + } + + fn active_jobs(&self) -> Vec { + todo!() + } } /// Used by proxies that want to open extended channls with upstream. If the proxy has job @@ -1393,7 +1507,9 @@ impl ProxyExtendedChannelFactory { future_jobs: Vec::new(), last_prev_hash: None, last_prev_hash_: None, - last_valid_job: None, + last_valid_jobs: [None,None,None], + id_to_job: HashMap::with_hasher(BuildNoHashHasher::default()), + added_elements: 0, kind, job_ids: Id::new(), channel_to_group_id: HashMap::with_hasher(BuildNoHashHasher::default()), @@ -1544,7 +1660,7 @@ impl ProxyExtendedChannelFactory { ) -> Result { let merkle_path = self .inner - .last_valid_job + .get_valid_job(m.job_id) .as_ref() .ok_or(Error::ShareDoNotMatchAnyJob)? .0 @@ -1553,8 +1669,8 @@ impl ProxyExtendedChannelFactory { let referenced_job = self .inner - .last_valid_job - .clone() + .get_valid_job(m.job_id) + .cloned() .ok_or(Error::ShareDoNotMatchAnyJob)? .0; @@ -1639,7 +1755,7 @@ impl ProxyExtendedChannelFactory { ) -> Result { let merkle_path = self .inner - .last_valid_job + .get_valid_job(m.job_id) .as_ref() .ok_or(Error::ShareDoNotMatchAnyJob)? .0 @@ -1647,8 +1763,8 @@ impl ProxyExtendedChannelFactory { .to_vec(); let referenced_job = self .inner - .last_valid_job - .clone() + .get_valid_job(m.job_id) + .cloned() .ok_or(Error::ShareDoNotMatchAnyJob)? 
.0; match self.inner.channel_to_group_id.get(&m.channel_id) { @@ -1656,7 +1772,7 @@ impl ProxyExtendedChannelFactory { if let Some(job_creator) = self.job_creator.as_mut() { let template_id = job_creator .get_template_id_from_job( - self.inner.last_valid_job.as_ref().unwrap().0.job_id, + self.inner.get_valid_job(m.job_id).as_ref().unwrap().0.job_id, ) .ok_or(Error::NoTemplateForId)?; let bitcoin_target = job_creator.last_target(); @@ -1747,7 +1863,7 @@ impl ProxyExtendedChannelFactory { self.inner.kind.set_target(new_target); } pub fn last_valid_job_version(&self) -> Option { - self.inner.last_valid_job.as_ref().map(|j| j.0.version) + self.inner.get_last_valid_job().as_ref().map(|j| j.0.version) } /// Returns the full extranonce, extranonce1 (static for channel) + extranonce2 (miner nonce /// space) From aa7ba60803692d58a596d4fd526d1337fad4abd1 Mon Sep 17 00:00:00 2001 From: fi3 Date: Fri, 15 Nov 2024 16:42:23 +0100 Subject: [PATCH 05/27] Fix coinbase_prefix, pool channel factory new This commit fixes 2 minor things: When we calculate the coinbase_prefix (what we need to put in the extended job) we need to account also for the coinbase input script additional data that is part of the extranonce. When we create the pool channel factory we pass an extranonce creator and a pool signature. If the signature + extranonce are bigger than 32 bytes we have to return an error. That is because in sv2 the extranonce can not be longer than 32 bytes. 
--- .../src/channel_logic/channel_factory.rs | 97 ++++++++++--------- protocols/v2/roles-logic-sv2/src/errors.rs | 2 + .../v2/roles-logic-sv2/src/job_creator.rs | 13 ++- roles/jd-client/src/lib/downstream.rs | 3 +- .../src/lib/upstream_sv2/upstream.rs | 3 +- roles/pool/src/lib/mining_pool/mod.rs | 27 +++--- .../src/lib/upstream_sv2/upstream.rs | 3 +- 7 files changed, 84 insertions(+), 64 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 56ba006161..dfb141d7b3 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -15,7 +15,11 @@ use mining_sv2::{ }; use nohash_hasher::BuildNoHashHasher; -use std::{collections::{HashMap, HashSet}, convert::TryInto, sync::Arc}; +use std::{ + collections::{HashMap, HashSet}, + convert::TryInto, + sync::Arc, +}; use template_distribution_sv2::{NewTemplate, SetNewPrevHash as SetNewPrevHashFromTp}; use tracing::{debug, error, info, trace, warn}; @@ -210,9 +214,9 @@ struct ChannelFactory { last_prev_hash: Option<(StagedPhash, Vec)>, last_prev_hash_: Option, // (NewExtendedMiningJob,group ids that already received the job) - last_valid_jobs: [Option<(NewExtendedMiningJob<'static>, Vec)>;3], + last_valid_jobs: [Option<(NewExtendedMiningJob<'static>, Vec)>; 3], // Index of the last valid job for channel_id ++ job_id - id_to_job: HashMap>, + id_to_job: HashMap>, // Used to understand which is the last added element in last_valid_jobs added_elements: usize, kind: ExtendedChannelKind, @@ -224,27 +228,27 @@ struct ChannelFactory { impl ChannelFactory { fn add_valid_job(&mut self, job: NewExtendedMiningJob<'static>, group_ids: Vec) { match self.last_valid_jobs { - [None,None,None] => { + [None, None, None] => { self.id_to_job.insert(job.job_id, 0); - self.last_valid_jobs[0] = Some((job,group_ids)); + self.last_valid_jobs[0] = 
Some((job, group_ids)); self.added_elements = 1; - }, - [Some(_),None,None] => { + } + [Some(_), None, None] => { self.id_to_job.insert(job.job_id, 1); - self.last_valid_jobs[1] = Some((job,group_ids)); + self.last_valid_jobs[1] = Some((job, group_ids)); self.added_elements = 2; - }, - [Some(_),Some(_),None] => { + } + [Some(_), Some(_), None] => { self.id_to_job.insert(job.job_id, 2); - self.last_valid_jobs[2] = Some((job,group_ids)); + self.last_valid_jobs[2] = Some((job, group_ids)); self.added_elements = 3; - }, - [Some(_),Some(_),Some(_)] => { + } + [Some(_), Some(_), Some(_)] => { let to_remove = self.added_elements % 3; self.id_to_job.retain(|_, v| *v != to_remove as u8); self.id_to_job.insert(job.job_id, to_remove as u8); - self.last_valid_jobs[to_remove] = Some((job,group_ids)); - }, + self.last_valid_jobs[to_remove] = Some((job, group_ids)); + } _ => panic!("Internal error: invalid last_valid_jobs state"), } } @@ -262,23 +266,15 @@ impl ChannelFactory { } fn get_last_valid_job_index(&self) -> Option { match self.last_valid_jobs { - [None,None,None] => { - None - }, - [Some(_),None,None] => { - Some(0) - }, - [Some(_),Some(_),None] => { - Some(1) - }, - [Some(_),Some(_),Some(_)] => { - Some(2) - }, + [None, None, None] => None, + [Some(_), None, None] => Some(0), + [Some(_), Some(_), None] => Some(1), + [Some(_), Some(_), Some(_)] => Some(2), _ => panic!("Internal error: invalid last_valid_jobs state"), } } fn clear_valid_jobs(&mut self) { - self.last_valid_jobs = [None,None,None]; + self.last_valid_jobs = [None, None, None]; self.id_to_job.clear(); self.added_elements = 0; } @@ -663,11 +659,7 @@ impl ChannelFactory { // But using the pattern match is more clear how each option is handled let last_prev_hash = self.last_prev_hash.clone(); let is_empty = self.future_jobs.is_empty(); - match ( - last_prev_hash, - self.get_mut_last_valid_job(), - is_empty, - ) { + match (last_prev_hash, self.get_mut_last_valid_job(), is_empty) { // If we do not have anything 
just do nothing (None, None, true) => (), // If we have only future jobs we need to send them all after the @@ -1113,7 +1105,11 @@ impl PoolChannelFactory { kind: ExtendedChannelKind, pool_coinbase_outputs: Vec, _additional_coinbase_script_data: Vec, - ) -> Self { + ) -> Result { + if _additional_coinbase_script_data.len() + extranonces.get_len() > 32 { + error!("Additional coinbase script data is too big"); + return Err(Error::AdditionalCoinbaseScriptDataTooBig); + } let inner = ChannelFactory { ids, standard_channels_for_non_hom_downstreams: HashMap::with_hasher( @@ -1128,7 +1124,7 @@ impl PoolChannelFactory { future_jobs: Vec::new(), last_prev_hash: None, last_prev_hash_: None, - last_valid_jobs: [None,None,None], + last_valid_jobs: [None, None, None], id_to_job: HashMap::with_hasher(BuildNoHashHasher::default()), added_elements: 0, kind, @@ -1137,7 +1133,7 @@ impl PoolChannelFactory { future_templates: HashMap::with_hasher(BuildNoHashHasher::default()), }; - Self { + Ok(Self { inner, job_creator, pool_coinbase_outputs, @@ -1145,7 +1141,7 @@ impl PoolChannelFactory { _additional_coinbase_script_data_old: None, job_ids_using_old_add_data: HashSet::with_hasher(BuildNoHashHasher::default()), negotiated_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), - } + }) } /// Calls [`ChannelFactory::add_standard_channel`] pub fn add_standard_channel( @@ -1222,8 +1218,10 @@ impl PoolChannelFactory { self.pool_coinbase_outputs.clone(), self.get_last_additional_coinbase_script_data().len() as u8, )?; - self.inner - .on_new_extended_mining_job(new_job, Some(&self.get_last_additional_coinbase_script_data())) + self.inner.on_new_extended_mining_job( + new_job, + Some(&self.get_last_additional_coinbase_script_data()), + ) } /// Called when a `SubmitSharesStandard` message is received from the downstream. 
We check the /// shares against the channel's respective target and return `OnNewShare` to let us know if @@ -1232,7 +1230,8 @@ impl PoolChannelFactory { &mut self, m: SubmitSharesStandard, ) -> Result { - let additional_coinbase_script_data = self.get_additional_coinbase_script_data(m.channel_id,m.job_id); + let additional_coinbase_script_data = + self.get_additional_coinbase_script_data(m.channel_id, m.job_id); match self.inner.channel_to_group_id.get(&m.channel_id) { Some(g_id) => { let referenced_job = self @@ -1293,7 +1292,8 @@ impl PoolChannelFactory { m: SubmitSharesExtended, ) -> Result { let target = self.job_creator.last_target(); - let additional_coinbase_script_data = self.get_additional_coinbase_script_data(m.channel_id,m.job_id); + let additional_coinbase_script_data = + self.get_additional_coinbase_script_data(m.channel_id, m.job_id); // When downstream set a custom mining job we add the job to the negotiated job // hashmap, with the extended channel id as a key. Whenever the pool receive a share must // first check if the channel have a negotiated job if so we can not retreive the template @@ -1440,7 +1440,6 @@ impl PoolChannelFactory { (true, Some(additional_coinbase_script_data)) => additional_coinbase_script_data.clone(), (false, _) => self._additional_coinbase_script_data.clone(), _ => panic!("Internal error: when job_ids_using_old_add_data contains elements _additional_coinbase_script_data_old must be Some") - } } // TODO ret can not be larger then 32 bytes maybe use the stack for it? 
@@ -1507,7 +1506,7 @@ impl ProxyExtendedChannelFactory { future_jobs: Vec::new(), last_prev_hash: None, last_prev_hash_: None, - last_valid_jobs: [None,None,None], + last_valid_jobs: [None, None, None], id_to_job: HashMap::with_hasher(BuildNoHashHasher::default()), added_elements: 0, kind, @@ -1772,7 +1771,12 @@ impl ProxyExtendedChannelFactory { if let Some(job_creator) = self.job_creator.as_mut() { let template_id = job_creator .get_template_id_from_job( - self.inner.get_valid_job(m.job_id).as_ref().unwrap().0.job_id, + self.inner + .get_valid_job(m.job_id) + .as_ref() + .unwrap() + .0 + .job_id, ) .ok_or(Error::NoTemplateForId)?; let bitcoin_target = job_creator.last_target(); @@ -1863,7 +1867,10 @@ impl ProxyExtendedChannelFactory { self.inner.kind.set_target(new_target); } pub fn last_valid_job_version(&self) -> Option { - self.inner.get_last_valid_job().as_ref().map(|j| j.0.version) + self.inner + .get_last_valid_job() + .as_ref() + .map(|j| j.0.version) } /// Returns the full extranonce, extranonce1 (static for channel) + extranonce2 (miner nonce /// space) diff --git a/protocols/v2/roles-logic-sv2/src/errors.rs b/protocols/v2/roles-logic-sv2/src/errors.rs index 20c4bcd554..da95e2d532 100644 --- a/protocols/v2/roles-logic-sv2/src/errors.rs +++ b/protocols/v2/roles-logic-sv2/src/errors.rs @@ -61,6 +61,7 @@ pub enum Error { HashrateError(InputError), LogicErrorMessage(std::boxed::Box>), JDSMissingTransactions, + AdditionalCoinbaseScriptDataTooBig, } impl From for Error { @@ -153,6 +154,7 @@ impl Display for Error { HashrateError(e) => write!(f, "Impossible to get Hashrate: {:?}", e), LogicErrorMessage(e) => write!(f, "Message is well formatted but can not be handled: {:?}", e), JDSMissingTransactions => write!(f, "JD server cannot propagate the block: missing transactions"), + AdditionalCoinbaseScriptDataTooBig => write!(f, "Additional coinbase script data too big"), } } } diff --git a/protocols/v2/roles-logic-sv2/src/job_creator.rs 
b/protocols/v2/roles-logic-sv2/src/job_creator.rs index 0e65c35b35..e52ee9b787 100644 --- a/protocols/v2/roles-logic-sv2/src/job_creator.rs +++ b/protocols/v2/roles-logic-sv2/src/job_creator.rs @@ -224,7 +224,12 @@ fn new_extended_job( version_rolling_allowed, merkle_path: new_template.merkle_path.clone().into_static(), coinbase_tx_prefix: coinbase_tx_prefix(&coinbase, script_prefix_len)?, - coinbase_tx_suffix: coinbase_tx_suffix(&coinbase, extranonce_len, script_prefix_len)?, + coinbase_tx_suffix: coinbase_tx_suffix( + &coinbase, + extranonce_len, + script_prefix_len, + additional_coinbase_script_data_len as usize, + )?, }; debug!( @@ -249,10 +254,10 @@ fn coinbase_tx_prefix( }; let index = 4 // tx version + segwit_bytes - + 1 // number of inputs TODO can be also 3 + + 1 // number of inputs (always 1) + 32 // prev OutPoint + 4 // index - + 1 // bytes in script TODO can be also 3 + + 1 // bytes in script (max 100 so always 1 byte) + script_prefix_len; // bip34_bytes let r = encoded[0..index].to_vec(); r.try_into().map_err(Error::BinarySv2Error) @@ -264,6 +269,7 @@ fn coinbase_tx_suffix( coinbase: &Transaction, extranonce_len: u8, script_prefix_len: usize, + additional_coinbase_script_data_len: usize, ) -> Result, Error> { let encoded = coinbase.serialize(); // If script_prefix_len is not 0 we are not in a test enviornment and the coinbase have the 0 @@ -279,6 +285,7 @@ fn coinbase_tx_suffix( + 4 // index + 1 // bytes in script TODO can be also 3 + script_prefix_len // bip34_bytes + + additional_coinbase_script_data_len + (extranonce_len as usize)..] 
.to_vec(); r.try_into().map_err(Error::BinarySv2Error) diff --git a/roles/jd-client/src/lib/downstream.rs b/roles/jd-client/src/lib/downstream.rs index 59703491fa..68b5fc6ef5 100644 --- a/roles/jd-client/src/lib/downstream.rs +++ b/roles/jd-client/src/lib/downstream.rs @@ -493,7 +493,8 @@ impl kind, coinbase_outputs, "SOLO".as_bytes().to_vec(), - ); + ) + .expect("Signature + extranonce lens exceed 32 bytes"); self.status.set_channel(channel_factory); let request_id = m.request_id; diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index 3dc7b33915..9e30f98c1c 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -587,7 +587,8 @@ impl ParseUpstreamMiningMessages, status_tx: status::Sender, ) -> Arc> { - let extranonce_len = 16; + let extranonce_len = 13; let range_0 = std::ops::Range { start: 0, end: 0 }; - let range_1 = std::ops::Range { start: 0, end: 8 }; + let range_1 = std::ops::Range { start: 0, end: 5 }; let range_2 = std::ops::Range { - start: 8, + start: 5, end: extranonce_len, }; let ids = Arc::new(Mutex::new(roles_logic_sv2::utils::GroupId::new())); @@ -611,15 +611,18 @@ impl Pool { let creator = JobsCreators::new(extranonce_len as u8); let share_per_min = 1.0; let kind = roles_logic_sv2::channel_logic::channel_factory::ExtendedChannelKind::Pool; - let channel_factory = Arc::new(Mutex::new(PoolChannelFactory::new( - ids, - extranonces, - creator, - share_per_min, - kind, - pool_coinbase_outputs.expect("Invalid coinbase output in config"), - config.pool_signature.clone().into_bytes(), - ))); + let channel_factory = Arc::new(Mutex::new( + PoolChannelFactory::new( + ids, + extranonces, + creator, + share_per_min, + kind, + pool_coinbase_outputs.expect("Invalid coinbase output in config"), + config.pool_signature.clone().into_bytes(), + ) + .expect("Signature + extranonce lens exceed 32 bytes"), + )); let pool = 
Arc::new(Mutex::new(Pool { downstreams: HashMap::with_hasher(BuildNoHashHasher::default()), solution_sender, diff --git a/roles/translator/src/lib/upstream_sv2/upstream.rs b/roles/translator/src/lib/upstream_sv2/upstream.rs index c025c14071..9b4a2a20c1 100644 --- a/roles/translator/src/lib/upstream_sv2/upstream.rs +++ b/roles/translator/src/lib/upstream_sv2/upstream.rs @@ -171,8 +171,7 @@ impl Upstream { job_id: None, last_job_id: None, min_extranonce_size, - upstream_extranonce1_size: 16, /* 16 is the default since that is the only value the * pool supports currently */ + upstream_extranonce1_size: 8, tx_sv2_extranonce, tx_status, target, From f1ae8fc758e46c3b06b8042b75a87c5ad27d90a4 Mon Sep 17 00:00:00 2001 From: fi3 Date: Fri, 15 Nov 2024 17:21:38 +0100 Subject: [PATCH 06/27] Fix translator segwit remover The translator normalizes the coinbase and removes the segwit data from the coinbase prefix and suffix. In order to do that it needs to know the extranonce len; we used a default value of 32 bytes, but the pool could also use smaller extranonces. --- .../roles-logic-sv2/src/channel_logic/channel_factory.rs | 4 ++++ roles/translator/src/lib/proxy/bridge.rs | 9 +++++++++ roles/translator/src/lib/proxy/next_mining_notify.rs | 3 ++- test/config/interop-jd-translator/pool-config.toml | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index dfb141d7b3..bccb389ff8 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -1924,6 +1924,10 @@ impl ProxyExtendedChannelFactory { ) -> Option { self.inner.update_target_for_channel(channel_id, new_target) } + + pub fn get_extranonce_len(&self) -> usize { + self.inner.extranonces.get_len() + } } /// Used by proxies for tracking upstream targets.
diff --git a/roles/translator/src/lib/proxy/bridge.rs b/roles/translator/src/lib/proxy/bridge.rs index ac65140c46..a931e391eb 100644 --- a/roles/translator/src/lib/proxy/bridge.rs +++ b/roles/translator/src/lib/proxy/bridge.rs @@ -340,6 +340,10 @@ impl Bridge { }) .map_err(|_| PoisonLock)?; + let extranonce_len = self_ + .safe_lock(|s| s.channel_factory.get_extranonce_len()) + .unwrap(); + let mut match_a_future_job = false; while let Some(job) = future_jobs.pop() { if job.job_id == sv2_set_new_prev_hash.job_id { @@ -349,6 +353,7 @@ impl Bridge { sv2_set_new_prev_hash.clone(), job, true, + extranonce_len, ); // Get the sender to send the mining.notify to the Downstream @@ -428,6 +433,9 @@ impl Bridge { .on_new_extended_mining_job(sv2_new_extended_mining_job.as_static().clone()) }) .map_err(|_| PoisonLock)??; + let extranonce_len = self_ + .safe_lock(|s| s.channel_factory.get_extranonce_len()) + .unwrap(); // If future_job=true, this job is meant for a future SetNewPrevHash that the proxy // has yet to receive. Insert this new job into the job_mapper . @@ -456,6 +464,7 @@ impl Bridge { last_p_hash, sv2_new_extended_mining_job.clone(), false, + extranonce_len, ); // Get the sender to send the mining.notify to the Downstream tx_sv1_notify.send(notify.clone())?; diff --git a/roles/translator/src/lib/proxy/next_mining_notify.rs b/roles/translator/src/lib/proxy/next_mining_notify.rs index 7bcaf44f1a..c611986672 100644 --- a/roles/translator/src/lib/proxy/next_mining_notify.rs +++ b/roles/translator/src/lib/proxy/next_mining_notify.rs @@ -16,9 +16,10 @@ pub fn create_notify( new_prev_hash: SetNewPrevHash<'static>, new_job: NewExtendedMiningJob<'static>, clean_jobs: bool, + extranonce_len: usize, ) -> server_to_client::Notify<'static> { // TODO 32 must be changed! 
- let new_job = extended_job_to_non_segwit(new_job, 32) + let new_job = extended_job_to_non_segwit(new_job, extranonce_len) .expect("failed to convert extended job to non segwit"); // Make sure that SetNewPrevHash + NewExtendedMiningJob is matching (not future) let job_id = new_job.job_id.to_string(); diff --git a/test/config/interop-jd-translator/pool-config.toml b/test/config/interop-jd-translator/pool-config.toml index 9de7b11b12..91d50f203e 100644 --- a/test/config/interop-jd-translator/pool-config.toml +++ b/test/config/interop-jd-translator/pool-config.toml @@ -16,7 +16,7 @@ coinbase_outputs = [ ] # Pool signature (string to be included in coinbase tx) # e.g. "Foundry USA", "Antpool", "/ViaBTC/Mined by gitgab19", etc -pool_signature = "Stratum v2 SRI Pool - gitgab19" +pool_signature = "Stratum v2 SRI Pool" # Template Provider config # hosted testnet TP From 96866ebd8428a2d0e5b5f2843bb582db3b2113b6 Mon Sep 17 00:00:00 2001 From: fi3 Date: Sat, 16 Nov 2024 10:30:48 +0100 Subject: [PATCH 07/27] Update extranonce size for sv1 devices in translator config --- .../config-examples/tproxy-config-hosted-pool-example.toml | 2 +- .../config-examples/tproxy-config-local-pool-example.toml | 2 +- test/config/tproxy-config-no-jd-sv1-cpu-md.toml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/roles/translator/config-examples/tproxy-config-hosted-pool-example.toml b/roles/translator/config-examples/tproxy-config-hosted-pool-example.toml index 47d4ea8758..cd1202f5bf 100644 --- a/roles/translator/config-examples/tproxy-config-hosted-pool-example.toml +++ b/roles/translator/config-examples/tproxy-config-hosted-pool-example.toml @@ -20,7 +20,7 @@ min_supported_version = 2 # Max value: 16 (leaves 0 bytes for search space splitting of downstreams) # Max value for CGminer: 8 # Min value: 2 -min_extranonce2_size = 8 +min_extranonce2_size = 5 # Difficulty params [downstream_difficulty_config] diff --git
a/roles/translator/config-examples/tproxy-config-local-pool-example.toml b/roles/translator/config-examples/tproxy-config-local-pool-example.toml index b4359d5ab4..12d6f2f173 100644 --- a/roles/translator/config-examples/tproxy-config-local-pool-example.toml +++ b/roles/translator/config-examples/tproxy-config-local-pool-example.toml @@ -20,7 +20,7 @@ min_supported_version = 2 # Max value: 16 (leaves 0 bytes for search space splitting of downstreams) # Max value for CGminer: 8 # Min value: 2 -min_extranonce2_size = 8 +min_extranonce2_size = 5 # Difficulty params [downstream_difficulty_config] diff --git a/test/config/tproxy-config-no-jd-sv1-cpu-md.toml b/test/config/tproxy-config-no-jd-sv1-cpu-md.toml index 7c90479f0a..2b05ec8793 100644 --- a/test/config/tproxy-config-no-jd-sv1-cpu-md.toml +++ b/test/config/tproxy-config-no-jd-sv1-cpu-md.toml @@ -24,7 +24,7 @@ min_supported_version = 2 # Max value: 16 (leaves 0 bytes for search space splitting of downstreams) # Max value for CGminer: 8 # Min value: 2 -min_extranonce2_size = 8 +min_extranonce2_size = 5 coinbase_reward_sat = 5_000_000_000 # optional jn config, if set the tproxy start on JN mode @@ -45,4 +45,4 @@ shares_per_minute = 100.0 # interval in seconds to elapse before updating channel hashrate with the pool channel_diff_update_interval = 60 # estimated accumulated hashrate of all downstream miners -channel_nominal_hashrate = 500.0 \ No newline at end of file +channel_nominal_hashrate = 500.0 From 1871a33cbb922c86fa1550d9589075371bad8ada Mon Sep 17 00:00:00 2001 From: fi3 Date: Sat, 16 Nov 2024 13:53:27 +0100 Subject: [PATCH 08/27] Update pool channel factory, send new extranonce prefix Add a method that the pool can use in order to change coinbase_script_additional_data of an already opened channel and send downstream the new extranonce_prefix --- .../src/channel_logic/channel_factory.rs | 225 ++++++++++++++---- protocols/v2/roles-logic-sv2/src/errors.rs | 2 + 2 files changed, 184 insertions(+), 43 
deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index bccb389ff8..3e1bfbfb22 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -10,8 +10,8 @@ use crate::{ use mining_sv2::{ ExtendedExtranonce, NewExtendedMiningJob, NewMiningJob, OpenExtendedMiningChannelSuccess, OpenMiningChannelError, OpenStandardMiningChannelSuccess, SetCustomMiningJob, - SetCustomMiningJobSuccess, SetNewPrevHash, SubmitSharesError, SubmitSharesExtended, - SubmitSharesStandard, Target, + SetCustomMiningJobSuccess, SetExtranoncePrefix, SetNewPrevHash, SubmitSharesError, + SubmitSharesExtended, SubmitSharesStandard, Target, }; use nohash_hasher::BuildNoHashHasher; @@ -225,6 +225,17 @@ struct ChannelFactory { future_templates: HashMap, BuildNoHashHasher>, } +impl ChannelFactory { + // TODO channels in groups channel must be handled in a different way + // get the group_id construct group_id ++ channel_id and remove it + // this will be done in a future PR since no one is using them + fn close_channel(&mut self, channel_id: u32) { + self.standard_channels_for_hom_downstreams + .remove(&channel_id); + self.extended_channels.remove(&channel_id); + } +} + impl ChannelFactory { fn add_valid_job(&mut self, job: NewExtendedMiningJob<'static>, group_ids: Vec) { match self.last_valid_jobs { @@ -317,7 +328,7 @@ impl ChannelFactory { hash_rate: f32, min_extranonce_size: u16, additional_coinbase_script_data: Option<&[u8]>, - ) -> Result>, Error> { + ) -> Result<(Vec>, Option), Error> { let extended_channels_group = 0; let max_extranonce_size = self.extranonces.get_range2_len() as u16; if min_extranonce_size <= max_extranonce_size { @@ -378,11 +389,14 @@ impl ChannelFactory { for (job, _) in &self.future_jobs { result.push(Mining::NewExtendedMiningJob(job.clone())) } - Ok(result) + Ok((result, 
Some(channel_id))) } else { - Ok(vec![Mining::OpenMiningChannelError( - OpenMiningChannelError::unsupported_extranonce_size(request_id), - )]) + Ok(( + vec![Mining::OpenMiningChannelError( + OpenMiningChannelError::unsupported_extranonce_size(request_id), + )], + None, + )) } } /// Called when we want to replicate a channel already opened by another actor. @@ -831,6 +845,8 @@ impl ChannelFactory { channel.extranonce.as_ref(), *id, Some(job_id), + // We dont' care about what we have in the additional data since downstream do not + // have to handle extranonces in that case, whatever is ok. additional_coinbase_script_data, ) .unwrap(); @@ -1079,19 +1095,19 @@ pub struct PoolChannelFactory { inner: ChannelFactory, job_creator: JobsCreators, pool_coinbase_outputs: Vec, + // Per channel additional data that the pool may want to include in the coinbase input script + // as first part of the extranonce. This can be used to put things like the pool signature + // or commitments. It is per channel since the pool may want to include different + // commitment data based on downstream hash rate. + // channel_if -> (additional_coinbase_script_data, old_additional_coinbase_script_data) + #[allow(clippy::type_complexity)] + channel_to_additional_coinbase_script_data: + HashMap, Option>), BuildNoHashHasher>, // Additional data that the pool may want to include in the coinbase input script as first part // of the extranonce. This can be used to put things like the pool signature. - // I prepend with _ cause it means that I don't want to use this value directly. - _additional_coinbase_script_data: Vec, - // This is normally set to None. When the pool change the additional_coinbase_script_data we - // set it to Some(old value). We need it cause for a short time frame we will have job that are - // supposed to use the new one and jobs that are supposed to use the old value. As soon we - // have only job that use the new value, this is set to None. 
We do not support more then 2 - // additional_coinbase_script_data at time. - // I prepend with _ cause it means that I don't want to use this value directly. - _additional_coinbase_script_data_old: Option>, + additional_coinbase_script_data: Vec, // channel_id ++ job_id - job_ids_using_old_add_data: HashSet>, + job_ids_using_old_add_data: HashSet>, // extedned_channel_id -> SetCustomMiningJob negotiated_jobs: HashMap, BuildNoHashHasher>, } @@ -1104,9 +1120,9 @@ impl PoolChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: Vec, - _additional_coinbase_script_data: Vec, + additional_coinbase_script_data: Vec, ) -> Result { - if _additional_coinbase_script_data.len() + extranonces.get_len() > 32 { + if additional_coinbase_script_data.len() + extranonces.get_len() > 32 { error!("Additional coinbase script data is too big"); return Err(Error::AdditionalCoinbaseScriptDataTooBig); } @@ -1137,8 +1153,10 @@ impl PoolChannelFactory { inner, job_creator, pool_coinbase_outputs, - _additional_coinbase_script_data, - _additional_coinbase_script_data_old: None, + channel_to_additional_coinbase_script_data: HashMap::with_hasher( + BuildNoHashHasher::default(), + ), + additional_coinbase_script_data, job_ids_using_old_add_data: HashSet::with_hasher(BuildNoHashHasher::default()), negotiated_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), }) @@ -1151,12 +1169,14 @@ impl PoolChannelFactory { is_header_only: bool, id: u32, ) -> Result, Error> { + self.channel_to_additional_coinbase_script_data + .insert(id, (self.additional_coinbase_script_data.clone(), None)); self.inner.add_standard_channel( request_id, downstream_hash_rate, is_header_only, id, - Some(&self.get_last_additional_coinbase_script_data()), + Some(&self.additional_coinbase_script_data), ) } /// Calls [`ChannelFactory::new_extended_channel`] @@ -1166,12 +1186,25 @@ impl PoolChannelFactory { hash_rate: f32, min_extranonce_size: u16, ) -> Result>, Error> { - 
self.inner.new_extended_channel( + match self.inner.new_extended_channel( request_id, hash_rate, min_extranonce_size, - Some(&self.get_last_additional_coinbase_script_data()), - ) + Some(&self.additional_coinbase_script_data), + ) { + // Channel is opened + Ok((res, Some(channel_id))) => { + self.channel_to_additional_coinbase_script_data.insert( + channel_id, + (self.additional_coinbase_script_data.clone(), None), + ); + Ok(res) + } + // Channel is not opened and we can return an error downtream + Ok((res, None)) => Ok(res), + // Channel is not opened and we can not return an error downtream + Err(e) => Err(e), + } } /// Called when we want to replicate a channel already opened by another actor. /// is used only in the jd client from the template provider module to mock a pool. @@ -1183,6 +1216,9 @@ impl PoolChannelFactory { channel_id: u32, extranonce_size: u16, ) -> Option<()> { + // This initialise a PoolChannelFactory for a JDC that can not have + // additional_coinbase_script_data as it is set only by the pool. + assert!(self.additional_coinbase_script_data.is_empty()); self.inner.replicate_upstream_extended_channel_only_jd( target, extranonce, @@ -1216,11 +1252,13 @@ impl PoolChannelFactory { m, true, self.pool_coinbase_outputs.clone(), - self.get_last_additional_coinbase_script_data().len() as u8, + self.additional_coinbase_script_data.len() as u8, )?; self.inner.on_new_extended_mining_job( new_job, - Some(&self.get_last_additional_coinbase_script_data()), + // Here we can use the data that we used to initialize this channel factory. Since this + // value it will be used only to create standard jobs for HOM downstreams. + Some(&self.additional_coinbase_script_data), ) } /// Called when a `SubmitSharesStandard` message is received from the downstream. 
We check the @@ -1309,8 +1347,8 @@ impl PoolChannelFactory { .unwrap(); let prev_blockhash = crate::utils::u256_to_block_hash(referenced_job.prev_hash.clone()); let bits = referenced_job.nbits; - self.inner.check_target( - Share::Extended(m.into_static()), + match self.inner.check_target( + Share::Extended(m.clone().into_static()), target, None, 0, @@ -1320,7 +1358,42 @@ impl PoolChannelFactory { prev_blockhash, bits, Some(&additional_coinbase_script_data), - ) + ) { + // Since this is a share for a custom job and there is no way to know if the share + // do not met target cause pool sent a new extranonce prefix and the miner is still + // using the old one we check also against the old one since we don't want to fail + // in that case. + Ok(OnNewShare::SendErrorDownstream(m_)) => { + match self.get_old_additional_coinbase_script_data(m.channel_id) { + Some(additional_coinbase_script_data) => { + let target = self.job_creator.last_target(); + let referenced_job = self.negotiated_jobs.get(&m.channel_id).unwrap(); + let merkle_path = referenced_job.merkle_path.to_vec(); + let extended_job = job_creator::extended_job_from_custom_job( + referenced_job, + additional_coinbase_script_data.len() as u8, + self.inner.extranonces.get_len() as u8, + ) + .unwrap(); + self.inner.check_target( + Share::Extended(m.into_static()), + target, + None, + 0, + merkle_path, + extended_job.coinbase_tx_prefix.as_ref(), + extended_job.coinbase_tx_suffix.as_ref(), + prev_blockhash, + bits, + Some(&additional_coinbase_script_data), + ) + } + None => Ok(OnNewShare::SendErrorDownstream(m_)), + } + } + Ok(res) => Ok(res), + Err(err) => Err(err), + } } else { let referenced_job = self .inner @@ -1435,25 +1508,90 @@ impl PoolChannelFactory { // TODO ret can not be larger then 32 bytes maybe use the stack for it? 
#[inline(always)] fn get_additional_coinbase_script_data(&self, channel_id: u32, job_id: u32) -> Vec { - let id = ((channel_id as u64) << 32) | (job_id as u64); - match (self.job_ids_using_old_add_data.contains(&id), &self._additional_coinbase_script_data_old) { - (true, Some(additional_coinbase_script_data)) => additional_coinbase_script_data.clone(), - (false, _) => self._additional_coinbase_script_data.clone(), - _ => panic!("Internal error: when job_ids_using_old_add_data contains elements _additional_coinbase_script_data_old must be Some") + debug_assert!({ + let have_old = self.job_ids_using_old_add_data.contains(&job_id); + let not_have_old = self + .channel_to_additional_coinbase_script_data + .get(&channel_id) + .unwrap() + .1 + .is_some(); + if have_old { + !not_have_old + } else { + true + } + }); + match self + .channel_to_additional_coinbase_script_data + .get(&channel_id) + { + Some((add_data, None)) => add_data.clone(), + Some((add_data, Some(old_data))) => { + if self.job_ids_using_old_add_data.contains(&job_id) { + old_data.clone() + } else { + add_data.clone() + } + } + None => panic!("Internal error: channel not initialized can not get additional data"), } } + // TODO ret can not be larger then 32 bytes maybe use the stack for it? #[inline(always)] - fn get_last_additional_coinbase_script_data(&self) -> Vec { - self._additional_coinbase_script_data.clone() - } - - pub fn change_additional_coinbase_script_data(&mut self, new_data: Vec) { - todo!() + fn get_old_additional_coinbase_script_data(&self, channel_id: u32) -> Option> { + self.channel_to_additional_coinbase_script_data + .get(&channel_id)? + .1 + .clone() + } + + /// This set a new additional coinbase script data for a particular channel. Think to keep in + /// mind before using this function: + /// 1. Standard hom channels are not affected by the change + /// 2. 
The new additional data MUST have the exact same len as the additonal data used to + /// initialize the channle factory with PoolChannelFactory::new + /// 3. For job provided by the pool, all the non future sent before the new additional data will + /// have the old additional data. All the future jobs and the non future jobs sent after the + /// new additional data will have the new additional data + /// 4. Custom jobs will be checked against the new additional data, if the check fail we check + /// against the old additional data if also this check fail we return SubmitShareError + pub fn change_additional_coinbase_script_data( + &mut self, + new_data: Vec, + channel_id: u32, + ) -> Result { + if self.additional_coinbase_script_data.len() == new_data.len() { + let mut ids_for_old_data = HashSet::with_hasher(BuildNoHashHasher::default()); + for id in self.inner.id_to_job.keys() { + ids_for_old_data.insert(*id); + } + self.job_ids_using_old_add_data = ids_for_old_data; + match self + .channel_to_additional_coinbase_script_data + .get_mut(&channel_id) + { + Some(data) => { + data.1 = Some(data.0.clone()); + data.0 = new_data.clone(); + let res = SetExtranoncePrefix { + channel_id, + extranonce_prefix: new_data.try_into().expect(""), + }; + Ok(Mining::SetExtranoncePrefix(res)) + } + None => Err(Error::NotFoundChannelId), + } + } else { + Err(Error::NewAdditionalCoinbaseDataLenDoNotMatch) + } } - fn active_jobs(&self) -> Vec { - todo!() + pub fn close_channel(&mut self, channel_id: u32) { + self.channel_to_additional_coinbase_script_data + .retain(|k, _| k != &channel_id); + self.inner.close_channel(channel_id); } } @@ -1541,6 +1679,7 @@ impl ProxyExtendedChannelFactory { ) -> Result, Error> { self.inner .new_extended_channel(request_id, hash_rate, min_extranonce_size, None) + .map(|x| x.0) } /// Called only when a new prev hash is received by a Template Provider when job declaration is /// used. 
It matches the message with a `job_id`, creates a new custom job, and calls diff --git a/protocols/v2/roles-logic-sv2/src/errors.rs b/protocols/v2/roles-logic-sv2/src/errors.rs index da95e2d532..5ad78b42ed 100644 --- a/protocols/v2/roles-logic-sv2/src/errors.rs +++ b/protocols/v2/roles-logic-sv2/src/errors.rs @@ -62,6 +62,7 @@ pub enum Error { LogicErrorMessage(std::boxed::Box>), JDSMissingTransactions, AdditionalCoinbaseScriptDataTooBig, + NewAdditionalCoinbaseDataLenDoNotMatch, } impl From for Error { @@ -155,6 +156,7 @@ impl Display for Error { LogicErrorMessage(e) => write!(f, "Message is well formatted but can not be handled: {:?}", e), JDSMissingTransactions => write!(f, "JD server cannot propagate the block: missing transactions"), AdditionalCoinbaseScriptDataTooBig => write!(f, "Additional coinbase script data too big"), + NewAdditionalCoinbaseDataLenDoNotMatch => write!(f, "Channel factory can update the additional data only if the new data is the same size as the old one"), } } } From 52b2f3d6de5caca71def916424a136af1617d1d0 Mon Sep 17 00:00:00 2001 From: fi3 Date: Mon, 18 Nov 2024 11:24:14 +0100 Subject: [PATCH 09/27] Upgrade roles-logic-sv2, Fix tests --- benches/Cargo.toml | 4 +- protocols/fuzz-tests/Cargo.toml | 2 +- protocols/v2/roles-logic-sv2/Cargo.toml | 6 +- .../src/channel_logic/channel_factory.rs | 165 +++++++++++++++++- .../v2/roles-logic-sv2/src/job_creator.rs | 6 +- .../v2/roles-logic-sv2/src/job_dispatcher.rs | 39 ++++- protocols/v2/subprotocols/mining/Cargo.toml | 4 +- protocols/v2/subprotocols/mining/src/lib.rs | 6 +- roles/Cargo.lock | 4 +- roles/jd-client/Cargo.toml | 2 +- roles/jd-server/Cargo.toml | 4 +- roles/mining-proxy/Cargo.toml | 2 +- roles/pool/Cargo.toml | 2 +- .../src/lib/mining_pool/message_handler.rs | 5 +- roles/test-utils/mining-device/Cargo.toml | 2 +- roles/translator/Cargo.toml | 2 +- .../pool-sri-test-1-standard.json | 2 +- utils/message-generator/Cargo.toml | 2 +- 18 files changed, 219 insertions(+), 40 deletions(-) 
diff --git a/benches/Cargo.toml b/benches/Cargo.toml index 525c526e5b..4041b4a8d8 100644 --- a/benches/Cargo.toml +++ b/benches/Cargo.toml @@ -10,8 +10,8 @@ async-channel = "1.4.0" v1 = { path="../protocols/v1", package="sv1_api", version = "^1.0.0" } serde_json = { version = "1.0.64", default-features = false, features = ["alloc"] } iai="0.1" -mining_sv2 = { path = "../protocols/v2/subprotocols/mining", version = "^1.0.0" } -roles_logic_sv2 = { path = "../protocols/v2/roles-logic-sv2", version = "^1.0.0" } +mining_sv2 = { path = "../protocols/v2/subprotocols/mining", version = "^2.0.0" } +roles_logic_sv2 = { path = "../protocols/v2/roles-logic-sv2", version = "^2.0.0" } framing_sv2 = { version = "2.0.0", path = "../protocols/v2/framing-sv2" } serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } num-bigint = "0.4.3" diff --git a/protocols/fuzz-tests/Cargo.toml b/protocols/fuzz-tests/Cargo.toml index 5f0ee7ef61..fabb3d2b28 100644 --- a/protocols/fuzz-tests/Cargo.toml +++ b/protocols/fuzz-tests/Cargo.toml @@ -19,7 +19,7 @@ arbitrary = { version = "1", features = ["derive"] } rand = "0.8.3" binary_codec_sv2 = { version = "1.0.0", path = "../v2/binary-sv2/no-serde-sv2/codec"} codec_sv2 = { version = "1.0.0", path = "../v2/codec-sv2", features = ["noise_sv2"]} -roles_logic_sv2 = { version = "1.0.0", path = "../v2/roles-logic-sv2"} +roles_logic_sv2 = { version = "2.0.0", path = "../v2/roles-logic-sv2"} affinity = "0.1.1" threadpool = "1.8.1" lazy_static = "1.4.0" diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index f1b73e7e20..691dee313e 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roles_logic_sv2" -version = "1.2.1" +version = "2.0.0" authors = ["The Stratum V2 Developers"] edition = "2018" readme = "README.md" @@ -18,7 +18,7 @@ stratum-common = { version="1.0.0", path = "../../../common", 
features=["bitcoin serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false, optional = true} binary_sv2 = {version = "^1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2", default-features = true } common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^2.0.0" } -mining_sv2 = { path = "../../../protocols/v2/subprotocols/mining", version = "^1.0.0" } +mining_sv2 = { path = "../../../protocols/v2/subprotocols/mining", version = "^2.0.0" } template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } job_declaration_sv2 = { path = "../../../protocols/v2/subprotocols/job-declaration", version = "^1.0.0" } const_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/const-sv2"} @@ -46,4 +46,4 @@ prop_test = ["template_distribution_sv2/prop_test"] disable_nopanic = [] [package.metadata.docs.rs] -all-features = true \ No newline at end of file +all-features = true diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 3e1bfbfb22..7a627cb0cc 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -363,6 +363,7 @@ impl ChannelFactory { additional_coinbase_script_data.unwrap_or(&[]), ) .unwrap(); + dbg!(&extranonce_prefix); let success = OpenExtendedMiningChannelSuccess { request_id, channel_id, @@ -461,13 +462,22 @@ impl ChannelFactory { self.standard_channels_for_hom_downstreams .insert(channel_id, standard_channel); + let extranonce: Vec = match additional_coinbase_script_data { + Some(data) => { + let mut data = data.to_vec(); + data.extend_from_slice(extranonce.as_ref()); + data + } + None => extranonce.into(), + }; + // First message to be sent is OpenStandardMiningChannelSuccess result.push(Mining::OpenStandardMiningChannelSuccess( 
OpenStandardMiningChannelSuccess { request_id: request_id.into(), channel_id, target, - extranonce_prefix: extranonce.into(), + extranonce_prefix: extranonce.try_into().expect("Internal error: On initialization we make sure that extranonce + coinbase script additional data are not > then 32 bytes"), group_channel_id: hom_group_id, }, )); @@ -521,13 +531,13 @@ impl ChannelFactory { self.standard_channels_for_non_hom_downstreams .insert(complete_id, standard_channel); - let extranonce = match additional_coinbase_script_data { + let extranonce: Vec = match additional_coinbase_script_data { Some(data) => { let mut data = data.to_vec(); data.extend_from_slice(extranonce.as_ref()); - extranonce + data } - None => extranonce, + None => extranonce.into(), }; // First message to be sent is OpenStandardMiningChannelSuccess result.push(Mining::OpenStandardMiningChannelSuccess( @@ -535,7 +545,7 @@ impl ChannelFactory { request_id: request_id.into(), channel_id, target, - extranonce_prefix: extranonce.into(), + extranonce_prefix: extranonce.try_into().expect(""), group_channel_id: group_id, }, )); @@ -2092,7 +2102,7 @@ mod test { use super::*; use binary_sv2::{Seq0255, B064K, U256}; use bitcoin::{hash_types::WPubkeyHash, PublicKey, TxOut}; - use mining_sv2::OpenStandardMiningChannel; + use mining_sv2::{OpenExtendedMiningChannel, OpenStandardMiningChannel}; const BLOCK_REWARD: u64 = 2_000_000_000; @@ -2206,7 +2216,8 @@ mod test { channel_kind, vec![out], additional_coinbase_script_data.into_bytes(), - ); + ) + .unwrap(); // Build a NewTemplate let new_template = NewTemplate { @@ -2313,4 +2324,144 @@ mod test { OnNewShare::ShareMeetDownstreamTarget => panic!(), }; } + #[test] + fn test_extranonce_prefix_in_hom() { + let extranonce_prefix1 = [10, 11, 12]; + let (prefix, _, _) = get_coinbase(); + + // Initialize a Channel of type Pool + let out = TxOut {value: BLOCK_REWARD, script_pubkey: 
decode_hex("4104c6d0969c2d98a5c19ba7c36c7937c5edbd60ff2a01397c4afe54f16cd641667ea0049ba6f9e1796ba3c8e49e1b504c532ebbaaa1010c3f7d9b83a8ea7fd800e2ac").unwrap().into()}; + let creator = JobsCreators::new(7); + let share_per_min = 1.0; + let extranonces = ExtendedExtranonce::new(0..0, 0..0, 0..7); + + let ids = Arc::new(Mutex::new(GroupId::new())); + let channel_kind = ExtendedChannelKind::Pool; + let mut channel = PoolChannelFactory::new( + ids, + extranonces, + creator, + share_per_min, + channel_kind, + vec![out], + extranonce_prefix1.clone().into(), + ) + .unwrap(); + + // Build a NewTemplate + let new_template = NewTemplate { + template_id: 10, + future_template: true, + version: VERSION, + coinbase_tx_version: 1, + coinbase_prefix: prefix.try_into().unwrap(), + coinbase_tx_input_sequence: u32::MAX, + coinbase_tx_value_remaining: 5_000_000_000, + coinbase_tx_outputs_count: 0, + coinbase_tx_outputs: get_coinbase_outputs(), + coinbase_tx_locktime: 0, + merkle_path: get_merkle_path(), + }; + + // "Send" the NewTemplate to the channel + let _ = channel.on_new_template(&mut (new_template.clone())); + + // Build a PrevHash + let mut p_hash = decode_hex(PREV_HASH).unwrap(); + p_hash.reverse(); + let prev_hash = SetNewPrevHashFromTp { + template_id: 10, + prev_hash: p_hash.try_into().unwrap(), + header_timestamp: PREV_HEADER_TIMESTAMP, + n_bits: PREV_HEADER_NBITS, + target: nbit_to_target(PREV_HEADER_NBITS), + }; + + // "Send" the SetNewPrevHash to channel + let _ = channel.on_new_prev_hash_from_tp(&prev_hash); + + let result = channel + .add_standard_channel(100, 100_000_000_000_000.0, true, 2) + .unwrap(); + let extranonce_prefix = match &result[0] { + Mining::OpenStandardMiningChannelSuccess(msg) => msg.extranonce_prefix.clone().to_vec(), + _ => panic!(), + }; + assert!(&extranonce_prefix.to_vec()[0..3] == extranonce_prefix1); + } + #[test] + fn test_extranonce_prefix_in_extended() { + let extranonce_prefix1 = [10, 11, 12]; + let extranonce_prefix2 = [14, 11, 12]; + 
let (prefix, _, _) = get_coinbase(); + + // Initialize a Channel of type Pool + let out = TxOut {value: BLOCK_REWARD, script_pubkey: decode_hex("4104c6d0969c2d98a5c19ba7c36c7937c5edbd60ff2a01397c4afe54f16cd641667ea0049ba6f9e1796ba3c8e49e1b504c532ebbaaa1010c3f7d9b83a8ea7fd800e2ac").unwrap().into()}; + let creator = JobsCreators::new(16); + let share_per_min = 1.0; + let extranonces = ExtendedExtranonce::new(0..0, 0..8, 8..16); + + let ids = Arc::new(Mutex::new(GroupId::new())); + let channel_kind = ExtendedChannelKind::Pool; + let mut channel = PoolChannelFactory::new( + ids, + extranonces, + creator, + share_per_min, + channel_kind, + vec![out], + extranonce_prefix1.clone().into(), + ) + .unwrap(); + + // Build a NewTemplate + let new_template = NewTemplate { + template_id: 10, + future_template: true, + version: VERSION, + coinbase_tx_version: 1, + coinbase_prefix: prefix.try_into().unwrap(), + coinbase_tx_input_sequence: u32::MAX, + coinbase_tx_value_remaining: 5_000_000_000, + coinbase_tx_outputs_count: 0, + coinbase_tx_outputs: get_coinbase_outputs(), + coinbase_tx_locktime: 0, + merkle_path: get_merkle_path(), + }; + + // "Send" the NewTemplate to the channel + let _ = channel.on_new_template(&mut (new_template.clone())); + + // Build a PrevHash + let mut p_hash = decode_hex(PREV_HASH).unwrap(); + p_hash.reverse(); + let prev_hash = SetNewPrevHashFromTp { + template_id: 10, + prev_hash: p_hash.try_into().unwrap(), + header_timestamp: PREV_HEADER_TIMESTAMP, + n_bits: PREV_HEADER_NBITS, + target: nbit_to_target(PREV_HEADER_NBITS), + }; + + let _ = channel.on_new_prev_hash_from_tp(&prev_hash); + + let result = channel + .new_extended_channel(100, 100_000_000_000_000.0, 2) + .unwrap(); + let (extranonce_prefix, channel_id) = match &result[0] { + Mining::OpenExtendedMiningChannelSuccess(msg) => { + (msg.extranonce_prefix.clone().to_vec(), msg.channel_id) + } + _ => panic!(), + }; + assert!(&extranonce_prefix.to_vec()[0..3] == extranonce_prefix1); + match channel + 
.change_additional_coinbase_script_data(extranonce_prefix2.to_vec(), channel_id) + { + Ok(Mining::SetExtranoncePrefix(msg)) => { + assert!(&msg.extranonce_prefix.to_vec()[0..3] == extranonce_prefix2); + } + _ => panic!(), + } + } } diff --git a/protocols/v2/roles-logic-sv2/src/job_creator.rs b/protocols/v2/roles-logic-sv2/src/job_creator.rs index e52ee9b787..0e93e87d78 100644 --- a/protocols/v2/roles-logic-sv2/src/job_creator.rs +++ b/protocols/v2/roles-logic-sv2/src/job_creator.rs @@ -564,7 +564,7 @@ pub mod tests { let mut jobs_creators = JobsCreators::new(32); let job = jobs_creators - .on_new_template(template.borrow_mut(), false, vec![out]) + .on_new_template(template.borrow_mut(), false, vec![out], 0) .unwrap(); assert_eq!( @@ -588,7 +588,7 @@ pub mod tests { assert_eq!(jobs_creators.lasts_new_template.len(), 0); - let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out]); + let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out], 0); assert_eq!(jobs_creators.lasts_new_template.len(), 1); assert_eq!(jobs_creators.lasts_new_template[0], template); @@ -622,7 +622,7 @@ pub mod tests { let mut jobs_creators = JobsCreators::new(32); //Create a template - let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out]); + let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out], 0); let test_id = template.template_id; // Create a SetNewPrevHash with matching template_id diff --git a/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs b/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs index a0f4cb6172..780687efe0 100644 --- a/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs +++ b/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs @@ -146,6 +146,7 @@ impl GroupChannelJobDispatcher { &mut self, extended: &NewExtendedMiningJob, channel: &StandardChannel, + additional_coinbase_script_data: &[u8], // should be changed to return a Result> ) -> Option> { if extended.is_future() { @@ -161,9 
+162,17 @@ impl GroupChannelJobDispatcher { let standard_job_id = self.ids.safe_lock(|ids| ids.next()).unwrap(); let extranonce: Vec = channel.extranonce.clone().into(); + let mut prefix: Vec = + Vec::with_capacity(extranonce.len() + additional_coinbase_script_data.len()); + for b in additional_coinbase_script_data { + prefix.push(*b); + } + for b in extranonce { + prefix.push(b); + } let new_mining_job_message = extended_to_standard_job_for_group_channel( extended, - &extranonce, + &prefix, channel.channel_id, standard_job_id, )?; @@ -310,12 +319,13 @@ mod tests { #[test] fn test_group_channel_job_dispatcher() { + let extranonce_len = 16; let out = TxOut { value: BLOCK_REWARD, script_pubkey: Script::new_p2pk(&new_pub_key()), }; - let pool_signature = "Stratum v2 SRI Pool".to_string(); - let mut jobs_creators = JobsCreators::new(32); + let pool_signature = "Stratum v2 SRI".to_string(); + let mut jobs_creators = JobsCreators::new(extranonce_len); let group_channel_id = 1; //Create a template let mut template = template_from_gen(&mut Gen::new(255)); @@ -331,8 +341,9 @@ mod tests { // create standard channel let target = Target::from(U256::try_from(utils::extranonce_gen()).unwrap()); let standard_channel_id = 2; - let extranonce = Extranonce::try_from(utils::extranonce_gen()) - .expect("Failed to convert bytes to extranonce"); + let extranonce = + Extranonce::try_from(utils::extranonce_gen()[0..extranonce_len as usize].to_vec()) + .expect("Failed to convert bytes to extranonce"); let standard_channel = StandardChannel { channel_id: standard_channel_id, group_id: group_channel_id, @@ -341,7 +352,11 @@ mod tests { }; // call target function (on_new_extended_mining_job) let new_mining_job = group_channel_dispatcher - .on_new_extended_mining_job(&extended_mining_job, &standard_channel) + .on_new_extended_mining_job( + &extended_mining_job, + &standard_channel, + &pool_signature.clone().into_bytes(), + ) .unwrap(); // on_new_extended_mining_job assertions @@ -351,6 +366,7 
@@ mod tests { &extended_mining_job, extranonce.clone(), standard_channel_id, + &pool_signature, ); // on_new_prev_hash assertions if extended_mining_job.is_future() { @@ -374,12 +390,21 @@ mod tests { extended_mining_job: &NewExtendedMiningJob, extranonce: Extranonce, standard_channel_id: u32, + pool_signature: &String, ) -> (u32, Vec) { + let extranonce: Vec = extranonce.clone().into(); + let mut prefix: Vec = Vec::new(); + for b in pool_signature.clone().into_bytes() { + prefix.push(b); + } + for b in extranonce { + prefix.push(b); + } // compute test merkle path let new_root = merkle_root_from_path( extended_mining_job.coinbase_tx_prefix.inner_as_ref(), extended_mining_job.coinbase_tx_suffix.inner_as_ref(), - extranonce.to_vec().as_slice(), + prefix.as_slice(), &extended_mining_job.merkle_path.inner_as_ref(), &[], ) diff --git a/protocols/v2/subprotocols/mining/Cargo.toml b/protocols/v2/subprotocols/mining/Cargo.toml index 020ab39ec9..f000187595 100644 --- a/protocols/v2/subprotocols/mining/Cargo.toml +++ b/protocols/v2/subprotocols/mining/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "mining_sv2" -version = "1.0.0" +version = "2.0.0" authors = ["The Stratum V2 Developers"] edition = "2018" readme = "README.md" @@ -28,4 +28,4 @@ no_std = [] with_serde = ["binary_sv2/with_serde", "serde"] [package.metadata.docs.rs] -all-features = true \ No newline at end of file +all-features = true diff --git a/protocols/v2/subprotocols/mining/src/lib.rs b/protocols/v2/subprotocols/mining/src/lib.rs index d71d5605ad..901ee671d1 100644 --- a/protocols/v2/subprotocols/mining/src/lib.rs +++ b/protocols/v2/subprotocols/mining/src/lib.rs @@ -328,12 +328,12 @@ impl Extranonce { None } else { let mut prefix = Vec::with_capacity(prefix_len + additional_coinbase_script_data.len()); - for i in 0..prefix_len { - prefix.push(self.extranonce[i]); - } for b in additional_coinbase_script_data { prefix.push(*b); } + for i in 0..prefix_len { + prefix.push(self.extranonce[i]); + } // unwrap is 
sage as prefix_len can not be greater than 32 cause is not possible to // contruct Extranonce with the inner vecto greater than 32. Some(prefix.try_into().unwrap()) diff --git a/roles/Cargo.lock b/roles/Cargo.lock index 5f834dd456..99dbe286f8 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -1611,7 +1611,7 @@ dependencies = [ [[package]] name = "mining_sv2" -version = "1.0.0" +version = "2.0.0" dependencies = [ "binary_sv2", "const_sv2", @@ -2021,7 +2021,7 @@ dependencies = [ [[package]] name = "roles_logic_sv2" -version = "1.2.1" +version = "2.0.0" dependencies = [ "binary_sv2", "chacha20poly1305", diff --git a/roles/jd-client/Cargo.toml b/roles/jd-client/Cargo.toml index 1cb07ed7c7..ff9a0c9d13 100644 --- a/roles/jd-client/Cargo.toml +++ b/roles/jd-client/Cargo.toml @@ -24,7 +24,7 @@ buffer_sv2 = { version = "^1.0.0", path = "../../utils/buffer" } codec_sv2 = { version = "^1.0.1", path = "../../protocols/v2/codec-sv2", features = ["noise_sv2", "with_buffer_pool"] } framing_sv2 = { version = "^2.0.0", path = "../../protocols/v2/framing-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features=["with_tokio", "with_buffer_pool"] } -roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } futures = "0.3.25" tokio = { version = "1", features = ["full"] } diff --git a/roles/jd-server/Cargo.toml b/roles/jd-server/Cargo.toml index 9e5ad8986a..08d566753a 100644 --- a/roles/jd-server/Cargo.toml +++ b/roles/jd-server/Cargo.toml @@ -26,7 +26,7 @@ const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features = ["with_tokio"] } noise_sv2 = { version = "1.1.0", path = "../../protocols/v2/noise-sv2" } rand = "0.8.4" 
-roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } tokio = { version = "1", features = ["full"] } ext-config = { version = "0.14.0", features = ["toml"], package = "config" } tracing = { version = "0.1" } @@ -38,4 +38,4 @@ serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = hashbrown = { version = "0.11", default-features = false, features = ["ahash", "serde"] } key-utils = { version = "^1.0.0", path = "../../utils/key-utils" } rpc_sv2 = { version = "1.0.0", path = "../roles-utils/rpc" } -hex = "0.4.3" \ No newline at end of file +hex = "0.4.3" diff --git a/roles/mining-proxy/Cargo.toml b/roles/mining-proxy/Cargo.toml index 175fca0392..431a1ec850 100644 --- a/roles/mining-proxy/Cargo.toml +++ b/roles/mining-proxy/Cargo.toml @@ -27,7 +27,7 @@ const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } futures = "0.3.19" network_helpers_sv2 = {version = "2.0.0", path = "../roles-utils/network-helpers", features = ["with_tokio","with_buffer_pool"] } once_cell = "1.12.0" -roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false } tokio = { version = "1", features = ["full"] } ext-config = { version = "0.14.0", features = ["toml"], package = "config" } diff --git a/roles/pool/Cargo.toml b/roles/pool/Cargo.toml index 95c21957bc..626ddbd25b 100644 --- a/roles/pool/Cargo.toml +++ b/roles/pool/Cargo.toml @@ -26,7 +26,7 @@ const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features =["with_tokio","with_buffer_pool"] } noise_sv2 = { version = "1.1.0", path = "../../protocols/v2/noise-sv2" } rand = "0.8.4" 
-roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false } tokio = { version = "1", features = ["full"] } ext-config = { version = "0.14.0", features = ["toml"], package = "config" } diff --git a/roles/pool/src/lib/mining_pool/message_handler.rs b/roles/pool/src/lib/mining_pool/message_handler.rs index 8d8b1ce882..3c07c369c9 100644 --- a/roles/pool/src/lib/mining_pool/message_handler.rs +++ b/roles/pool/src/lib/mining_pool/message_handler.rs @@ -144,7 +144,10 @@ impl ParseDownstreamMiningMessages<(), NullDownstreamMiningSelector, NoRouting> Ok(SendTo::Respond(Mining::SubmitSharesSuccess(success))) }, }, - Err(_) => todo!(), + Err(e) => { + dbg!(e); + panic!("Internal Error: unexpected message from channel factory"); + } } } diff --git a/roles/test-utils/mining-device/Cargo.toml b/roles/test-utils/mining-device/Cargo.toml index 5fa557db08..b0262677f3 100644 --- a/roles/test-utils/mining-device/Cargo.toml +++ b/roles/test-utils/mining-device/Cargo.toml @@ -22,7 +22,7 @@ path = "src/lib/mod.rs" [dependencies] stratum-common = { version = "1.0.0", path = "../../../common" } codec_sv2 = { version = "^1.0.1", path = "../../../protocols/v2/codec-sv2", features=["noise_sv2"] } -roles_logic_sv2 = { version = "1.0.0", path = "../../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "2.0.0", path = "../../../protocols/v2/roles-logic-sv2" } const_sv2 = { version = "2.0.0", path = "../../../protocols/v2/const-sv2" } async-channel = "1.5.1" binary_sv2 = { version = "1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2" } diff --git a/roles/translator/Cargo.toml b/roles/translator/Cargo.toml index cf58e40930..834b92a429 100644 --- a/roles/translator/Cargo.toml +++ b/roles/translator/Cargo.toml @@ -30,7 +30,7 @@ codec_sv2 = { version = "^1.0.1", path = 
"../../protocols/v2/codec-sv2", feature framing_sv2 = { version = "^2.0.0", path = "../../protocols/v2/framing-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features=["async_std", "with_buffer_pool"] } once_cell = "1.12.0" -roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } serde_json = { version = "1.0.64", default-features = false, features = ["alloc"] } futures = "0.3.25" diff --git a/test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.json b/test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.json index 24ae109c50..85cfd0b429 100644 --- a/test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.json +++ b/test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.json @@ -48,7 +48,7 @@ "type": "SubmitSharesStandard", "channel_id": 1, "sequence_number": 0, - "job_id": 0, + "job_id": 1, "nonce": 4035255480, "ntime": 1698941362, "version": 536870912 diff --git a/utils/message-generator/Cargo.toml b/utils/message-generator/Cargo.toml index 55f17d365e..120d03e99f 100644 --- a/utils/message-generator/Cargo.toml +++ b/utils/message-generator/Cargo.toml @@ -20,7 +20,7 @@ codec_sv2 = { version = "1.0.0", path = "../../protocols/v2/codec-sv2", features const_sv2 = { version = "2.0.0", path = "../../protocols/v2/const-sv2" } load_file = "1.0.1" network_helpers_sv2 = { version = "2.0.0", path = "../../roles/roles-utils/network-helpers", features = ["with_tokio","with_serde"] } -roles_logic_sv2 = { version = "1.0.0", path = "../../protocols/v2/roles-logic-sv2", features = ["with_serde"] } +roles_logic_sv2 = { version = "2.0.0", path = "../../protocols/v2/roles-logic-sv2", features = ["with_serde"] } v1 = { version = "^1.0.0", path = 
"../../protocols/v1", package="sv1_api" } serde = { version = "*", features = ["derive", "alloc"], default-features = false } serde_json = { version = "1.0", default-features = false, features = ["alloc"] } From 14cb9331a3d91c13ac928afc587f90dd3e9123ef Mon Sep 17 00:00:00 2001 From: fi3 Date: Mon, 18 Nov 2024 11:52:26 +0100 Subject: [PATCH 10/27] Fix JDC do not use pool signature in config but the implicit one in the extranonce --- .../roles-logic-sv2/src/channel_logic/channel_factory.rs | 1 - roles/jd-client/src/lib/mod.rs | 1 - roles/jd-client/src/lib/upstream_sv2/upstream.rs | 7 +------ .../config-examples/jds-config-local-example.toml | 2 +- 4 files changed, 2 insertions(+), 9 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 7a627cb0cc..064b2f64c9 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -363,7 +363,6 @@ impl ChannelFactory { additional_coinbase_script_data.unwrap_or(&[]), ) .unwrap(); - dbg!(&extranonce_prefix); let success = OpenExtendedMiningChannelSuccess { request_id, channel_id, diff --git a/roles/jd-client/src/lib/mod.rs b/roles/jd-client/src/lib/mod.rs index 467ac52b54..b5b1fc0fa8 100644 --- a/roles/jd-client/src/lib/mod.rs +++ b/roles/jd-client/src/lib/mod.rs @@ -247,7 +247,6 @@ impl JobDeclaratorClient { upstream_addr, upstream_config.authority_pubkey, 0, // TODO - upstream_config.pool_signature.clone(), status::Sender::Upstream(tx_status.clone()), task_collector.clone(), Arc::new(Mutex::new(PoolChangerTrigger::new(timeout))), diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index 9e30f98c1c..03238940c5 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -114,8 +114,6 @@ pub struct Upstream { pub 
min_extranonce_size: u16, #[allow(dead_code)] pub upstream_extranonce1_size: usize, - /// String be included in coinbase tx input scriptsig - pub pool_signature: String, /// Receives messages from the SV2 Upstream role pub receiver: Receiver, /// Sends messages to the SV2 Upstream role @@ -151,7 +149,6 @@ impl Upstream { address: SocketAddr, authority_public_key: Secp256k1PublicKey, min_extranonce_size: u16, - pool_signature: String, tx_status: status::Sender, task_collector: Arc>>, pool_chaneger_trigger: Arc>, @@ -189,7 +186,6 @@ impl Upstream { min_extranonce_size, upstream_extranonce1_size: 16, /* 16 is the default since that is the only value the * pool supports currently */ - pool_signature, tx_status, receiver, sender, @@ -564,7 +560,6 @@ impl ParseUpstreamMiningMessages Result, RolesLogicError> { info!("Receive open extended mining channel success"); let ids = Arc::new(Mutex::new(roles_logic_sv2::utils::GroupId::new())); - let pool_signature = self.pool_signature.clone().into(); let prefix_len = m.extranonce_prefix.to_vec().len(); let self_len = 0; let total_len = prefix_len + m.extranonce_size as usize; @@ -586,7 +581,7 @@ impl ParseUpstreamMiningMessages Date: Mon, 18 Nov 2024 12:36:19 +0100 Subject: [PATCH 11/27] Fix translator config file to use shorter extranonce --- .../config-examples/tproxy-config-local-jdc-example.toml | 4 ++-- roles/translator/src/lib/mod.rs | 1 + roles/translator/src/lib/upstream_sv2/upstream.rs | 6 +++--- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/roles/translator/config-examples/tproxy-config-local-jdc-example.toml b/roles/translator/config-examples/tproxy-config-local-jdc-example.toml index 5fe4a8eebd..1e18457ac2 100644 --- a/roles/translator/config-examples/tproxy-config-local-jdc-example.toml +++ b/roles/translator/config-examples/tproxy-config-local-jdc-example.toml @@ -19,8 +19,8 @@ min_supported_version = 2 # Minimum extranonce2 size for downstream # Max value: 16 (leaves 0 bytes for search space 
splitting of downstreams) # Max value for CGminer: 8 -# Min value: 2 -min_extranonce2_size = 8 +# Min value: 5 +min_extranonce2_size = 5 # Difficulty params [downstream_difficulty_config] diff --git a/roles/translator/src/lib/mod.rs b/roles/translator/src/lib/mod.rs index 7b47a40e6b..e225a133af 100644 --- a/roles/translator/src/lib/mod.rs +++ b/roles/translator/src/lib/mod.rs @@ -206,6 +206,7 @@ impl TranslatorSv2 { upstream.clone(), proxy_config.min_supported_version, proxy_config.max_supported_version, + proxy_config.min_extranonce2_size, ) .await { diff --git a/roles/translator/src/lib/upstream_sv2/upstream.rs b/roles/translator/src/lib/upstream_sv2/upstream.rs index 9b4a2a20c1..bdefe6373d 100644 --- a/roles/translator/src/lib/upstream_sv2/upstream.rs +++ b/roles/translator/src/lib/upstream_sv2/upstream.rs @@ -185,6 +185,7 @@ impl Upstream { self_: Arc>, min_version: u16, max_version: u16, + min_extranonce_size: u16, ) -> ProxyResult<'static, ()> { // Get the `SetupConnection` message with Mining Device information (currently hard coded) let setup_connection = Self::get_setup_connection_message(min_version, max_version, false)?; @@ -241,9 +242,8 @@ impl Upstream { request_id: 0, // TODO user_identity, // TODO nominal_hash_rate, - max_target: u256_from_int(u64::MAX), // TODO - min_extranonce_size: 8, /* 8 is the max extranonce2 size the braiins - * pool supports */ + max_target: u256_from_int(u64::MAX), + min_extranonce_size, }); // reset channel hashrate so downstreams can manage from now on out From 0a6f3d72864d7cec6ce8b9388899877eb85778ad Mon Sep 17 00:00:00 2001 From: fi3 Date: Tue, 19 Nov 2024 10:30:37 +0100 Subject: [PATCH 12/27] Fix MG tests --- .../src/channel_logic/channel_factory.rs | 60 +++++++++++++++---- test/config/pool-config-sri-tp.toml | 2 +- .../pool-mock-tp-standard-coverage.toml | 2 +- test/config/pool-mock-tp.toml | 2 +- .../pool-sri-test-extended_0.json | 4 +- .../pool-sri-test-extended_1.json | 2 +- .../standard-coverage-test.json | 6 +- 
.../translation-proxy-old-share.json | 2 +- 8 files changed, 57 insertions(+), 23 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 064b2f64c9..6bfa5e4c85 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -357,6 +357,18 @@ impl ChannelFactory { .extranonces .next_extended(max_extranonce_size as usize) .unwrap(); + let extranonce_with_stripped_data = extranonce + .into_prefix(self.extranonces.get_prefix_len(), &[]) + .unwrap(); + let success_with_stirpped_extranonce_add_data = OpenExtendedMiningChannelSuccess { + request_id, + channel_id, + target: target.clone(), + extranonce_size: max_extranonce_size, + extranonce_prefix: extranonce_with_stripped_data, + }; + self.extended_channels + .insert(channel_id, success_with_stirpped_extranonce_add_data); let extranonce_prefix = extranonce .into_prefix( self.extranonces.get_prefix_len(), @@ -370,7 +382,6 @@ impl ChannelFactory { extranonce_size: max_extranonce_size, extranonce_prefix, }; - self.extended_channels.insert(channel_id, success.clone()); let mut result = vec![Mining::OpenExtendedMiningChannelSuccess(success)]; if let Some((job, _)) = &self.get_last_valid_job() { let mut job = job.clone(); @@ -909,9 +920,7 @@ impl ChannelFactory { } => upstream_target.clone(), }; - let (downstream_target, extranonce) = self - .get_channel_specific_mining_info(&m) - .ok_or(Error::ShareDoNotMatchAnyChannel)?; + let (downstream_target, extranonce) = self.get_channel_specific_mining_info(&m)?; let extranonce_1_len = self.extranonces.get_range0_len(); let extranonce_2 = extranonce[extranonce_1_len..].to_vec(); match &mut m { @@ -1037,10 +1046,16 @@ impl ChannelFactory { } } /// Returns the downstream target and extranonce for the channel - fn get_channel_specific_mining_info(&self, m: &Share) -> 
Option<(mining_sv2::Target, Vec)> { + fn get_channel_specific_mining_info( + &self, + m: &Share, + ) -> Result<(mining_sv2::Target, Vec), Error> { match m { Share::Extended(share) => { - let channel = self.extended_channels.get(&m.get_channel_id())?; + let channel = self + .extended_channels + .get(&m.get_channel_id()) + .ok_or(Error::ShareDoNotMatchAnyChannel)?; let extranonce_prefix = channel.extranonce_prefix.to_vec(); let dowstream_target = channel.target.clone().into(); let extranonce = [&extranonce_prefix[..], &share.extranonce.to_vec()[..]] @@ -1052,8 +1067,9 @@ impl ChannelFactory { self.extranonces.get_len(), extranonce.len() ); + return Err(Error::InvalidCoinbase); } - Some((dowstream_target, extranonce)) + Ok((dowstream_target, extranonce)) } Share::Standard((share, group_id)) => match &self.kind { ExtendedChannelKind::Pool => { @@ -1066,9 +1082,16 @@ impl ChannelFactory { .standard_channels_for_hom_downstreams .get(&share.channel_id); }; - Some(( - channel?.target.clone(), - channel?.extranonce.clone().to_vec(), + Ok(( + channel + .ok_or(Error::ShareDoNotMatchAnyChannel)? + .target + .clone(), + channel + .ok_or(Error::ShareDoNotMatchAnyChannel)? + .extranonce + .clone() + .to_vec(), )) } ExtendedChannelKind::Proxy { .. } | ExtendedChannelKind::ProxyJd { .. } => { @@ -1081,9 +1104,16 @@ impl ChannelFactory { .standard_channels_for_hom_downstreams .get(&share.channel_id); }; - Some(( - channel?.target.clone(), - channel?.extranonce.clone().to_vec(), + Ok(( + channel + .ok_or(Error::ShareDoNotMatchAnyChannel)? + .target + .clone(), + channel + .ok_or(Error::ShareDoNotMatchAnyChannel)? + .extranonce + .clone() + .to_vec(), )) } }, @@ -1228,6 +1258,10 @@ impl PoolChannelFactory { // This initialise a PoolChannelFactory for a JDC that can not have // additional_coinbase_script_data as it is set only by the pool. 
assert!(self.additional_coinbase_script_data.is_empty()); + self.channel_to_additional_coinbase_script_data.insert( + channel_id, + (self.additional_coinbase_script_data.clone(), None), + ); self.inner.replicate_upstream_extended_channel_only_jd( target, extranonce, diff --git a/test/config/pool-config-sri-tp.toml b/test/config/pool-config-sri-tp.toml index 3d7c18baf7..be433b26aa 100644 --- a/test/config/pool-config-sri-tp.toml +++ b/test/config/pool-config-sri-tp.toml @@ -11,4 +11,4 @@ coinbase_outputs = [ { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, ] # Pool signature (string to be included in coinbase tx) -pool_signature = "Stratum v2 SRI Pool" \ No newline at end of file +pool_signature = "Stratum v2 SRI" diff --git a/test/config/pool-mock-tp-standard-coverage.toml b/test/config/pool-mock-tp-standard-coverage.toml index 181981c3ff..98e4a5cc6b 100644 --- a/test/config/pool-mock-tp-standard-coverage.toml +++ b/test/config/pool-mock-tp-standard-coverage.toml @@ -12,4 +12,4 @@ coinbase_outputs = [ { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, ] # Pool signature (string to be included in coinbase tx) -pool_signature = "Stratum v2 SRI Pool" \ No newline at end of file +pool_signature = "Stratum v2 SRI" diff --git a/test/config/pool-mock-tp.toml b/test/config/pool-mock-tp.toml index 252d0637a6..98e4a5cc6b 100644 --- a/test/config/pool-mock-tp.toml +++ b/test/config/pool-mock-tp.toml @@ -12,4 +12,4 @@ coinbase_outputs = [ { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, ] # Pool signature (string to be included in coinbase tx) -pool_signature = "Stratum v2 SRI Pool" +pool_signature = "Stratum v2 SRI" diff --git a/test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.json 
b/test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.json index b8a5290174..0cbe477ba4 100644 --- a/test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.json +++ b/test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.json @@ -14,7 +14,7 @@ "user_identity": "", "nominal_hash_rate": 10, "max_target": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], - "min_extranonce_size": 16 + "min_extranonce_size": 8 }, "replace_fields": [["request_id", "ARBITRARY"]], "id": "open_extended_mining_channel" @@ -54,7 +54,7 @@ [ [ "extranonce_size", - {"U16": 16} + {"U16": 8} ] ] ] diff --git a/test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.json b/test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.json index 0567ae1e3f..ecd4fa743b 100644 --- a/test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.json +++ b/test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.json @@ -66,7 +66,7 @@ [ [ "coinbase_tx_prefix", - {"B064K": [2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 56, 3, 76, 163, 38, 0, 83, 116, 114, 97, 116, 117, 109, 32, 118, 50, 32, 83, 82, 73, 32, 80, 111, 111, 108]} + {"B064K": [2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 32, 3, 76, 163, 38, 0]} ], [ diff --git a/test/message-generator/test/standard-coverage-test/standard-coverage-test.json b/test/message-generator/test/standard-coverage-test/standard-coverage-test.json index a9a8daa93b..e11d2d7263 100644 --- a/test/message-generator/test/standard-coverage-test/standard-coverage-test.json +++ b/test/message-generator/test/standard-coverage-test/standard-coverage-test.json @@ -54,7 +54,7 @@ "type": "SubmitSharesStandard", "channel_id": 1, 
"sequence_number": 0, - "job_id": 0, + "job_id": 1, "nonce": 927894720, "ntime": 1671039088, "version": 536870912 @@ -66,8 +66,8 @@ "type": "SubmitSharesStandard", "channel_id": 1, "sequence_number": 0, - "job_id": 0, - "nonce": 1751, + "job_id": 1, + "nonce": 1752, "ntime": 1671116742, "version": 536870912 }, diff --git a/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json index 79833f2c04..52ada3a763 100644 --- a/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json +++ b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json @@ -22,7 +22,7 @@ "message": { "id": 0, "method": "mining.submit", - "params": ["username", "0", "0000000000000000", "641577b0", "7a600640"] + "params": ["username", "0", "0000000000", "641577b0", "7a600640"] }, "id": "mining.submit" } From de3c0422d28d65f0520a6e78a10844f3cbab3f15 Mon Sep 17 00:00:00 2001 From: Priceless-P Date: Tue, 3 Dec 2024 13:10:19 +0100 Subject: [PATCH 13/27] add time buffer --- protocols/v2/noise-sv2/src/signature_message.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protocols/v2/noise-sv2/src/signature_message.rs b/protocols/v2/noise-sv2/src/signature_message.rs index c82e4bd087..954e22b514 100644 --- a/protocols/v2/noise-sv2/src/signature_message.rs +++ b/protocols/v2/noise-sv2/src/signature_message.rs @@ -76,7 +76,7 @@ impl SignatureNoiseMessage { .duration_since(SystemTime::UNIX_EPOCH) .unwrap() .as_secs() as u32; - if self.valid_from <= now && self.not_valid_after >= now { + if (self.valid_from - 10) <= now && (self.not_valid_after + 10) >= now { let secp = Secp256k1::verification_only(); let (m, s) = self.split(); // m = SHA-256(version || valid_from || not_valid_after || server_static_key) From 0388ad4d242caa589bf874c7e9b0c78405d6f5f3 Mon Sep 17 00:00:00 2001 From: Priceless-P Date: Tue, 3 Dec 
2024 14:48:34 +0100 Subject: [PATCH 14/27] bump patch version number --- protocols/v2/noise-sv2/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protocols/v2/noise-sv2/Cargo.toml b/protocols/v2/noise-sv2/Cargo.toml index 2bcb8ba6d0..05ca9b3067 100644 --- a/protocols/v2/noise-sv2/Cargo.toml +++ b/protocols/v2/noise-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "noise_sv2" -version = "1.2.0" +version = "1.2.1" authors = ["The Stratum V2 Developers"] edition = "2018" readme = "README.md" From cdcc9c8a86475ec1c9fda9d4102884f90cf09323 Mon Sep 17 00:00:00 2001 From: fi3 Date: Wed, 13 Nov 2024 12:48:52 +0100 Subject: [PATCH 15/27] Update channel factory, coinbase input script handling. Coinbase signature is not part of the Sv2 protocol, some pool maybe want to use it other not. The first part of the extranonce could also be reserved for things that are not a pool signature. This pr rename the pool_signature field of the channel factory into additional_coinbase_script_data and change the type from Strgin to Vec, since can be anything. --- .../src/channel_logic/channel_factory.rs | 33 +++++++++++-------- .../v2/roles-logic-sv2/src/job_creator.rs | 18 +++++----- roles/jd-client/src/lib/downstream.rs | 2 +- .../src/lib/upstream_sv2/upstream.rs | 2 +- roles/pool/src/lib/mining_pool/mod.rs | 2 +- 5 files changed, 31 insertions(+), 26 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 943349ebd2..49161f5617 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -975,7 +975,9 @@ pub struct PoolChannelFactory { inner: ChannelFactory, job_creator: JobsCreators, pool_coinbase_outputs: Vec, - pool_signature: String, + // Additional data that the pool may want to include in the coinbase input script as first part + // of the extranonce. 
This can be used to put things like the pool signature. + additional_coinbase_script_data: Vec, // extedned_channel_id -> SetCustomMiningJob negotiated_jobs: HashMap, BuildNoHashHasher>, } @@ -988,7 +990,7 @@ impl PoolChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: Vec, - pool_signature: String, + additional_coinbase_script_data: Vec, ) -> Self { let inner = ChannelFactory { ids, @@ -1015,7 +1017,7 @@ impl PoolChannelFactory { inner, job_creator, pool_coinbase_outputs, - pool_signature, + additional_coinbase_script_data, negotiated_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), } } @@ -1083,7 +1085,7 @@ impl PoolChannelFactory { m, true, self.pool_coinbase_outputs.clone(), - self.pool_signature.clone(), + &self.additional_coinbase_script_data, )?; self.inner.on_new_extended_mining_job(new_job) } @@ -1160,10 +1162,13 @@ impl PoolChannelFactory { if self.negotiated_jobs.contains_key(&m.channel_id) { let referenced_job = self.negotiated_jobs.get(&m.channel_id).unwrap(); let merkle_path = referenced_job.merkle_path.to_vec(); - let pool_signature = self.pool_signature.clone(); - let extended_job = - job_creator::extended_job_from_custom_job(referenced_job, pool_signature, 32) - .unwrap(); + let additional_coinbase_script_data = self.additional_coinbase_script_data.clone(); + let extended_job = job_creator::extended_job_from_custom_job( + referenced_job, + additional_coinbase_script_data.as_ref(), + 32, + ) + .unwrap(); let prev_blockhash = crate::utils::u256_to_block_hash(referenced_job.prev_hash.clone()); let bits = referenced_job.nbits; self.inner.check_target( @@ -1295,7 +1300,7 @@ pub struct ProxyExtendedChannelFactory { inner: ChannelFactory, job_creator: Option, pool_coinbase_outputs: Option>, - pool_signature: String, + additional_coinbase_script_data: String, // Id assigned to the extended channel by upstream extended_channel_id: u32, } @@ -1309,7 +1314,7 @@ impl ProxyExtendedChannelFactory { share_per_min: f32, 
kind: ExtendedChannelKind, pool_coinbase_outputs: Option>, - pool_signature: String, + additional_coinbase_script_data: String, extended_channel_id: u32, ) -> Self { match &kind { @@ -1349,7 +1354,7 @@ impl ProxyExtendedChannelFactory { inner, job_creator, pool_coinbase_outputs, - pool_signature, + additional_coinbase_script_data, extended_channel_id, } } @@ -1444,7 +1449,7 @@ impl ProxyExtendedChannelFactory { m, true, pool_coinbase_outputs.clone(), - self.pool_signature.clone(), + self.additional_coinbase_script_data.as_ref(), )?; let id = new_job.job_id; if !new_job.is_future() && self.inner.last_prev_hash.is_some() { @@ -1862,7 +1867,7 @@ mod test { // Initialize a Channel of type Pool let out = TxOut {value: BLOCK_REWARD, script_pubkey: decode_hex("4104c6d0969c2d98a5c19ba7c36c7937c5edbd60ff2a01397c4afe54f16cd641667ea0049ba6f9e1796ba3c8e49e1b504c532ebbaaa1010c3f7d9b83a8ea7fd800e2ac").unwrap().into()}; - let pool_signature = "".to_string(); + let additional_coinbase_script_data = "".to_string(); let creator = JobsCreators::new(7); let share_per_min = 1.0; // Create an ExtendedExtranonce of len 7: @@ -1883,7 +1888,7 @@ mod test { share_per_min, channel_kind, vec![out], - pool_signature, + additional_coinbase_script_data, ); // Build a NewTemplate diff --git a/protocols/v2/roles-logic-sv2/src/job_creator.rs b/protocols/v2/roles-logic-sv2/src/job_creator.rs index 1ed6537629..6f1ce1b67a 100644 --- a/protocols/v2/roles-logic-sv2/src/job_creator.rs +++ b/protocols/v2/roles-logic-sv2/src/job_creator.rs @@ -70,7 +70,7 @@ impl JobsCreators { template: &mut NewTemplate, version_rolling_allowed: bool, mut pool_coinbase_outputs: Vec, - pool_signature: String, + additional_coinbase_script_data: &[u8], ) -> Result, Error> { let server_tx_outputs = template.coinbase_tx_outputs.to_vec(); let mut outputs = tx_outputs_to_costum_scripts(&server_tx_outputs); @@ -87,7 +87,7 @@ impl JobsCreators { new_extended_job( template, &mut pool_coinbase_outputs, - pool_signature, + 
additional_coinbase_script_data, next_job_id, version_rolling_allowed, self.extranonce_len, @@ -137,7 +137,7 @@ impl JobsCreators { pub fn extended_job_from_custom_job( referenced_job: &mining_sv2::SetCustomMiningJob, - pool_signature: String, + additional_coinbase_script_data: &[u8], extranonce_len: u8, ) -> Result, Error> { let mut outputs = @@ -158,7 +158,7 @@ pub fn extended_job_from_custom_job( new_extended_job( &mut template, &mut outputs, - pool_signature, + additional_coinbase_script_data, 0, true, extranonce_len, @@ -177,7 +177,7 @@ pub fn extended_job_from_custom_job( fn new_extended_job( new_template: &mut NewTemplate, coinbase_outputs: &mut [TxOut], - pool_signature: String, + additional_coinbase_script_data: &[u8], job_id: u32, version_rolling_allowed: bool, extranonce_len: u8, @@ -193,7 +193,7 @@ fn new_extended_job( .map_err(|_| Error::TxVersionTooBig)?; let bip34_bytes = get_bip_34_bytes(new_template, tx_version)?; - let script_prefix_len = bip34_bytes.len() + pool_signature.as_bytes().len(); + let script_prefix_len = bip34_bytes.len() + additional_coinbase_script_data.len(); let coinbase = coinbase( bip34_bytes, @@ -201,7 +201,7 @@ fn new_extended_job( new_template.coinbase_tx_locktime, new_template.coinbase_tx_input_sequence, coinbase_outputs, - pool_signature, + additional_coinbase_script_data, extranonce_len, ); @@ -327,7 +327,7 @@ fn coinbase( lock_time: u32, sequence: u32, coinbase_outputs: &[TxOut], - pool_signature: String, + additional_coinbase_script_data: &[u8], extranonce_len: u8, ) -> Transaction { // If script_prefix_len is not 0 we are not in a test enviornment and the coinbase have the 0 @@ -336,7 +336,7 @@ fn coinbase( 0 => Witness::from_vec(vec![]), _ => Witness::from_vec(vec![vec![0; 32]]), }; - bip34_bytes.extend_from_slice(pool_signature.as_bytes()); + bip34_bytes.extend_from_slice(additional_coinbase_script_data); bip34_bytes.extend_from_slice(&vec![0; extranonce_len as usize]); let tx_in = TxIn { previous_output: 
OutPoint::null(), diff --git a/roles/jd-client/src/lib/downstream.rs b/roles/jd-client/src/lib/downstream.rs index c5d49d304f..3823b400a3 100644 --- a/roles/jd-client/src/lib/downstream.rs +++ b/roles/jd-client/src/lib/downstream.rs @@ -492,7 +492,7 @@ impl share_per_min, kind, coinbase_outputs, - "SOLO".to_string(), + "SOLO".as_bytes().to_vec(), ); self.status.set_channel(channel_factory); diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index 4877f44e77..25dfb5bc00 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -564,7 +564,7 @@ impl ParseUpstreamMiningMessages Result, RolesLogicError> { info!("Receive open extended mining channel success"); let ids = Arc::new(Mutex::new(roles_logic_sv2::utils::GroupId::new())); - let pool_signature = self.pool_signature.clone(); + let pool_signature = self.pool_signature.clone().into(); let prefix_len = m.extranonce_prefix.to_vec().len(); let self_len = 0; let total_len = prefix_len + m.extranonce_size as usize; diff --git a/roles/pool/src/lib/mining_pool/mod.rs b/roles/pool/src/lib/mining_pool/mod.rs index 2b179c7885..d9e426e5db 100644 --- a/roles/pool/src/lib/mining_pool/mod.rs +++ b/roles/pool/src/lib/mining_pool/mod.rs @@ -618,7 +618,7 @@ impl Pool { share_per_min, kind, pool_coinbase_outputs.expect("Invalid coinbase output in config"), - config.pool_signature.clone(), + config.pool_signature.clone().into_bytes(), ))); let pool = Arc::new(Mutex::new(Pool { downstreams: HashMap::with_hasher(BuildNoHashHasher::default()), From e1368565038f8155831fc8f0f7f540cba9a42fdd Mon Sep 17 00:00:00 2001 From: fi3 Date: Wed, 13 Nov 2024 18:30:23 +0100 Subject: [PATCH 16/27] Update channel factory, coinbase input script handling. The coinbase input script additional data should be sent as part of the extranonce_prefix and not as part of the coinbase_prefix. 
So that a JDC can see what the pool want as coinbase input script additional data without the need to observ the coinbase prefix in job constructed by the pool. --- .../src/channel_logic/channel_factory.rs | 119 +++++++++++++----- .../roles-logic-sv2/src/channel_logic/mod.rs | 6 +- .../src/channel_logic/proxy_group_channel.rs | 3 + .../v2/roles-logic-sv2/src/job_creator.rs | 32 +++-- .../v2/roles-logic-sv2/src/job_dispatcher.rs | 7 +- protocols/v2/roles-logic-sv2/src/utils.rs | 32 ++++- protocols/v2/subprotocols/mining/src/lib.rs | 26 +++- roles/mining-proxy/src/lib/upstream_mining.rs | 1 - roles/test-utils/mining-device-sv1/src/job.rs | 1 + roles/translator/src/lib/proxy/bridge.rs | 1 - 10 files changed, 159 insertions(+), 69 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 49161f5617..18f289a46a 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -224,15 +224,20 @@ impl ChannelFactory { downstream_hash_rate: f32, is_header_only: bool, id: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result, Error> { match is_header_only { - true => { - self.new_standard_channel_for_hom_downstream(request_id, downstream_hash_rate, id) - } + true => self.new_standard_channel_for_hom_downstream( + request_id, + downstream_hash_rate, + id, + additional_coinbase_script_data, + ), false => self.new_standard_channel_for_non_hom_downstream( request_id, downstream_hash_rate, id, + additional_coinbase_script_data, ), } } @@ -248,6 +253,7 @@ impl ChannelFactory { request_id: u32, hash_rate: f32, min_extranonce_size: u16, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result>, Error> { let extended_channels_group = 0; let max_extranonce_size = self.extranonces.get_range2_len() as u16; @@ -278,7 +284,10 @@ impl ChannelFactory { 
.next_extended(max_extranonce_size as usize) .unwrap(); let extranonce_prefix = extranonce - .into_prefix(self.extranonces.get_prefix_len()) + .into_prefix( + self.extranonces.get_prefix_len(), + additional_coinbase_script_data.unwrap_or(&[]), + ) .unwrap(); let success = OpenExtendedMiningChannelSuccess { request_id, @@ -344,6 +353,7 @@ impl ChannelFactory { request_id: u32, downstream_hash_rate: f32, id: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result, Error> { let hom_group_id = 0; let mut result = vec![]; @@ -384,7 +394,11 @@ impl ChannelFactory { group_channel_id: hom_group_id, }, )); - self.prepare_standard_jobs_and_p_hash(&mut result, channel_id)?; + self.prepare_standard_jobs_and_p_hash( + &mut result, + channel_id, + additional_coinbase_script_data, + )?; self.channel_to_group_id.insert(channel_id, hom_group_id); Ok(result) } @@ -396,6 +410,7 @@ impl ChannelFactory { request_id: u32, downstream_hash_rate: f32, group_id: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result, Error> { let mut result = vec![]; let channel_id = self @@ -429,6 +444,14 @@ impl ChannelFactory { self.standard_channels_for_non_hom_downstreams .insert(complete_id, standard_channel); + let extranonce = match additional_coinbase_script_data { + Some(data) => { + let mut data = data.to_vec(); + data.extend_from_slice(extranonce.as_ref()); + extranonce + } + None => extranonce, + }; // First message to be sent is OpenStandardMiningChannelSuccess result.push(Mining::OpenStandardMiningChannelSuccess( OpenStandardMiningChannelSuccess { @@ -450,6 +473,7 @@ impl ChannelFactory { &mut self, result: &mut Vec, channel_id: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result<(), Error> { // Safe cause the function is private and we always add the channel before calling this // funtion @@ -466,9 +490,10 @@ impl ChannelFactory { .map(|j| { extended_to_standard_job( &j.0, - &standard_channel.extranonce.clone().to_vec()[..], + 
standard_channel.extranonce.as_ref(), standard_channel.channel_id, Some(job_id), + additional_coinbase_script_data, ) }) .collect(); @@ -478,9 +503,10 @@ impl ChannelFactory { Some((j, _)) => Some( extended_to_standard_job( j, - &standard_channel.extranonce.clone().to_vec(), + standard_channel.extranonce.as_ref(), standard_channel.channel_id, Some(self.job_ids.next()), + additional_coinbase_script_data, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot)?, ), @@ -677,11 +703,16 @@ impl ChannelFactory { fn on_new_extended_mining_job( &mut self, m: NewExtendedMiningJob<'static>, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result, BuildNoHashHasher>, Error> { match (m.is_future(), &self.last_prev_hash) { (true, _) => { let mut result = HashMap::with_hasher(BuildNoHashHasher::default()); - self.prepare_jobs_for_downstream_on_new_extended(&mut result, &m)?; + self.prepare_jobs_for_downstream_on_new_extended( + &mut result, + &m, + additional_coinbase_script_data, + )?; let mut ids = vec![]; for complete_id in self.standard_channels_for_non_hom_downstreams.keys() { let group_id = GroupId::into_group_id(*complete_id); @@ -694,7 +725,11 @@ impl ChannelFactory { } (false, Some(_)) => { let mut result = HashMap::with_hasher(BuildNoHashHasher::default()); - self.prepare_jobs_for_downstream_on_new_extended(&mut result, &m)?; + self.prepare_jobs_for_downstream_on_new_extended( + &mut result, + &m, + additional_coinbase_script_data, + )?; // If job is not future it must always be paired with the last received prev hash let mut ids = vec![]; for complete_id in self.standard_channels_for_non_hom_downstreams.keys() { @@ -722,14 +757,16 @@ impl ChannelFactory { &mut self, result: &mut HashMap>, m: &NewExtendedMiningJob<'static>, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result<(), Error> { for (id, channel) in &self.standard_channels_for_hom_downstreams { let job_id = self.job_ids.next(); let mut standard_job = extended_to_standard_job( m, - 
&channel.extranonce.clone().to_vec()[..], + channel.extranonce.as_ref(), *id, Some(job_id), + additional_coinbase_script_data, ) .unwrap(); standard_job.channel_id = *id; @@ -769,6 +806,7 @@ impl ChannelFactory { coinbase_tx_suffix: &[u8], prev_blockhash: hash_types::BlockHash, bits: u32, + additional_coinbase_script_data: Option<&[u8]>, ) -> Result { debug!("Checking target for share {:?}", m); let upstream_target = match &self.kind { @@ -806,6 +844,7 @@ impl ChannelFactory { coinbase_tx_suffix, &extranonce[..], &merkle_path[..], + additional_coinbase_script_data.unwrap_or(&[]), ) .ok_or(Error::InvalidCoinbase)? .try_into() @@ -1029,8 +1068,13 @@ impl PoolChannelFactory { is_header_only: bool, id: u32, ) -> Result, Error> { - self.inner - .add_standard_channel(request_id, downstream_hash_rate, is_header_only, id) + self.inner.add_standard_channel( + request_id, + downstream_hash_rate, + is_header_only, + id, + Some(&self.additional_coinbase_script_data), + ) } /// Calls [`ChannelFactory::new_extended_channel`] pub fn new_extended_channel( @@ -1039,8 +1083,12 @@ impl PoolChannelFactory { hash_rate: f32, min_extranonce_size: u16, ) -> Result>, Error> { - self.inner - .new_extended_channel(request_id, hash_rate, min_extranonce_size) + self.inner.new_extended_channel( + request_id, + hash_rate, + min_extranonce_size, + Some(&self.additional_coinbase_script_data), + ) } /// Called when we want to replicate a channel already opened by another actor. /// is used only in the jd client from the template provider module to mock a pool. @@ -1085,9 +1133,10 @@ impl PoolChannelFactory { m, true, self.pool_coinbase_outputs.clone(), - &self.additional_coinbase_script_data, + self.additional_coinbase_script_data.len() as u8, )?; - self.inner.on_new_extended_mining_job(new_job) + self.inner + .on_new_extended_mining_job(new_job, Some(&self.additional_coinbase_script_data)) } /// Called when a `SubmitSharesStandard` message is received from the downstream. 
We check the /// shares against the channel's respective target and return `OnNewShare` to let us know if @@ -1131,6 +1180,7 @@ impl PoolChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + Some(&self.additional_coinbase_script_data), ) } None => { @@ -1162,11 +1212,10 @@ impl PoolChannelFactory { if self.negotiated_jobs.contains_key(&m.channel_id) { let referenced_job = self.negotiated_jobs.get(&m.channel_id).unwrap(); let merkle_path = referenced_job.merkle_path.to_vec(); - let additional_coinbase_script_data = self.additional_coinbase_script_data.clone(); let extended_job = job_creator::extended_job_from_custom_job( referenced_job, - additional_coinbase_script_data.as_ref(), - 32, + self.additional_coinbase_script_data.len() as u8, + self.inner.extranonces.get_len() as u8, ) .unwrap(); let prev_blockhash = crate::utils::u256_to_block_hash(referenced_job.prev_hash.clone()); @@ -1181,6 +1230,7 @@ impl PoolChannelFactory { extended_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + Some(&self.additional_coinbase_script_data), ) } else { let referenced_job = self @@ -1215,6 +1265,7 @@ impl PoolChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + Some(&self.additional_coinbase_script_data), ) } } @@ -1300,7 +1351,6 @@ pub struct ProxyExtendedChannelFactory { inner: ChannelFactory, job_creator: Option, pool_coinbase_outputs: Option>, - additional_coinbase_script_data: String, // Id assigned to the extended channel by upstream extended_channel_id: u32, } @@ -1314,7 +1364,6 @@ impl ProxyExtendedChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: Option>, - additional_coinbase_script_data: String, extended_channel_id: u32, ) -> Self { match &kind { @@ -1354,7 +1403,6 @@ impl ProxyExtendedChannelFactory { inner, job_creator, pool_coinbase_outputs, - additional_coinbase_script_data, extended_channel_id, } } @@ -1367,7 +1415,7 @@ impl ProxyExtendedChannelFactory { id: 
u32, ) -> Result, Error> { self.inner - .add_standard_channel(request_id, downstream_hash_rate, id_header_only, id) + .add_standard_channel(request_id, downstream_hash_rate, id_header_only, id, None) } /// Calls [`ChannelFactory::new_extended_channel`] pub fn new_extended_channel( @@ -1377,7 +1425,7 @@ impl ProxyExtendedChannelFactory { min_extranonce_size: u16, ) -> Result, Error> { self.inner - .new_extended_channel(request_id, hash_rate, min_extranonce_size) + .new_extended_channel(request_id, hash_rate, min_extranonce_size, None) } /// Called only when a new prev hash is received by a Template Provider when job declaration is /// used. It matches the message with a `job_id`, creates a new custom job, and calls @@ -1445,12 +1493,7 @@ impl ProxyExtendedChannelFactory { self.job_creator.as_mut(), self.pool_coinbase_outputs.as_mut(), ) { - let new_job = job_creator.on_new_template( - m, - true, - pool_coinbase_outputs.clone(), - self.additional_coinbase_script_data.as_ref(), - )?; + let new_job = job_creator.on_new_template(m, true, pool_coinbase_outputs.clone(), 0)?; let id = new_job.job_id; if !new_job.is_future() && self.inner.last_prev_hash.is_some() { let prev_hash = self.last_prev_hash().unwrap(); @@ -1473,7 +1516,7 @@ impl ProxyExtendedChannelFactory { future_job: m.future_template, }; return Ok(( - self.inner.on_new_extended_mining_job(new_job)?, + self.inner.on_new_extended_mining_job(new_job, None)?, Some(custom_mining_job), id, )); @@ -1482,7 +1525,11 @@ impl ProxyExtendedChannelFactory { .future_templates .insert(new_job.job_id, m.clone()); } - Ok((self.inner.on_new_extended_mining_job(new_job)?, None, id)) + Ok(( + self.inner.on_new_extended_mining_job(new_job, None)?, + None, + id, + )) } else { panic!("Either channel factory has no job creator or pool_coinbase_outputs are not yet set") } @@ -1551,6 +1598,7 @@ impl ProxyExtendedChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + None, ) } else { let bitcoin_target = 
[0; 32]; @@ -1577,6 +1625,7 @@ impl ProxyExtendedChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + None, ) } } @@ -1632,6 +1681,7 @@ impl ProxyExtendedChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + None, ) } else { let bitcoin_target = [0; 32]; @@ -1658,6 +1708,7 @@ impl ProxyExtendedChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, + None, ) } } @@ -1690,7 +1741,7 @@ impl ProxyExtendedChannelFactory { &mut self, m: NewExtendedMiningJob<'static>, ) -> Result, BuildNoHashHasher>, Error> { - self.inner.on_new_extended_mining_job(m) + self.inner.on_new_extended_mining_job(m, None) } pub fn set_target(&mut self, new_target: &mut Target) { self.inner.kind.set_target(new_target); @@ -1888,7 +1939,7 @@ mod test { share_per_min, channel_kind, vec![out], - additional_coinbase_script_data, + additional_coinbase_script_data.into_bytes(), ); // Build a NewTemplate diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/mod.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/mod.rs index 7b5f0feed7..d66a64d69e 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/mod.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/mod.rs @@ -7,15 +7,17 @@ use std::convert::TryInto; /// convert extended to standard job by calculating the merkle root pub fn extended_to_standard_job<'a>( extended: &NewExtendedMiningJob, - coinbase_script: &[u8], + extranonce: &[u8], channel_id: u32, job_id: Option, + additional_coinbase_script_data: Option<&[u8]>, ) -> Option> { let merkle_root = crate::utils::merkle_root_from_path( extended.coinbase_tx_prefix.inner_as_ref(), extended.coinbase_tx_suffix.inner_as_ref(), - coinbase_script, + extranonce, &extended.merkle_path.inner_as_ref(), + additional_coinbase_script_data.unwrap_or(&[]), ); Some(NewMiningJob { diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/proxy_group_channel.rs 
b/protocols/v2/roles-logic-sv2/src/channel_logic/proxy_group_channel.rs index 65f4cdb3d8..cee3d160e7 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/proxy_group_channel.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/proxy_group_channel.rs @@ -115,6 +115,7 @@ impl GroupChannel { &channel.extranonce.clone().to_vec(), channel.channel_id, None, + None, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot)?; res.push(Mining::NewMiningJob(standard_job)); @@ -126,6 +127,7 @@ impl GroupChannel { &channel.extranonce.clone().to_vec(), channel.channel_id, None, + None, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot)?; @@ -192,6 +194,7 @@ impl GroupChannel { &downstream.extranonce.clone().to_vec(), downstream.channel_id, None, + None, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot) } diff --git a/protocols/v2/roles-logic-sv2/src/job_creator.rs b/protocols/v2/roles-logic-sv2/src/job_creator.rs index 6f1ce1b67a..0e65c35b35 100644 --- a/protocols/v2/roles-logic-sv2/src/job_creator.rs +++ b/protocols/v2/roles-logic-sv2/src/job_creator.rs @@ -70,7 +70,7 @@ impl JobsCreators { template: &mut NewTemplate, version_rolling_allowed: bool, mut pool_coinbase_outputs: Vec, - additional_coinbase_script_data: &[u8], + additional_coinbase_script_data_len: u8, ) -> Result, Error> { let server_tx_outputs = template.coinbase_tx_outputs.to_vec(); let mut outputs = tx_outputs_to_costum_scripts(&server_tx_outputs); @@ -87,7 +87,7 @@ impl JobsCreators { new_extended_job( template, &mut pool_coinbase_outputs, - additional_coinbase_script_data, + additional_coinbase_script_data_len, next_job_id, version_rolling_allowed, self.extranonce_len, @@ -137,7 +137,7 @@ impl JobsCreators { pub fn extended_job_from_custom_job( referenced_job: &mining_sv2::SetCustomMiningJob, - additional_coinbase_script_data: &[u8], + additional_coinbase_script_data_len: u8, extranonce_len: u8, ) -> Result, Error> { let mut outputs = @@ -158,7 +158,7 @@ pub fn extended_job_from_custom_job( new_extended_job( 
&mut template, &mut outputs, - additional_coinbase_script_data, + additional_coinbase_script_data_len, 0, true, extranonce_len, @@ -177,7 +177,7 @@ pub fn extended_job_from_custom_job( fn new_extended_job( new_template: &mut NewTemplate, coinbase_outputs: &mut [TxOut], - additional_coinbase_script_data: &[u8], + additional_coinbase_script_data_len: u8, job_id: u32, version_rolling_allowed: bool, extranonce_len: u8, @@ -193,7 +193,7 @@ fn new_extended_job( .map_err(|_| Error::TxVersionTooBig)?; let bip34_bytes = get_bip_34_bytes(new_template, tx_version)?; - let script_prefix_len = bip34_bytes.len() + additional_coinbase_script_data.len(); + let script_prefix_len = bip34_bytes.len(); let coinbase = coinbase( bip34_bytes, @@ -201,7 +201,7 @@ fn new_extended_job( new_template.coinbase_tx_locktime, new_template.coinbase_tx_input_sequence, coinbase_outputs, - additional_coinbase_script_data, + additional_coinbase_script_data_len, extranonce_len, ); @@ -327,7 +327,7 @@ fn coinbase( lock_time: u32, sequence: u32, coinbase_outputs: &[TxOut], - additional_coinbase_script_data: &[u8], + additional_coinbase_script_data_len: u8, extranonce_len: u8, ) -> Transaction { // If script_prefix_len is not 0 we are not in a test enviornment and the coinbase have the 0 @@ -336,7 +336,7 @@ fn coinbase( 0 => Witness::from_vec(vec![]), _ => Witness::from_vec(vec![vec![0; 32]]), }; - bip34_bytes.extend_from_slice(additional_coinbase_script_data); + bip34_bytes.extend_from_slice(&vec![0_u8; additional_coinbase_script_data_len as usize]); bip34_bytes.extend_from_slice(&vec![0; extranonce_len as usize]); let tx_in = TxIn { previous_output: OutPoint::null(), @@ -421,9 +421,9 @@ impl StrippedCoinbaseTx { } /// the coinbase tx prefix is the LE bytes concatenation of the tx version and all - /// of the tx inputs minus the 32 bytes after the bip34 bytes in the script + /// of the tx inputs minus the extranonce bytes after the bip34 bytes in the script /// and the last input's sequence (used as the 
first entry in the coinbase tx suffix). - /// The last 32 bytes after the bip34 bytes in the script will be used to allow extranonce + /// The last bytes after the bip34 bytes in the script will be used to allow extranonce /// space for the miner. We remove the bip141 marker and flag since it is only used for /// computing the `wtxid` and the legacy `txid` is what is used for computing the merkle root // clippy allow because we dont want to consume self @@ -557,7 +557,7 @@ pub mod tests { let mut jobs_creators = JobsCreators::new(32); let job = jobs_creators - .on_new_template(template.borrow_mut(), false, vec![out], "".to_string()) + .on_new_template(template.borrow_mut(), false, vec![out]) .unwrap(); assert_eq!( @@ -581,8 +581,7 @@ pub mod tests { assert_eq!(jobs_creators.lasts_new_template.len(), 0); - let _ = - jobs_creators.on_new_template(template.borrow_mut(), false, vec![out], "".to_string()); + let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out]); assert_eq!(jobs_creators.lasts_new_template.len(), 1); assert_eq!(jobs_creators.lasts_new_template[0], template); @@ -616,8 +615,7 @@ pub mod tests { let mut jobs_creators = JobsCreators::new(32); //Create a template - let _ = - jobs_creators.on_new_template(template.borrow_mut(), false, vec![out], "".to_string()); + let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out]); let test_id = template.template_id; // Create a SetNewPrevHash with matching template_id @@ -705,7 +703,7 @@ pub mod tests { let extranonce = &[0_u8; 32]; let path: &[binary_sv2::U256] = &[]; let stripped_merkle_root = - merkle_root_from_path(&prefix[..], &suffix[..], extranonce, path).unwrap(); + merkle_root_from_path(&prefix[..], &suffix[..], extranonce, path, &[]).unwrap(); let og_merkle_root = coinbase.txid().to_vec(); assert!( stripped_merkle_root == og_merkle_root, diff --git a/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs b/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs index 
019f9f2742..a0f4cb6172 100644 --- a/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs +++ b/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs @@ -18,8 +18,7 @@ use std::{collections::HashMap, convert::TryInto, sync::Arc}; use stratum_common::bitcoin::hashes::{sha256d, Hash, HashEngine}; -/// Used to convert an extended mining job to a standard mining job. The `extranonce` field must -/// be exactly 32 bytes. +/// Used to convert an extended mining job to a standard mining job pub fn extended_to_standard_job_for_group_channel<'a>( extended: &NewExtendedMiningJob, extranonce: &[u8], @@ -31,6 +30,7 @@ pub fn extended_to_standard_job_for_group_channel<'a>( extended.coinbase_tx_suffix.inner_as_ref(), extranonce, &extended.merkle_path.inner_as_ref(), + &[], ); Some(NewMiningJob { @@ -322,7 +322,7 @@ mod tests { template.template_id = template.template_id % u64::MAX; template.future_template = true; let extended_mining_job = jobs_creators - .on_new_template(&mut template, false, vec![out], pool_signature) + .on_new_template(&mut template, false, vec![out], pool_signature.len() as u8) .expect("Failed to create new job"); // create GroupChannelJobDispatcher @@ -381,6 +381,7 @@ mod tests { extended_mining_job.coinbase_tx_suffix.inner_as_ref(), extranonce.to_vec().as_slice(), &extended_mining_job.merkle_path.inner_as_ref(), + &[], ) .unwrap(); // Assertions diff --git a/protocols/v2/roles-logic-sv2/src/utils.rs b/protocols/v2/roles-logic-sv2/src/utils.rs index d4b6f89441..d85fef1c87 100644 --- a/protocols/v2/roles-logic-sv2/src/utils.rs +++ b/protocols/v2/roles-logic-sv2/src/utils.rs @@ -142,10 +142,16 @@ pub fn merkle_root_from_path>( coinbase_tx_suffix: &[u8], extranonce: &[u8], path: &[T], + additional_coinbase_script_data: &[u8], ) -> Option> { - let mut coinbase = - Vec::with_capacity(coinbase_tx_prefix.len() + coinbase_tx_suffix.len() + extranonce.len()); + let mut coinbase = Vec::with_capacity( + coinbase_tx_prefix.len() + + coinbase_tx_suffix.len() + + 
extranonce.len() + + additional_coinbase_script_data.len(), + ); coinbase.extend_from_slice(coinbase_tx_prefix); + coinbase.extend_from_slice(additional_coinbase_script_data); coinbase.extend_from_slice(extranonce); coinbase.extend_from_slice(coinbase_tx_suffix); let coinbase = match Transaction::deserialize(&coinbase[..]) { @@ -549,6 +555,7 @@ fn test_merkle_root_from_path() { &coinbase_bytes[30..], &coinbase_bytes[20..30], &path, + &[], ) .unwrap(); assert_eq!(expected_root, root); @@ -565,13 +572,20 @@ fn test_merkle_root_from_path() { &coinbase_bytes[30..], &coinbase_bytes[20..30], &path, + &[], ) .unwrap(); assert_eq!(coinbase_id, root); //Target None return path on serialization assert_eq!( - merkle_root_from_path(&coinbase_bytes, &coinbase_bytes, &coinbase_bytes, &path), + merkle_root_from_path( + &coinbase_bytes, + &coinbase_bytes, + &coinbase_bytes, + &path, + &[] + ), None ); } @@ -676,6 +690,7 @@ pub fn get_target( coinbase_tx_suffix, extranonce, &(merkle_path[..]), + &[], ) .unwrap() .try_into() @@ -778,9 +793,14 @@ impl<'a> From> for bitcoin::Block { let id = id.as_ref().to_vec(); path.push(id); } - let merkle_root = - merkle_root_from_path(&coinbase_pre[..], &coinbase_suf[..], &extranonce[..], &path) - .expect("Invalid coinbase"); + let merkle_root = merkle_root_from_path( + &coinbase_pre[..], + &coinbase_suf[..], + &extranonce[..], + &path, + &[], + ) + .expect("Invalid coinbase"); let merkle_root = Hash::from_inner(merkle_root.try_into().unwrap()); let prev_blockhash = u256_to_block_hash(message.prev_hash.into_static()); diff --git a/protocols/v2/subprotocols/mining/src/lib.rs b/protocols/v2/subprotocols/mining/src/lib.rs index 83be80d0be..d71d5605ad 100644 --- a/protocols/v2/subprotocols/mining/src/lib.rs +++ b/protocols/v2/subprotocols/mining/src/lib.rs @@ -106,6 +106,7 @@ //! //! This protocol explicitly expects that upstream server software is able to manage the size of //! 
the hashing space correctly for its clients and can provide new jobs quickly enough. +use alloc::vec::Vec; use binary_sv2::{B032, U256}; use core::{ cmp::{Ord, PartialOrd}, @@ -278,6 +279,12 @@ impl core::convert::TryFrom> for Extranonce { } } +impl AsRef<[u8]> for Extranonce { + fn as_ref(&self) -> &[u8] { + self.extranonce.as_ref() + } +} + impl Extranonce { pub fn new(len: usize) -> Option { if len > MAX_EXTRANONCE_LEN { @@ -312,12 +319,21 @@ impl Extranonce { /// Return only the prefix part of the extranonce /// If the required size is greater than the extranonce len it return None - pub fn into_prefix(&self, prefix_len: usize) -> Option> { + pub fn into_prefix( + &self, + prefix_len: usize, + additional_coinbase_script_data: &[u8], + ) -> Option> { if prefix_len > self.extranonce.len() { None } else { - let mut prefix = self.extranonce.clone(); - prefix.resize(prefix_len, 0); + let mut prefix = Vec::with_capacity(prefix_len + additional_coinbase_script_data.len()); + for i in 0..prefix_len { + prefix.push(self.extranonce[i]); + } + for b in additional_coinbase_script_data { + prefix.push(*b); + } // unwrap is sage as prefix_len can not be greater than 32 cause is not possible to // contruct Extranonce with the inner vecto greater than 32. 
Some(prefix.try_into().unwrap()) @@ -1103,7 +1119,7 @@ pub mod tests { fn test_extranonce_to_prefix() { let inner = vec![1, 2, 3, 4, 5, 6, 7, 8, 9]; let extranone = Extranonce { extranonce: inner }; - let prefix = extranone.into_prefix(4).unwrap(); + let prefix = extranone.into_prefix(4, &[]).unwrap(); assert!(vec![1, 2, 3, 4] == prefix.to_vec()) } @@ -1111,7 +1127,7 @@ pub mod tests { fn test_extranonce_to_prefix_not_greater_than_inner() { let inner = vec![1, 2, 3, 4, 5, 6, 7, 8, 9]; let extranone = Extranonce { extranonce: inner }; - let prefix = extranone.into_prefix(20); + let prefix = extranone.into_prefix(20, &[]); assert!(prefix.is_none()) } diff --git a/roles/mining-proxy/src/lib/upstream_mining.rs b/roles/mining-proxy/src/lib/upstream_mining.rs index dde1e4c04b..af4ea8f80a 100644 --- a/roles/mining-proxy/src/lib/upstream_mining.rs +++ b/roles/mining-proxy/src/lib/upstream_mining.rs @@ -87,7 +87,6 @@ impl ChannelKind { downstream_share_per_minute, kind, Some(vec![]), - String::from(""), up_id, ); *self = Self::Extended(Some(factory)); diff --git a/roles/test-utils/mining-device-sv1/src/job.rs b/roles/test-utils/mining-device-sv1/src/job.rs index 1d6b3d2bcd..73ef57c8dd 100644 --- a/roles/test-utils/mining-device-sv1/src/job.rs +++ b/roles/test-utils/mining-device-sv1/src/job.rs @@ -49,6 +49,7 @@ impl Job { &coinbase_tx_suffix, &extranonce, &path, + &[], ) .unwrap(); let merkle_root: [u8; 32] = merkle_root.try_into().unwrap(); diff --git a/roles/translator/src/lib/proxy/bridge.rs b/roles/translator/src/lib/proxy/bridge.rs index 1525217570..ac65140c46 100644 --- a/roles/translator/src/lib/proxy/bridge.rs +++ b/roles/translator/src/lib/proxy/bridge.rs @@ -101,7 +101,6 @@ impl Bridge { share_per_min, ExtendedChannelKind::Proxy { upstream_target }, None, - String::from(""), up_id, ), future_jobs: vec![], From a3b672b0e25d854c5136bc2e2d247c3fd280d998 Mon Sep 17 00:00:00 2001 From: fi3 Date: Thu, 14 Nov 2024 17:35:54 +0100 Subject: [PATCH 17/27] Update pool to use 
shorter extranonce Update the pool to use an extranonce of 16 bytes rather than 32 so that there is enough space to add the additional coinbase input script data. --- roles/pool/src/lib/mining_pool/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/pool/src/lib/mining_pool/mod.rs b/roles/pool/src/lib/mining_pool/mod.rs index d9e426e5db..f274b26d4f 100644 --- a/roles/pool/src/lib/mining_pool/mod.rs +++ b/roles/pool/src/lib/mining_pool/mod.rs @@ -597,11 +597,11 @@ impl Pool { sender_message_received_signal: Sender<()>, status_tx: status::Sender, ) -> Arc> { - let extranonce_len = 32; + let extranonce_len = 16; let range_0 = std::ops::Range { start: 0, end: 0 }; - let range_1 = std::ops::Range { start: 0, end: 16 }; + let range_1 = std::ops::Range { start: 0, end: 8 }; let range_2 = std::ops::Range { - start: 16, + start: 8, end: extranonce_len, }; let ids = Arc::new(Mutex::new(roles_logic_sv2::utils::GroupId::new())); From 58522b6519c45970bb31148a73ac9d693e3ba73d Mon Sep 17 00:00:00 2001 From: fi3 Date: Thu, 14 Nov 2024 17:40:13 +0100 Subject: [PATCH 18/27] Update channel factory to support more active jobs in the same moment Right now the channel factory only supports one active job at a time. That means that if we receive a share for a job right after we sent downstream a new job that share will be invalid. Now the channel factory keeps track of the last 3 jobs, so we give time to the downstream to receive the job and propagate it down before we stop accepting shares for older jobs. This is useful, and the system can be more responsive: as soon as we change the coinbase additional input script data we can send a new job downstream without worrying about invalidating miners' shares. When the pool receives a prev hash it immediately invalidates all the previous jobs; we still want to refuse shares for stale jobs.
The client can easly handle this situation: when a pool refuse a share it should start a timer and if do not receive a new prev hash (or already have) within n seconds it change pool. --- .../src/channel_logic/channel_factory.rs | 194 ++++++++++++++---- 1 file changed, 155 insertions(+), 39 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 18f289a46a..56ba006161 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -15,7 +15,7 @@ use mining_sv2::{ }; use nohash_hasher::BuildNoHashHasher; -use std::{collections::HashMap, convert::TryInto, sync::Arc}; +use std::{collections::{HashMap, HashSet}, convert::TryInto, sync::Arc}; use template_distribution_sv2::{NewTemplate, SetNewPrevHash as SetNewPrevHashFromTp}; use tracing::{debug, error, info, trace, warn}; @@ -210,13 +210,80 @@ struct ChannelFactory { last_prev_hash: Option<(StagedPhash, Vec)>, last_prev_hash_: Option, // (NewExtendedMiningJob,group ids that already received the job) - last_valid_job: Option<(NewExtendedMiningJob<'static>, Vec)>, + last_valid_jobs: [Option<(NewExtendedMiningJob<'static>, Vec)>;3], + // Index of the last valid job for channel_id ++ job_id + id_to_job: HashMap>, + // Used to understand which is the last added element in last_valid_jobs + added_elements: usize, kind: ExtendedChannelKind, job_ids: Id, channel_to_group_id: HashMap>, future_templates: HashMap, BuildNoHashHasher>, } +impl ChannelFactory { + fn add_valid_job(&mut self, job: NewExtendedMiningJob<'static>, group_ids: Vec) { + match self.last_valid_jobs { + [None,None,None] => { + self.id_to_job.insert(job.job_id, 0); + self.last_valid_jobs[0] = Some((job,group_ids)); + self.added_elements = 1; + }, + [Some(_),None,None] => { + self.id_to_job.insert(job.job_id, 1); + self.last_valid_jobs[1] = Some((job,group_ids)); + 
self.added_elements = 2; + }, + [Some(_),Some(_),None] => { + self.id_to_job.insert(job.job_id, 2); + self.last_valid_jobs[2] = Some((job,group_ids)); + self.added_elements = 3; + }, + [Some(_),Some(_),Some(_)] => { + let to_remove = self.added_elements % 3; + self.id_to_job.retain(|_, v| *v != to_remove as u8); + self.id_to_job.insert(job.job_id, to_remove as u8); + self.last_valid_jobs[to_remove] = Some((job,group_ids)); + }, + _ => panic!("Internal error: invalid last_valid_jobs state"), + } + } + fn get_valid_job(&self, job_id: u32) -> Option<&(NewExtendedMiningJob<'static>, Vec)> { + let index = self.id_to_job.get(&job_id)?; + self.last_valid_jobs[*index as usize].as_ref() + } + fn get_last_valid_job(&self) -> Option<&(NewExtendedMiningJob<'static>, Vec)> { + let index = self.get_last_valid_job_index()?; + self.last_valid_jobs[index as usize].as_ref() + } + fn get_mut_last_valid_job(&mut self) -> Option<&mut (NewExtendedMiningJob<'static>, Vec)> { + let index = self.get_last_valid_job_index()?; + self.last_valid_jobs[index as usize].as_mut() + } + fn get_last_valid_job_index(&self) -> Option { + match self.last_valid_jobs { + [None,None,None] => { + None + }, + [Some(_),None,None] => { + Some(0) + }, + [Some(_),Some(_),None] => { + Some(1) + }, + [Some(_),Some(_),Some(_)] => { + Some(2) + }, + _ => panic!("Internal error: invalid last_valid_jobs state"), + } + } + fn clear_valid_jobs(&mut self) { + self.last_valid_jobs = [None,None,None]; + self.id_to_job.clear(); + self.added_elements = 0; + } +} + impl ChannelFactory { pub fn add_standard_channel( &mut self, @@ -298,7 +365,7 @@ impl ChannelFactory { }; self.extended_channels.insert(channel_id, success.clone()); let mut result = vec![Mining::OpenExtendedMiningChannelSuccess(success)]; - if let Some((job, _)) = &self.last_valid_job { + if let Some((job, _)) = &self.get_last_valid_job() { let mut job = job.clone(); job.set_future(); let j_id = job.job_id; @@ -498,14 +565,15 @@ impl ChannelFactory { }) 
.collect(); + let id = self.job_ids.next(); // OPTIMIZATION the extranonce is cloned so many time but maybe is avoidable? - let last_valid_job = match &self.last_valid_job { + let last_valid_job = match self.get_last_valid_job() { Some((j, _)) => Some( extended_to_standard_job( j, standard_channel.extranonce.as_ref(), standard_channel.channel_id, - Some(self.job_ids.next()), + Some(id), additional_coinbase_script_data, ) .ok_or(Error::ImpossibleToCalculateMerkleRoot)?, @@ -593,10 +661,12 @@ impl ChannelFactory { // This is the same thing of just check if there is a prev hash add it to result if there // is last_job add it to result and add each future job to result. // But using the pattern match is more clear how each option is handled + let last_prev_hash = self.last_prev_hash.clone(); + let is_empty = self.future_jobs.is_empty(); match ( - self.last_prev_hash.as_mut(), - self.last_valid_job.as_mut(), - self.future_jobs.is_empty(), + last_prev_hash, + self.get_mut_last_valid_job(), + is_empty, ) { // If we do not have anything just do nothing (None, None, true) => (), @@ -614,16 +684,17 @@ impl ChannelFactory { } // If we have just a prev hash we need to send it after the SetupConnectionSuccess // message - (Some((prev_h, group_id_p_hash_sent)), None, true) => { + (Some((prev_h, mut group_id_p_hash_sent)), None, true) => { if !group_id_p_hash_sent.contains(&group_id) { let prev_h = prev_h.into_set_p_hash(group_id, None); group_id_p_hash_sent.push(group_id); result.push(Mining::SetNewPrevHash(prev_h.clone())); } + self.last_prev_hash = Some((prev_h, group_id_p_hash_sent)); } // If we have a prev hash and a last valid job we need to send before the prev hash and // the the valid job - (Some((prev_h, group_id_p_hash_sent)), Some((job, group_id_job_sent)), true) => { + (Some((prev_h, mut group_id_p_hash_sent)), Some((job, group_id_job_sent)), true) => { if !group_id_p_hash_sent.contains(&group_id) { let prev_h = prev_h.into_set_p_hash(group_id, Some(job.job_id)); 
group_id_p_hash_sent.push(group_id); @@ -635,9 +706,10 @@ impl ChannelFactory { group_id_job_sent.push(group_id); result.push(Mining::NewExtendedMiningJob(job)); } + self.last_prev_hash = Some((prev_h, group_id_p_hash_sent)); } // If we have everything we need, send before the prev hash and then all the jobs - (Some((prev_h, group_id_p_hash_sent)), Some((job, group_id_job_sent)), false) => { + (Some((prev_h, mut group_id_p_hash_sent)), Some((job, group_id_job_sent)), false) => { if !group_id_p_hash_sent.contains(&group_id) { let prev_h = prev_h.into_set_p_hash(group_id, Some(job.job_id)); group_id_p_hash_sent.push(group_id); @@ -659,6 +731,7 @@ impl ChannelFactory { result.push(Mining::NewExtendedMiningJob(job)); } } + self.last_prev_hash = Some((prev_h, group_id_p_hash_sent)); } // This can not happen because we can not have a valid job without a prev hash (None, Some(_), true) => unreachable!(), @@ -674,6 +747,7 @@ impl ChannelFactory { /// job queue, we move the future job into the valid job slot and store the prev hash as the /// current prev hash to be referenced. 
fn on_new_prev_hash(&mut self, m: StagedPhash) -> Result<(), Error> { + self.clear_valid_jobs(); while let Some(mut job) = self.future_jobs.pop() { if job.0.job_id == m.job_id { let now = std::time::SystemTime::now() @@ -681,10 +755,9 @@ impl ChannelFactory { .unwrap() .as_secs() as u32; job.0.set_no_future(now); - self.last_valid_job = Some(job); + self.add_valid_job(job.0, job.1); break; } - self.last_valid_job = None; } self.future_jobs = vec![]; self.last_prev_hash_ = Some(crate::utils::u256_to_block_hash(m.prev_hash.clone())); @@ -738,7 +811,7 @@ impl ChannelFactory { ids.push(group_id) } } - self.last_valid_job = Some((m, ids)); + self.add_valid_job(m, ids); if let Some((_p_hash, _)) = &self.last_prev_hash { Ok(result) } else { @@ -1016,7 +1089,17 @@ pub struct PoolChannelFactory { pool_coinbase_outputs: Vec, // Additional data that the pool may want to include in the coinbase input script as first part // of the extranonce. This can be used to put things like the pool signature. - additional_coinbase_script_data: Vec, + // I prepend with _ cause it means that I don't want to use this value directly. + _additional_coinbase_script_data: Vec, + // This is normally set to None. When the pool change the additional_coinbase_script_data we + // set it to Some(old value). We need it cause for a short time frame we will have job that are + // supposed to use the new one and jobs that are supposed to use the old value. As soon we + // have only job that use the new value, this is set to None. We do not support more then 2 + // additional_coinbase_script_data at time. + // I prepend with _ cause it means that I don't want to use this value directly. 
+ _additional_coinbase_script_data_old: Option>, + // channel_id ++ job_id + job_ids_using_old_add_data: HashSet>, // extedned_channel_id -> SetCustomMiningJob negotiated_jobs: HashMap, BuildNoHashHasher>, } @@ -1029,7 +1112,7 @@ impl PoolChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: Vec, - additional_coinbase_script_data: Vec, + _additional_coinbase_script_data: Vec, ) -> Self { let inner = ChannelFactory { ids, @@ -1045,7 +1128,9 @@ impl PoolChannelFactory { future_jobs: Vec::new(), last_prev_hash: None, last_prev_hash_: None, - last_valid_job: None, + last_valid_jobs: [None,None,None], + id_to_job: HashMap::with_hasher(BuildNoHashHasher::default()), + added_elements: 0, kind, job_ids: Id::new(), channel_to_group_id: HashMap::with_hasher(BuildNoHashHasher::default()), @@ -1056,7 +1141,9 @@ impl PoolChannelFactory { inner, job_creator, pool_coinbase_outputs, - additional_coinbase_script_data, + _additional_coinbase_script_data, + _additional_coinbase_script_data_old: None, + job_ids_using_old_add_data: HashSet::with_hasher(BuildNoHashHasher::default()), negotiated_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), } } @@ -1073,7 +1160,7 @@ impl PoolChannelFactory { downstream_hash_rate, is_header_only, id, - Some(&self.additional_coinbase_script_data), + Some(&self.get_last_additional_coinbase_script_data()), ) } /// Calls [`ChannelFactory::new_extended_channel`] @@ -1087,7 +1174,7 @@ impl PoolChannelFactory { request_id, hash_rate, min_extranonce_size, - Some(&self.additional_coinbase_script_data), + Some(&self.get_last_additional_coinbase_script_data()), ) } /// Called when we want to replicate a channel already opened by another actor. 
@@ -1133,10 +1220,10 @@ impl PoolChannelFactory { m, true, self.pool_coinbase_outputs.clone(), - self.additional_coinbase_script_data.len() as u8, + self.get_last_additional_coinbase_script_data().len() as u8, )?; self.inner - .on_new_extended_mining_job(new_job, Some(&self.additional_coinbase_script_data)) + .on_new_extended_mining_job(new_job, Some(&self.get_last_additional_coinbase_script_data())) } /// Called when a `SubmitSharesStandard` message is received from the downstream. We check the /// shares against the channel's respective target and return `OnNewShare` to let us know if @@ -1145,12 +1232,13 @@ impl PoolChannelFactory { &mut self, m: SubmitSharesStandard, ) -> Result { + let additional_coinbase_script_data = self.get_additional_coinbase_script_data(m.channel_id,m.job_id); match self.inner.channel_to_group_id.get(&m.channel_id) { Some(g_id) => { let referenced_job = self .inner - .last_valid_job - .clone() + .get_valid_job(m.job_id) + .cloned() .ok_or(Error::ShareDoNotMatchAnyJob)? .0; let merkle_path = referenced_job.merkle_path.to_vec(); @@ -1180,7 +1268,7 @@ impl PoolChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, - Some(&self.additional_coinbase_script_data), + Some(&additional_coinbase_script_data), ) } None => { @@ -1205,6 +1293,7 @@ impl PoolChannelFactory { m: SubmitSharesExtended, ) -> Result { let target = self.job_creator.last_target(); + let additional_coinbase_script_data = self.get_additional_coinbase_script_data(m.channel_id,m.job_id); // When downstream set a custom mining job we add the job to the negotiated job // hashmap, with the extended channel id as a key. 
Whenever the pool receive a share must // first check if the channel have a negotiated job if so we can not retreive the template @@ -1214,7 +1303,7 @@ impl PoolChannelFactory { let merkle_path = referenced_job.merkle_path.to_vec(); let extended_job = job_creator::extended_job_from_custom_job( referenced_job, - self.additional_coinbase_script_data.len() as u8, + additional_coinbase_script_data.len() as u8, self.inner.extranonces.get_len() as u8, ) .unwrap(); @@ -1230,13 +1319,13 @@ impl PoolChannelFactory { extended_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, - Some(&self.additional_coinbase_script_data), + Some(&additional_coinbase_script_data), ) } else { let referenced_job = self .inner - .last_valid_job - .clone() + .get_valid_job(m.job_id) + .cloned() .ok_or(Error::ShareDoNotMatchAnyJob)? .0; let merkle_path = referenced_job.merkle_path.to_vec(); @@ -1265,7 +1354,7 @@ impl PoolChannelFactory { referenced_job.coinbase_tx_suffix.as_ref(), prev_blockhash, bits, - Some(&self.additional_coinbase_script_data), + Some(&additional_coinbase_script_data), ) } } @@ -1342,6 +1431,31 @@ impl PoolChannelFactory { pub fn set_target(&mut self, new_target: &mut Target) { self.inner.kind.set_target(new_target); } + + // TODO ret can not be larger then 32 bytes maybe use the stack for it? + #[inline(always)] + fn get_additional_coinbase_script_data(&self, channel_id: u32, job_id: u32) -> Vec { + let id = ((channel_id as u64) << 32) | (job_id as u64); + match (self.job_ids_using_old_add_data.contains(&id), &self._additional_coinbase_script_data_old) { + (true, Some(additional_coinbase_script_data)) => additional_coinbase_script_data.clone(), + (false, _) => self._additional_coinbase_script_data.clone(), + _ => panic!("Internal error: when job_ids_using_old_add_data contains elements _additional_coinbase_script_data_old must be Some") + + } + } + // TODO ret can not be larger then 32 bytes maybe use the stack for it? 
+ #[inline(always)] + fn get_last_additional_coinbase_script_data(&self) -> Vec { + self._additional_coinbase_script_data.clone() + } + + pub fn change_additional_coinbase_script_data(&mut self, new_data: Vec) { + todo!() + } + + fn active_jobs(&self) -> Vec { + todo!() + } } /// Used by proxies that want to open extended channls with upstream. If the proxy has job @@ -1393,7 +1507,9 @@ impl ProxyExtendedChannelFactory { future_jobs: Vec::new(), last_prev_hash: None, last_prev_hash_: None, - last_valid_job: None, + last_valid_jobs: [None,None,None], + id_to_job: HashMap::with_hasher(BuildNoHashHasher::default()), + added_elements: 0, kind, job_ids: Id::new(), channel_to_group_id: HashMap::with_hasher(BuildNoHashHasher::default()), @@ -1544,7 +1660,7 @@ impl ProxyExtendedChannelFactory { ) -> Result { let merkle_path = self .inner - .last_valid_job + .get_valid_job(m.job_id) .as_ref() .ok_or(Error::ShareDoNotMatchAnyJob)? .0 @@ -1553,8 +1669,8 @@ impl ProxyExtendedChannelFactory { let referenced_job = self .inner - .last_valid_job - .clone() + .get_valid_job(m.job_id) + .cloned() .ok_or(Error::ShareDoNotMatchAnyJob)? .0; @@ -1639,7 +1755,7 @@ impl ProxyExtendedChannelFactory { ) -> Result { let merkle_path = self .inner - .last_valid_job + .get_valid_job(m.job_id) .as_ref() .ok_or(Error::ShareDoNotMatchAnyJob)? .0 @@ -1647,8 +1763,8 @@ impl ProxyExtendedChannelFactory { .to_vec(); let referenced_job = self .inner - .last_valid_job - .clone() + .get_valid_job(m.job_id) + .cloned() .ok_or(Error::ShareDoNotMatchAnyJob)? 
.0; match self.inner.channel_to_group_id.get(&m.channel_id) { @@ -1656,7 +1772,7 @@ impl ProxyExtendedChannelFactory { if let Some(job_creator) = self.job_creator.as_mut() { let template_id = job_creator .get_template_id_from_job( - self.inner.last_valid_job.as_ref().unwrap().0.job_id, + self.inner.get_valid_job(m.job_id).as_ref().unwrap().0.job_id, ) .ok_or(Error::NoTemplateForId)?; let bitcoin_target = job_creator.last_target(); @@ -1747,7 +1863,7 @@ impl ProxyExtendedChannelFactory { self.inner.kind.set_target(new_target); } pub fn last_valid_job_version(&self) -> Option { - self.inner.last_valid_job.as_ref().map(|j| j.0.version) + self.inner.get_last_valid_job().as_ref().map(|j| j.0.version) } /// Returns the full extranonce, extranonce1 (static for channel) + extranonce2 (miner nonce /// space) From 081aa2feaa8f829a6a687784e84f6ff8edecafe8 Mon Sep 17 00:00:00 2001 From: fi3 Date: Fri, 15 Nov 2024 16:42:23 +0100 Subject: [PATCH 19/27] Fix coinbase_prefix, pool channel factory new This commit fixes 2 minor things: When we calculate the coinbase_prefix (what we need to put in the extended job) we need to also account for the coinbase input script additional data that is part of the extranonce. When we create the pool channel factory we pass an extranonce creator and a pool signature. If the signature + extranonce are bigger than 32 bytes we have to return an error. That is because in sv2 the extranonce can not be longer than 32 bytes.
--- .../src/channel_logic/channel_factory.rs | 97 ++++++++++--------- protocols/v2/roles-logic-sv2/src/errors.rs | 2 + .../v2/roles-logic-sv2/src/job_creator.rs | 13 ++- roles/jd-client/src/lib/downstream.rs | 3 +- .../src/lib/upstream_sv2/upstream.rs | 3 +- roles/pool/src/lib/mining_pool/mod.rs | 27 +++--- .../src/lib/upstream_sv2/upstream.rs | 3 +- 7 files changed, 84 insertions(+), 64 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 56ba006161..dfb141d7b3 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -15,7 +15,11 @@ use mining_sv2::{ }; use nohash_hasher::BuildNoHashHasher; -use std::{collections::{HashMap, HashSet}, convert::TryInto, sync::Arc}; +use std::{ + collections::{HashMap, HashSet}, + convert::TryInto, + sync::Arc, +}; use template_distribution_sv2::{NewTemplate, SetNewPrevHash as SetNewPrevHashFromTp}; use tracing::{debug, error, info, trace, warn}; @@ -210,9 +214,9 @@ struct ChannelFactory { last_prev_hash: Option<(StagedPhash, Vec)>, last_prev_hash_: Option, // (NewExtendedMiningJob,group ids that already received the job) - last_valid_jobs: [Option<(NewExtendedMiningJob<'static>, Vec)>;3], + last_valid_jobs: [Option<(NewExtendedMiningJob<'static>, Vec)>; 3], // Index of the last valid job for channel_id ++ job_id - id_to_job: HashMap>, + id_to_job: HashMap>, // Used to understand which is the last added element in last_valid_jobs added_elements: usize, kind: ExtendedChannelKind, @@ -224,27 +228,27 @@ struct ChannelFactory { impl ChannelFactory { fn add_valid_job(&mut self, job: NewExtendedMiningJob<'static>, group_ids: Vec) { match self.last_valid_jobs { - [None,None,None] => { + [None, None, None] => { self.id_to_job.insert(job.job_id, 0); - self.last_valid_jobs[0] = Some((job,group_ids)); + self.last_valid_jobs[0] = 
Some((job, group_ids)); self.added_elements = 1; - }, - [Some(_),None,None] => { + } + [Some(_), None, None] => { self.id_to_job.insert(job.job_id, 1); - self.last_valid_jobs[1] = Some((job,group_ids)); + self.last_valid_jobs[1] = Some((job, group_ids)); self.added_elements = 2; - }, - [Some(_),Some(_),None] => { + } + [Some(_), Some(_), None] => { self.id_to_job.insert(job.job_id, 2); - self.last_valid_jobs[2] = Some((job,group_ids)); + self.last_valid_jobs[2] = Some((job, group_ids)); self.added_elements = 3; - }, - [Some(_),Some(_),Some(_)] => { + } + [Some(_), Some(_), Some(_)] => { let to_remove = self.added_elements % 3; self.id_to_job.retain(|_, v| *v != to_remove as u8); self.id_to_job.insert(job.job_id, to_remove as u8); - self.last_valid_jobs[to_remove] = Some((job,group_ids)); - }, + self.last_valid_jobs[to_remove] = Some((job, group_ids)); + } _ => panic!("Internal error: invalid last_valid_jobs state"), } } @@ -262,23 +266,15 @@ impl ChannelFactory { } fn get_last_valid_job_index(&self) -> Option { match self.last_valid_jobs { - [None,None,None] => { - None - }, - [Some(_),None,None] => { - Some(0) - }, - [Some(_),Some(_),None] => { - Some(1) - }, - [Some(_),Some(_),Some(_)] => { - Some(2) - }, + [None, None, None] => None, + [Some(_), None, None] => Some(0), + [Some(_), Some(_), None] => Some(1), + [Some(_), Some(_), Some(_)] => Some(2), _ => panic!("Internal error: invalid last_valid_jobs state"), } } fn clear_valid_jobs(&mut self) { - self.last_valid_jobs = [None,None,None]; + self.last_valid_jobs = [None, None, None]; self.id_to_job.clear(); self.added_elements = 0; } @@ -663,11 +659,7 @@ impl ChannelFactory { // But using the pattern match is more clear how each option is handled let last_prev_hash = self.last_prev_hash.clone(); let is_empty = self.future_jobs.is_empty(); - match ( - last_prev_hash, - self.get_mut_last_valid_job(), - is_empty, - ) { + match (last_prev_hash, self.get_mut_last_valid_job(), is_empty) { // If we do not have anything 
just do nothing (None, None, true) => (), // If we have only future jobs we need to send them all after the @@ -1113,7 +1105,11 @@ impl PoolChannelFactory { kind: ExtendedChannelKind, pool_coinbase_outputs: Vec, _additional_coinbase_script_data: Vec, - ) -> Self { + ) -> Result { + if _additional_coinbase_script_data.len() + extranonces.get_len() > 32 { + error!("Additional coinbase script data is too big"); + return Err(Error::AdditionalCoinbaseScriptDataTooBig); + } let inner = ChannelFactory { ids, standard_channels_for_non_hom_downstreams: HashMap::with_hasher( @@ -1128,7 +1124,7 @@ impl PoolChannelFactory { future_jobs: Vec::new(), last_prev_hash: None, last_prev_hash_: None, - last_valid_jobs: [None,None,None], + last_valid_jobs: [None, None, None], id_to_job: HashMap::with_hasher(BuildNoHashHasher::default()), added_elements: 0, kind, @@ -1137,7 +1133,7 @@ impl PoolChannelFactory { future_templates: HashMap::with_hasher(BuildNoHashHasher::default()), }; - Self { + Ok(Self { inner, job_creator, pool_coinbase_outputs, @@ -1145,7 +1141,7 @@ impl PoolChannelFactory { _additional_coinbase_script_data_old: None, job_ids_using_old_add_data: HashSet::with_hasher(BuildNoHashHasher::default()), negotiated_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), - } + }) } /// Calls [`ChannelFactory::add_standard_channel`] pub fn add_standard_channel( @@ -1222,8 +1218,10 @@ impl PoolChannelFactory { self.pool_coinbase_outputs.clone(), self.get_last_additional_coinbase_script_data().len() as u8, )?; - self.inner - .on_new_extended_mining_job(new_job, Some(&self.get_last_additional_coinbase_script_data())) + self.inner.on_new_extended_mining_job( + new_job, + Some(&self.get_last_additional_coinbase_script_data()), + ) } /// Called when a `SubmitSharesStandard` message is received from the downstream. 
We check the /// shares against the channel's respective target and return `OnNewShare` to let us know if @@ -1232,7 +1230,8 @@ impl PoolChannelFactory { &mut self, m: SubmitSharesStandard, ) -> Result { - let additional_coinbase_script_data = self.get_additional_coinbase_script_data(m.channel_id,m.job_id); + let additional_coinbase_script_data = + self.get_additional_coinbase_script_data(m.channel_id, m.job_id); match self.inner.channel_to_group_id.get(&m.channel_id) { Some(g_id) => { let referenced_job = self @@ -1293,7 +1292,8 @@ impl PoolChannelFactory { m: SubmitSharesExtended, ) -> Result { let target = self.job_creator.last_target(); - let additional_coinbase_script_data = self.get_additional_coinbase_script_data(m.channel_id,m.job_id); + let additional_coinbase_script_data = + self.get_additional_coinbase_script_data(m.channel_id, m.job_id); // When downstream set a custom mining job we add the job to the negotiated job // hashmap, with the extended channel id as a key. Whenever the pool receive a share must // first check if the channel have a negotiated job if so we can not retreive the template @@ -1440,7 +1440,6 @@ impl PoolChannelFactory { (true, Some(additional_coinbase_script_data)) => additional_coinbase_script_data.clone(), (false, _) => self._additional_coinbase_script_data.clone(), _ => panic!("Internal error: when job_ids_using_old_add_data contains elements _additional_coinbase_script_data_old must be Some") - } } // TODO ret can not be larger then 32 bytes maybe use the stack for it? 
@@ -1507,7 +1506,7 @@ impl ProxyExtendedChannelFactory { future_jobs: Vec::new(), last_prev_hash: None, last_prev_hash_: None, - last_valid_jobs: [None,None,None], + last_valid_jobs: [None, None, None], id_to_job: HashMap::with_hasher(BuildNoHashHasher::default()), added_elements: 0, kind, @@ -1772,7 +1771,12 @@ impl ProxyExtendedChannelFactory { if let Some(job_creator) = self.job_creator.as_mut() { let template_id = job_creator .get_template_id_from_job( - self.inner.get_valid_job(m.job_id).as_ref().unwrap().0.job_id, + self.inner + .get_valid_job(m.job_id) + .as_ref() + .unwrap() + .0 + .job_id, ) .ok_or(Error::NoTemplateForId)?; let bitcoin_target = job_creator.last_target(); @@ -1863,7 +1867,10 @@ impl ProxyExtendedChannelFactory { self.inner.kind.set_target(new_target); } pub fn last_valid_job_version(&self) -> Option { - self.inner.get_last_valid_job().as_ref().map(|j| j.0.version) + self.inner + .get_last_valid_job() + .as_ref() + .map(|j| j.0.version) } /// Returns the full extranonce, extranonce1 (static for channel) + extranonce2 (miner nonce /// space) diff --git a/protocols/v2/roles-logic-sv2/src/errors.rs b/protocols/v2/roles-logic-sv2/src/errors.rs index 20c4bcd554..da95e2d532 100644 --- a/protocols/v2/roles-logic-sv2/src/errors.rs +++ b/protocols/v2/roles-logic-sv2/src/errors.rs @@ -61,6 +61,7 @@ pub enum Error { HashrateError(InputError), LogicErrorMessage(std::boxed::Box>), JDSMissingTransactions, + AdditionalCoinbaseScriptDataTooBig, } impl From for Error { @@ -153,6 +154,7 @@ impl Display for Error { HashrateError(e) => write!(f, "Impossible to get Hashrate: {:?}", e), LogicErrorMessage(e) => write!(f, "Message is well formatted but can not be handled: {:?}", e), JDSMissingTransactions => write!(f, "JD server cannot propagate the block: missing transactions"), + AdditionalCoinbaseScriptDataTooBig => write!(f, "Additional coinbase script data too big"), } } } diff --git a/protocols/v2/roles-logic-sv2/src/job_creator.rs 
b/protocols/v2/roles-logic-sv2/src/job_creator.rs index 0e65c35b35..e52ee9b787 100644 --- a/protocols/v2/roles-logic-sv2/src/job_creator.rs +++ b/protocols/v2/roles-logic-sv2/src/job_creator.rs @@ -224,7 +224,12 @@ fn new_extended_job( version_rolling_allowed, merkle_path: new_template.merkle_path.clone().into_static(), coinbase_tx_prefix: coinbase_tx_prefix(&coinbase, script_prefix_len)?, - coinbase_tx_suffix: coinbase_tx_suffix(&coinbase, extranonce_len, script_prefix_len)?, + coinbase_tx_suffix: coinbase_tx_suffix( + &coinbase, + extranonce_len, + script_prefix_len, + additional_coinbase_script_data_len as usize, + )?, }; debug!( @@ -249,10 +254,10 @@ fn coinbase_tx_prefix( }; let index = 4 // tx version + segwit_bytes - + 1 // number of inputs TODO can be also 3 + + 1 // number of inputs (always 1) + 32 // prev OutPoint + 4 // index - + 1 // bytes in script TODO can be also 3 + + 1 // bytes in script (max 100 so always 1 byte) + script_prefix_len; // bip34_bytes let r = encoded[0..index].to_vec(); r.try_into().map_err(Error::BinarySv2Error) @@ -264,6 +269,7 @@ fn coinbase_tx_suffix( coinbase: &Transaction, extranonce_len: u8, script_prefix_len: usize, + additional_coinbase_script_data_len: usize, ) -> Result, Error> { let encoded = coinbase.serialize(); // If script_prefix_len is not 0 we are not in a test enviornment and the coinbase have the 0 @@ -279,6 +285,7 @@ fn coinbase_tx_suffix( + 4 // index + 1 // bytes in script TODO can be also 3 + script_prefix_len // bip34_bytes + + additional_coinbase_script_data_len + (extranonce_len as usize)..] 
.to_vec(); r.try_into().map_err(Error::BinarySv2Error) diff --git a/roles/jd-client/src/lib/downstream.rs b/roles/jd-client/src/lib/downstream.rs index 3823b400a3..36dad45142 100644 --- a/roles/jd-client/src/lib/downstream.rs +++ b/roles/jd-client/src/lib/downstream.rs @@ -493,7 +493,8 @@ impl kind, coinbase_outputs, "SOLO".as_bytes().to_vec(), - ); + ) + .expect("Signature + extranonce lens exceed 32 bytes"); self.status.set_channel(channel_factory); let request_id = m.request_id; diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index 25dfb5bc00..4461c40a09 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -587,7 +587,8 @@ impl ParseUpstreamMiningMessages, status_tx: status::Sender, ) -> Arc> { - let extranonce_len = 16; + let extranonce_len = 13; let range_0 = std::ops::Range { start: 0, end: 0 }; - let range_1 = std::ops::Range { start: 0, end: 8 }; + let range_1 = std::ops::Range { start: 0, end: 5 }; let range_2 = std::ops::Range { - start: 8, + start: 5, end: extranonce_len, }; let ids = Arc::new(Mutex::new(roles_logic_sv2::utils::GroupId::new())); @@ -611,15 +611,18 @@ impl Pool { let creator = JobsCreators::new(extranonce_len as u8); let share_per_min = 1.0; let kind = roles_logic_sv2::channel_logic::channel_factory::ExtendedChannelKind::Pool; - let channel_factory = Arc::new(Mutex::new(PoolChannelFactory::new( - ids, - extranonces, - creator, - share_per_min, - kind, - pool_coinbase_outputs.expect("Invalid coinbase output in config"), - config.pool_signature.clone().into_bytes(), - ))); + let channel_factory = Arc::new(Mutex::new( + PoolChannelFactory::new( + ids, + extranonces, + creator, + share_per_min, + kind, + pool_coinbase_outputs.expect("Invalid coinbase output in config"), + config.pool_signature.clone().into_bytes(), + ) + .expect("Signature + extranonce lens exceed 32 bytes"), + )); let pool = 
Arc::new(Mutex::new(Pool { downstreams: HashMap::with_hasher(BuildNoHashHasher::default()), solution_sender, diff --git a/roles/translator/src/lib/upstream_sv2/upstream.rs b/roles/translator/src/lib/upstream_sv2/upstream.rs index c025c14071..9b4a2a20c1 100644 --- a/roles/translator/src/lib/upstream_sv2/upstream.rs +++ b/roles/translator/src/lib/upstream_sv2/upstream.rs @@ -171,8 +171,7 @@ impl Upstream { job_id: None, last_job_id: None, min_extranonce_size, - upstream_extranonce1_size: 16, /* 16 is the default since that is the only value the - * pool supports currently */ + upstream_extranonce1_size: 8, tx_sv2_extranonce, tx_status, target, From f5895c9c3c74b4c0767af839b6edd9bca32ca3ca Mon Sep 17 00:00:00 2001 From: fi3 Date: Fri, 15 Nov 2024 17:21:38 +0100 Subject: [PATCH 20/27] Fix translator segwit remover The translator normalizes the coinbase and removes the segwit data from the coinbase prefix and suffix. In order to do that it needs to know the extranonce len; we used a default value of 32 bytes, but the pool could also use smaller extranonces. --- .../roles-logic-sv2/src/channel_logic/channel_factory.rs | 4 ++++ roles/translator/src/lib/proxy/bridge.rs | 9 +++++++++ roles/translator/src/lib/proxy/next_mining_notify.rs | 3 ++- test/config/interop-jd-translator/pool-config.toml | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index dfb141d7b3..bccb389ff8 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -1924,6 +1924,10 @@ impl ProxyExtendedChannelFactory { ) -> Option { self.inner.update_target_for_channel(channel_id, new_target) } + + pub fn get_extranonce_len(&self) -> usize { + self.inner.extranonces.get_len() + } } /// Used by proxies for tracking upstream targets. 
diff --git a/roles/translator/src/lib/proxy/bridge.rs b/roles/translator/src/lib/proxy/bridge.rs index ac65140c46..a931e391eb 100644 --- a/roles/translator/src/lib/proxy/bridge.rs +++ b/roles/translator/src/lib/proxy/bridge.rs @@ -340,6 +340,10 @@ impl Bridge { }) .map_err(|_| PoisonLock)?; + let extranonce_len = self_ + .safe_lock(|s| s.channel_factory.get_extranonce_len()) + .unwrap(); + let mut match_a_future_job = false; while let Some(job) = future_jobs.pop() { if job.job_id == sv2_set_new_prev_hash.job_id { @@ -349,6 +353,7 @@ impl Bridge { sv2_set_new_prev_hash.clone(), job, true, + extranonce_len, ); // Get the sender to send the mining.notify to the Downstream @@ -428,6 +433,9 @@ impl Bridge { .on_new_extended_mining_job(sv2_new_extended_mining_job.as_static().clone()) }) .map_err(|_| PoisonLock)??; + let extranonce_len = self_ + .safe_lock(|s| s.channel_factory.get_extranonce_len()) + .unwrap(); // If future_job=true, this job is meant for a future SetNewPrevHash that the proxy // has yet to receive. Insert this new job into the job_mapper . @@ -456,6 +464,7 @@ impl Bridge { last_p_hash, sv2_new_extended_mining_job.clone(), false, + extranonce_len, ); // Get the sender to send the mining.notify to the Downstream tx_sv1_notify.send(notify.clone())?; diff --git a/roles/translator/src/lib/proxy/next_mining_notify.rs b/roles/translator/src/lib/proxy/next_mining_notify.rs index 7bcaf44f1a..c611986672 100644 --- a/roles/translator/src/lib/proxy/next_mining_notify.rs +++ b/roles/translator/src/lib/proxy/next_mining_notify.rs @@ -16,9 +16,10 @@ pub fn create_notify( new_prev_hash: SetNewPrevHash<'static>, new_job: NewExtendedMiningJob<'static>, clean_jobs: bool, + extranonce_len: usize, ) -> server_to_client::Notify<'static> { // TODO 32 must be changed! 
- let new_job = extended_job_to_non_segwit(new_job, 32) + let new_job = extended_job_to_non_segwit(new_job, extranonce_len) .expect("failed to convert extended job to non segwit"); // Make sure that SetNewPrevHash + NewExtendedMiningJob is matching (not future) let job_id = new_job.job_id.to_string(); diff --git a/test/config/interop-jd-translator/pool-config.toml b/test/config/interop-jd-translator/pool-config.toml index 9de7b11b12..91d50f203e 100644 --- a/test/config/interop-jd-translator/pool-config.toml +++ b/test/config/interop-jd-translator/pool-config.toml @@ -16,7 +16,7 @@ coinbase_outputs = [ ] # Pool signature (string to be included in coinbase tx) # e.g. "Foundry USA", "Antpool", "/ViaBTC/Mined by gitgab19", etc -pool_signature = "Stratum v2 SRI Pool - gitgab19" +pool_signature = "Stratum v2 SRI Pool" # Template Provider config # hosted testnet TP From f179fe5d5f29b69bad9979e93643c858dc901dd5 Mon Sep 17 00:00:00 2001 From: fi3 Date: Sat, 16 Nov 2024 10:30:48 +0100 Subject: [PATCH 21/27] Update extranonce size for sv1 devices in translator config --- .../config-examples/tproxy-config-hosted-pool-example.toml | 2 +- .../config-examples/tproxy-config-local-pool-example.toml | 2 +- test/config/tproxy-config-no-jd-sv1-cpu-md.toml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/roles/translator/config-examples/tproxy-config-hosted-pool-example.toml b/roles/translator/config-examples/tproxy-config-hosted-pool-example.toml index 47d4ea8758..cd1202f5bf 100644 --- a/roles/translator/config-examples/tproxy-config-hosted-pool-example.toml +++ b/roles/translator/config-examples/tproxy-config-hosted-pool-example.toml @@ -20,7 +20,7 @@ min_supported_version = 2 # Max value: 16 (leaves 0 bytes for search space splitting of downstreams) # Max value for CGminer: 8 # Min value: 2 -min_extranonce2_size = 8 +min_extranonce2_size = 5 # Difficulty params [downstream_difficulty_config] diff --git 
a/roles/translator/config-examples/tproxy-config-local-pool-example.toml b/roles/translator/config-examples/tproxy-config-local-pool-example.toml index b4359d5ab4..12d6f2f173 100644 --- a/roles/translator/config-examples/tproxy-config-local-pool-example.toml +++ b/roles/translator/config-examples/tproxy-config-local-pool-example.toml @@ -20,7 +20,7 @@ min_supported_version = 2 # Max value: 16 (leaves 0 bytes for search space splitting of downstreams) # Max value for CGminer: 8 # Min value: 2 -min_extranonce2_size = 8 +min_extranonce2_size = 5 # Difficulty params [downstream_difficulty_config] diff --git a/test/config/tproxy-config-no-jd-sv1-cpu-md.toml b/test/config/tproxy-config-no-jd-sv1-cpu-md.toml index 7c90479f0a..2b05ec8793 100644 --- a/test/config/tproxy-config-no-jd-sv1-cpu-md.toml +++ b/test/config/tproxy-config-no-jd-sv1-cpu-md.toml @@ -24,7 +24,7 @@ min_supported_version = 2 # Max value: 16 (leaves 0 bytes for search space splitting of downstreams) # Max value for CGminer: 8 # Min value: 2 -min_extranonce2_size = 8 +min_extranonce2_size = 5 coinbase_reward_sat = 5_000_000_000 # optional jn config, if set the tproxy start on JN mode @@ -45,4 +45,4 @@ shares_per_minute = 100.0 # interval in seconds to elapse before updating channel hashrate with the pool channel_diff_update_interval = 60 # estimated accumulated hashrate of all downstream miners -channel_nominal_hashrate = 500.0 \ No newline at end of file +channel_nominal_hashrate = 500.0 From b803a4f2cb96fe4a4482196a1dd6e585eee6d678 Mon Sep 17 00:00:00 2001 From: fi3 Date: Sat, 16 Nov 2024 13:53:27 +0100 Subject: [PATCH 22/27] Update pool channel factory, send new extranonce prefix Add a method that the pool can use in order to change coinbase_script_additional_data of an already opened channel and send downstream the new extranonce_prefix --- .../src/channel_logic/channel_factory.rs | 225 ++++++++++++++---- protocols/v2/roles-logic-sv2/src/errors.rs | 2 + 2 files changed, 184 insertions(+), 43 
deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index bccb389ff8..3e1bfbfb22 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -10,8 +10,8 @@ use crate::{ use mining_sv2::{ ExtendedExtranonce, NewExtendedMiningJob, NewMiningJob, OpenExtendedMiningChannelSuccess, OpenMiningChannelError, OpenStandardMiningChannelSuccess, SetCustomMiningJob, - SetCustomMiningJobSuccess, SetNewPrevHash, SubmitSharesError, SubmitSharesExtended, - SubmitSharesStandard, Target, + SetCustomMiningJobSuccess, SetExtranoncePrefix, SetNewPrevHash, SubmitSharesError, + SubmitSharesExtended, SubmitSharesStandard, Target, }; use nohash_hasher::BuildNoHashHasher; @@ -225,6 +225,17 @@ struct ChannelFactory { future_templates: HashMap, BuildNoHashHasher>, } +impl ChannelFactory { + // TODO channels in groups channel must be handled in a different way + // get the group_id construct group_id ++ channel_id and remove it + // this will be done in a future PR since no one is using them + fn close_channel(&mut self, channel_id: u32) { + self.standard_channels_for_hom_downstreams + .remove(&channel_id); + self.extended_channels.remove(&channel_id); + } +} + impl ChannelFactory { fn add_valid_job(&mut self, job: NewExtendedMiningJob<'static>, group_ids: Vec) { match self.last_valid_jobs { @@ -317,7 +328,7 @@ impl ChannelFactory { hash_rate: f32, min_extranonce_size: u16, additional_coinbase_script_data: Option<&[u8]>, - ) -> Result>, Error> { + ) -> Result<(Vec>, Option), Error> { let extended_channels_group = 0; let max_extranonce_size = self.extranonces.get_range2_len() as u16; if min_extranonce_size <= max_extranonce_size { @@ -378,11 +389,14 @@ impl ChannelFactory { for (job, _) in &self.future_jobs { result.push(Mining::NewExtendedMiningJob(job.clone())) } - Ok(result) + Ok((result, 
Some(channel_id))) } else { - Ok(vec![Mining::OpenMiningChannelError( - OpenMiningChannelError::unsupported_extranonce_size(request_id), - )]) + Ok(( + vec![Mining::OpenMiningChannelError( + OpenMiningChannelError::unsupported_extranonce_size(request_id), + )], + None, + )) } } /// Called when we want to replicate a channel already opened by another actor. @@ -831,6 +845,8 @@ impl ChannelFactory { channel.extranonce.as_ref(), *id, Some(job_id), + // We dont' care about what we have in the additional data since downstream do not + // have to handle extranonces in that case, whatever is ok. additional_coinbase_script_data, ) .unwrap(); @@ -1079,19 +1095,19 @@ pub struct PoolChannelFactory { inner: ChannelFactory, job_creator: JobsCreators, pool_coinbase_outputs: Vec, + // Per channel additional data that the pool may want to include in the coinbase input script + // as first part of the extranonce. This can be used to put things like the pool signature + // or commitments. It is per channel since the pool may want to include different + // commitment data based on downstream hash rate. + // channel_if -> (additional_coinbase_script_data, old_additional_coinbase_script_data) + #[allow(clippy::type_complexity)] + channel_to_additional_coinbase_script_data: + HashMap, Option>), BuildNoHashHasher>, // Additional data that the pool may want to include in the coinbase input script as first part // of the extranonce. This can be used to put things like the pool signature. - // I prepend with _ cause it means that I don't want to use this value directly. - _additional_coinbase_script_data: Vec, - // This is normally set to None. When the pool change the additional_coinbase_script_data we - // set it to Some(old value). We need it cause for a short time frame we will have job that are - // supposed to use the new one and jobs that are supposed to use the old value. As soon we - // have only job that use the new value, this is set to None. 
We do not support more then 2 - // additional_coinbase_script_data at time. - // I prepend with _ cause it means that I don't want to use this value directly. - _additional_coinbase_script_data_old: Option>, + additional_coinbase_script_data: Vec, // channel_id ++ job_id - job_ids_using_old_add_data: HashSet>, + job_ids_using_old_add_data: HashSet>, // extedned_channel_id -> SetCustomMiningJob negotiated_jobs: HashMap, BuildNoHashHasher>, } @@ -1104,9 +1120,9 @@ impl PoolChannelFactory { share_per_min: f32, kind: ExtendedChannelKind, pool_coinbase_outputs: Vec, - _additional_coinbase_script_data: Vec, + additional_coinbase_script_data: Vec, ) -> Result { - if _additional_coinbase_script_data.len() + extranonces.get_len() > 32 { + if additional_coinbase_script_data.len() + extranonces.get_len() > 32 { error!("Additional coinbase script data is too big"); return Err(Error::AdditionalCoinbaseScriptDataTooBig); } @@ -1137,8 +1153,10 @@ impl PoolChannelFactory { inner, job_creator, pool_coinbase_outputs, - _additional_coinbase_script_data, - _additional_coinbase_script_data_old: None, + channel_to_additional_coinbase_script_data: HashMap::with_hasher( + BuildNoHashHasher::default(), + ), + additional_coinbase_script_data, job_ids_using_old_add_data: HashSet::with_hasher(BuildNoHashHasher::default()), negotiated_jobs: HashMap::with_hasher(BuildNoHashHasher::default()), }) @@ -1151,12 +1169,14 @@ impl PoolChannelFactory { is_header_only: bool, id: u32, ) -> Result, Error> { + self.channel_to_additional_coinbase_script_data + .insert(id, (self.additional_coinbase_script_data.clone(), None)); self.inner.add_standard_channel( request_id, downstream_hash_rate, is_header_only, id, - Some(&self.get_last_additional_coinbase_script_data()), + Some(&self.additional_coinbase_script_data), ) } /// Calls [`ChannelFactory::new_extended_channel`] @@ -1166,12 +1186,25 @@ impl PoolChannelFactory { hash_rate: f32, min_extranonce_size: u16, ) -> Result>, Error> { - 
self.inner.new_extended_channel( + match self.inner.new_extended_channel( request_id, hash_rate, min_extranonce_size, - Some(&self.get_last_additional_coinbase_script_data()), - ) + Some(&self.additional_coinbase_script_data), + ) { + // Channel is opened + Ok((res, Some(channel_id))) => { + self.channel_to_additional_coinbase_script_data.insert( + channel_id, + (self.additional_coinbase_script_data.clone(), None), + ); + Ok(res) + } + // Channel is not opened and we can return an error downtream + Ok((res, None)) => Ok(res), + // Channel is not opened and we can not return an error downtream + Err(e) => Err(e), + } } /// Called when we want to replicate a channel already opened by another actor. /// is used only in the jd client from the template provider module to mock a pool. @@ -1183,6 +1216,9 @@ impl PoolChannelFactory { channel_id: u32, extranonce_size: u16, ) -> Option<()> { + // This initialise a PoolChannelFactory for a JDC that can not have + // additional_coinbase_script_data as it is set only by the pool. + assert!(self.additional_coinbase_script_data.is_empty()); self.inner.replicate_upstream_extended_channel_only_jd( target, extranonce, @@ -1216,11 +1252,13 @@ impl PoolChannelFactory { m, true, self.pool_coinbase_outputs.clone(), - self.get_last_additional_coinbase_script_data().len() as u8, + self.additional_coinbase_script_data.len() as u8, )?; self.inner.on_new_extended_mining_job( new_job, - Some(&self.get_last_additional_coinbase_script_data()), + // Here we can use the data that we used to initialize this channel factory. Since this + // value it will be used only to create standard jobs for HOM downstreams. + Some(&self.additional_coinbase_script_data), ) } /// Called when a `SubmitSharesStandard` message is received from the downstream. 
We check the @@ -1309,8 +1347,8 @@ impl PoolChannelFactory { .unwrap(); let prev_blockhash = crate::utils::u256_to_block_hash(referenced_job.prev_hash.clone()); let bits = referenced_job.nbits; - self.inner.check_target( - Share::Extended(m.into_static()), + match self.inner.check_target( + Share::Extended(m.clone().into_static()), target, None, 0, @@ -1320,7 +1358,42 @@ impl PoolChannelFactory { prev_blockhash, bits, Some(&additional_coinbase_script_data), - ) + ) { + // Since this is a share for a custom job and there is no way to know if the share + // do not met target cause pool sent a new extranonce prefix and the miner is still + // using the old one we check also against the old one since we don't want to fail + // in that case. + Ok(OnNewShare::SendErrorDownstream(m_)) => { + match self.get_old_additional_coinbase_script_data(m.channel_id) { + Some(additional_coinbase_script_data) => { + let target = self.job_creator.last_target(); + let referenced_job = self.negotiated_jobs.get(&m.channel_id).unwrap(); + let merkle_path = referenced_job.merkle_path.to_vec(); + let extended_job = job_creator::extended_job_from_custom_job( + referenced_job, + additional_coinbase_script_data.len() as u8, + self.inner.extranonces.get_len() as u8, + ) + .unwrap(); + self.inner.check_target( + Share::Extended(m.into_static()), + target, + None, + 0, + merkle_path, + extended_job.coinbase_tx_prefix.as_ref(), + extended_job.coinbase_tx_suffix.as_ref(), + prev_blockhash, + bits, + Some(&additional_coinbase_script_data), + ) + } + None => Ok(OnNewShare::SendErrorDownstream(m_)), + } + } + Ok(res) => Ok(res), + Err(err) => Err(err), + } } else { let referenced_job = self .inner @@ -1435,25 +1508,90 @@ impl PoolChannelFactory { // TODO ret can not be larger then 32 bytes maybe use the stack for it? 
#[inline(always)] fn get_additional_coinbase_script_data(&self, channel_id: u32, job_id: u32) -> Vec { - let id = ((channel_id as u64) << 32) | (job_id as u64); - match (self.job_ids_using_old_add_data.contains(&id), &self._additional_coinbase_script_data_old) { - (true, Some(additional_coinbase_script_data)) => additional_coinbase_script_data.clone(), - (false, _) => self._additional_coinbase_script_data.clone(), - _ => panic!("Internal error: when job_ids_using_old_add_data contains elements _additional_coinbase_script_data_old must be Some") + debug_assert!({ + let have_old = self.job_ids_using_old_add_data.contains(&job_id); + let not_have_old = self + .channel_to_additional_coinbase_script_data + .get(&channel_id) + .unwrap() + .1 + .is_some(); + if have_old { + !not_have_old + } else { + true + } + }); + match self + .channel_to_additional_coinbase_script_data + .get(&channel_id) + { + Some((add_data, None)) => add_data.clone(), + Some((add_data, Some(old_data))) => { + if self.job_ids_using_old_add_data.contains(&job_id) { + old_data.clone() + } else { + add_data.clone() + } + } + None => panic!("Internal error: channel not initialized can not get additional data"), } } + // TODO ret can not be larger then 32 bytes maybe use the stack for it? #[inline(always)] - fn get_last_additional_coinbase_script_data(&self) -> Vec { - self._additional_coinbase_script_data.clone() - } - - pub fn change_additional_coinbase_script_data(&mut self, new_data: Vec) { - todo!() + fn get_old_additional_coinbase_script_data(&self, channel_id: u32) -> Option> { + self.channel_to_additional_coinbase_script_data + .get(&channel_id)? + .1 + .clone() + } + + /// This set a new additional coinbase script data for a particular channel. Think to keep in + /// mind before using this function: + /// 1. Standard hom channels are not affected by the change + /// 2. 
The new additional data MUST have the exact same len as the additonal data used to + /// initialize the channle factory with PoolChannelFactory::new + /// 3. For job provided by the pool, all the non future sent before the new additional data will + /// have the old additional data. All the future jobs and the non future jobs sent after the + /// new additional data will have the new additional data + /// 4. Custom jobs will be checked against the new additional data, if the check fail we check + /// against the old additional data if also this check fail we return SubmitShareError + pub fn change_additional_coinbase_script_data( + &mut self, + new_data: Vec, + channel_id: u32, + ) -> Result { + if self.additional_coinbase_script_data.len() == new_data.len() { + let mut ids_for_old_data = HashSet::with_hasher(BuildNoHashHasher::default()); + for id in self.inner.id_to_job.keys() { + ids_for_old_data.insert(*id); + } + self.job_ids_using_old_add_data = ids_for_old_data; + match self + .channel_to_additional_coinbase_script_data + .get_mut(&channel_id) + { + Some(data) => { + data.1 = Some(data.0.clone()); + data.0 = new_data.clone(); + let res = SetExtranoncePrefix { + channel_id, + extranonce_prefix: new_data.try_into().expect(""), + }; + Ok(Mining::SetExtranoncePrefix(res)) + } + None => Err(Error::NotFoundChannelId), + } + } else { + Err(Error::NewAdditionalCoinbaseDataLenDoNotMatch) + } } - fn active_jobs(&self) -> Vec { - todo!() + pub fn close_channel(&mut self, channel_id: u32) { + self.channel_to_additional_coinbase_script_data + .retain(|k, _| k != &channel_id); + self.inner.close_channel(channel_id); } } @@ -1541,6 +1679,7 @@ impl ProxyExtendedChannelFactory { ) -> Result, Error> { self.inner .new_extended_channel(request_id, hash_rate, min_extranonce_size, None) + .map(|x| x.0) } /// Called only when a new prev hash is received by a Template Provider when job declaration is /// used. 
It matches the message with a `job_id`, creates a new custom job, and calls diff --git a/protocols/v2/roles-logic-sv2/src/errors.rs b/protocols/v2/roles-logic-sv2/src/errors.rs index da95e2d532..5ad78b42ed 100644 --- a/protocols/v2/roles-logic-sv2/src/errors.rs +++ b/protocols/v2/roles-logic-sv2/src/errors.rs @@ -62,6 +62,7 @@ pub enum Error { LogicErrorMessage(std::boxed::Box>), JDSMissingTransactions, AdditionalCoinbaseScriptDataTooBig, + NewAdditionalCoinbaseDataLenDoNotMatch, } impl From for Error { @@ -155,6 +156,7 @@ impl Display for Error { LogicErrorMessage(e) => write!(f, "Message is well formatted but can not be handled: {:?}", e), JDSMissingTransactions => write!(f, "JD server cannot propagate the block: missing transactions"), AdditionalCoinbaseScriptDataTooBig => write!(f, "Additional coinbase script data too big"), + NewAdditionalCoinbaseDataLenDoNotMatch => write!(f, "Channel factory can update the additional data only if the new data is the same size as the old one"), } } } From f7009fdc1138091848397d5844e703eb9f32dd27 Mon Sep 17 00:00:00 2001 From: fi3 Date: Mon, 18 Nov 2024 11:24:14 +0100 Subject: [PATCH 23/27] Upgrade roles-logic-sv2, Fix tests --- benches/Cargo.toml | 4 +- protocols/fuzz-tests/Cargo.toml | 2 +- protocols/v2/roles-logic-sv2/Cargo.toml | 6 +- .../src/channel_logic/channel_factory.rs | 165 +++++++++++++++++- .../v2/roles-logic-sv2/src/job_creator.rs | 6 +- .../v2/roles-logic-sv2/src/job_dispatcher.rs | 39 ++++- protocols/v2/subprotocols/mining/Cargo.toml | 4 +- protocols/v2/subprotocols/mining/src/lib.rs | 6 +- roles/Cargo.lock | 4 +- roles/jd-client/Cargo.toml | 2 +- roles/jd-server/Cargo.toml | 4 +- roles/mining-proxy/Cargo.toml | 2 +- roles/pool/Cargo.toml | 2 +- .../src/lib/mining_pool/message_handler.rs | 5 +- roles/test-utils/mining-device/Cargo.toml | 2 +- roles/translator/Cargo.toml | 2 +- .../pool-sri-test-1-standard.json | 2 +- utils/message-generator/Cargo.toml | 2 +- 18 files changed, 219 insertions(+), 40 deletions(-) 
diff --git a/benches/Cargo.toml b/benches/Cargo.toml index 525c526e5b..4041b4a8d8 100644 --- a/benches/Cargo.toml +++ b/benches/Cargo.toml @@ -10,8 +10,8 @@ async-channel = "1.4.0" v1 = { path="../protocols/v1", package="sv1_api", version = "^1.0.0" } serde_json = { version = "1.0.64", default-features = false, features = ["alloc"] } iai="0.1" -mining_sv2 = { path = "../protocols/v2/subprotocols/mining", version = "^1.0.0" } -roles_logic_sv2 = { path = "../protocols/v2/roles-logic-sv2", version = "^1.0.0" } +mining_sv2 = { path = "../protocols/v2/subprotocols/mining", version = "^2.0.0" } +roles_logic_sv2 = { path = "../protocols/v2/roles-logic-sv2", version = "^2.0.0" } framing_sv2 = { version = "2.0.0", path = "../protocols/v2/framing-sv2" } serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } num-bigint = "0.4.3" diff --git a/protocols/fuzz-tests/Cargo.toml b/protocols/fuzz-tests/Cargo.toml index 5f0ee7ef61..fabb3d2b28 100644 --- a/protocols/fuzz-tests/Cargo.toml +++ b/protocols/fuzz-tests/Cargo.toml @@ -19,7 +19,7 @@ arbitrary = { version = "1", features = ["derive"] } rand = "0.8.3" binary_codec_sv2 = { version = "1.0.0", path = "../v2/binary-sv2/no-serde-sv2/codec"} codec_sv2 = { version = "1.0.0", path = "../v2/codec-sv2", features = ["noise_sv2"]} -roles_logic_sv2 = { version = "1.0.0", path = "../v2/roles-logic-sv2"} +roles_logic_sv2 = { version = "2.0.0", path = "../v2/roles-logic-sv2"} affinity = "0.1.1" threadpool = "1.8.1" lazy_static = "1.4.0" diff --git a/protocols/v2/roles-logic-sv2/Cargo.toml b/protocols/v2/roles-logic-sv2/Cargo.toml index f1b73e7e20..691dee313e 100644 --- a/protocols/v2/roles-logic-sv2/Cargo.toml +++ b/protocols/v2/roles-logic-sv2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roles_logic_sv2" -version = "1.2.1" +version = "2.0.0" authors = ["The Stratum V2 Developers"] edition = "2018" readme = "README.md" @@ -18,7 +18,7 @@ stratum-common = { version="1.0.0", path = "../../../common", 
features=["bitcoin serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false, optional = true} binary_sv2 = {version = "^1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2", default-features = true } common_messages_sv2 = { path = "../../../protocols/v2/subprotocols/common-messages", version = "^2.0.0" } -mining_sv2 = { path = "../../../protocols/v2/subprotocols/mining", version = "^1.0.0" } +mining_sv2 = { path = "../../../protocols/v2/subprotocols/mining", version = "^2.0.0" } template_distribution_sv2 = { path = "../../../protocols/v2/subprotocols/template-distribution", version = "^1.0.1" } job_declaration_sv2 = { path = "../../../protocols/v2/subprotocols/job-declaration", version = "^1.0.0" } const_sv2 = { version = "^2.0.0", path = "../../../protocols/v2/const-sv2"} @@ -46,4 +46,4 @@ prop_test = ["template_distribution_sv2/prop_test"] disable_nopanic = [] [package.metadata.docs.rs] -all-features = true \ No newline at end of file +all-features = true diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 3e1bfbfb22..7a627cb0cc 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -363,6 +363,7 @@ impl ChannelFactory { additional_coinbase_script_data.unwrap_or(&[]), ) .unwrap(); + dbg!(&extranonce_prefix); let success = OpenExtendedMiningChannelSuccess { request_id, channel_id, @@ -461,13 +462,22 @@ impl ChannelFactory { self.standard_channels_for_hom_downstreams .insert(channel_id, standard_channel); + let extranonce: Vec = match additional_coinbase_script_data { + Some(data) => { + let mut data = data.to_vec(); + data.extend_from_slice(extranonce.as_ref()); + data + } + None => extranonce.into(), + }; + // First message to be sent is OpenStandardMiningChannelSuccess result.push(Mining::OpenStandardMiningChannelSuccess( 
OpenStandardMiningChannelSuccess { request_id: request_id.into(), channel_id, target, - extranonce_prefix: extranonce.into(), + extranonce_prefix: extranonce.try_into().expect("Internal error: On initialization we make sure that extranonce + coinbase script additional data are not > then 32 bytes"), group_channel_id: hom_group_id, }, )); @@ -521,13 +531,13 @@ impl ChannelFactory { self.standard_channels_for_non_hom_downstreams .insert(complete_id, standard_channel); - let extranonce = match additional_coinbase_script_data { + let extranonce: Vec = match additional_coinbase_script_data { Some(data) => { let mut data = data.to_vec(); data.extend_from_slice(extranonce.as_ref()); - extranonce + data } - None => extranonce, + None => extranonce.into(), }; // First message to be sent is OpenStandardMiningChannelSuccess result.push(Mining::OpenStandardMiningChannelSuccess( @@ -535,7 +545,7 @@ impl ChannelFactory { request_id: request_id.into(), channel_id, target, - extranonce_prefix: extranonce.into(), + extranonce_prefix: extranonce.try_into().expect(""), group_channel_id: group_id, }, )); @@ -2092,7 +2102,7 @@ mod test { use super::*; use binary_sv2::{Seq0255, B064K, U256}; use bitcoin::{hash_types::WPubkeyHash, PublicKey, TxOut}; - use mining_sv2::OpenStandardMiningChannel; + use mining_sv2::{OpenExtendedMiningChannel, OpenStandardMiningChannel}; const BLOCK_REWARD: u64 = 2_000_000_000; @@ -2206,7 +2216,8 @@ mod test { channel_kind, vec![out], additional_coinbase_script_data.into_bytes(), - ); + ) + .unwrap(); // Build a NewTemplate let new_template = NewTemplate { @@ -2313,4 +2324,144 @@ mod test { OnNewShare::ShareMeetDownstreamTarget => panic!(), }; } + #[test] + fn test_extranonce_prefix_in_hom() { + let extranonce_prefix1 = [10, 11, 12]; + let (prefix, _, _) = get_coinbase(); + + // Initialize a Channel of type Pool + let out = TxOut {value: BLOCK_REWARD, script_pubkey: 
decode_hex("4104c6d0969c2d98a5c19ba7c36c7937c5edbd60ff2a01397c4afe54f16cd641667ea0049ba6f9e1796ba3c8e49e1b504c532ebbaaa1010c3f7d9b83a8ea7fd800e2ac").unwrap().into()}; + let creator = JobsCreators::new(7); + let share_per_min = 1.0; + let extranonces = ExtendedExtranonce::new(0..0, 0..0, 0..7); + + let ids = Arc::new(Mutex::new(GroupId::new())); + let channel_kind = ExtendedChannelKind::Pool; + let mut channel = PoolChannelFactory::new( + ids, + extranonces, + creator, + share_per_min, + channel_kind, + vec![out], + extranonce_prefix1.clone().into(), + ) + .unwrap(); + + // Build a NewTemplate + let new_template = NewTemplate { + template_id: 10, + future_template: true, + version: VERSION, + coinbase_tx_version: 1, + coinbase_prefix: prefix.try_into().unwrap(), + coinbase_tx_input_sequence: u32::MAX, + coinbase_tx_value_remaining: 5_000_000_000, + coinbase_tx_outputs_count: 0, + coinbase_tx_outputs: get_coinbase_outputs(), + coinbase_tx_locktime: 0, + merkle_path: get_merkle_path(), + }; + + // "Send" the NewTemplate to the channel + let _ = channel.on_new_template(&mut (new_template.clone())); + + // Build a PrevHash + let mut p_hash = decode_hex(PREV_HASH).unwrap(); + p_hash.reverse(); + let prev_hash = SetNewPrevHashFromTp { + template_id: 10, + prev_hash: p_hash.try_into().unwrap(), + header_timestamp: PREV_HEADER_TIMESTAMP, + n_bits: PREV_HEADER_NBITS, + target: nbit_to_target(PREV_HEADER_NBITS), + }; + + // "Send" the SetNewPrevHash to channel + let _ = channel.on_new_prev_hash_from_tp(&prev_hash); + + let result = channel + .add_standard_channel(100, 100_000_000_000_000.0, true, 2) + .unwrap(); + let extranonce_prefix = match &result[0] { + Mining::OpenStandardMiningChannelSuccess(msg) => msg.extranonce_prefix.clone().to_vec(), + _ => panic!(), + }; + assert!(&extranonce_prefix.to_vec()[0..3] == extranonce_prefix1); + } + #[test] + fn test_extranonce_prefix_in_extended() { + let extranonce_prefix1 = [10, 11, 12]; + let extranonce_prefix2 = [14, 11, 12]; + 
let (prefix, _, _) = get_coinbase(); + + // Initialize a Channel of type Pool + let out = TxOut {value: BLOCK_REWARD, script_pubkey: decode_hex("4104c6d0969c2d98a5c19ba7c36c7937c5edbd60ff2a01397c4afe54f16cd641667ea0049ba6f9e1796ba3c8e49e1b504c532ebbaaa1010c3f7d9b83a8ea7fd800e2ac").unwrap().into()}; + let creator = JobsCreators::new(16); + let share_per_min = 1.0; + let extranonces = ExtendedExtranonce::new(0..0, 0..8, 8..16); + + let ids = Arc::new(Mutex::new(GroupId::new())); + let channel_kind = ExtendedChannelKind::Pool; + let mut channel = PoolChannelFactory::new( + ids, + extranonces, + creator, + share_per_min, + channel_kind, + vec![out], + extranonce_prefix1.clone().into(), + ) + .unwrap(); + + // Build a NewTemplate + let new_template = NewTemplate { + template_id: 10, + future_template: true, + version: VERSION, + coinbase_tx_version: 1, + coinbase_prefix: prefix.try_into().unwrap(), + coinbase_tx_input_sequence: u32::MAX, + coinbase_tx_value_remaining: 5_000_000_000, + coinbase_tx_outputs_count: 0, + coinbase_tx_outputs: get_coinbase_outputs(), + coinbase_tx_locktime: 0, + merkle_path: get_merkle_path(), + }; + + // "Send" the NewTemplate to the channel + let _ = channel.on_new_template(&mut (new_template.clone())); + + // Build a PrevHash + let mut p_hash = decode_hex(PREV_HASH).unwrap(); + p_hash.reverse(); + let prev_hash = SetNewPrevHashFromTp { + template_id: 10, + prev_hash: p_hash.try_into().unwrap(), + header_timestamp: PREV_HEADER_TIMESTAMP, + n_bits: PREV_HEADER_NBITS, + target: nbit_to_target(PREV_HEADER_NBITS), + }; + + let _ = channel.on_new_prev_hash_from_tp(&prev_hash); + + let result = channel + .new_extended_channel(100, 100_000_000_000_000.0, 2) + .unwrap(); + let (extranonce_prefix, channel_id) = match &result[0] { + Mining::OpenExtendedMiningChannelSuccess(msg) => { + (msg.extranonce_prefix.clone().to_vec(), msg.channel_id) + } + _ => panic!(), + }; + assert!(&extranonce_prefix.to_vec()[0..3] == extranonce_prefix1); + match channel + 
.change_additional_coinbase_script_data(extranonce_prefix2.to_vec(), channel_id) + { + Ok(Mining::SetExtranoncePrefix(msg)) => { + assert!(&msg.extranonce_prefix.to_vec()[0..3] == extranonce_prefix2); + } + _ => panic!(), + } + } } diff --git a/protocols/v2/roles-logic-sv2/src/job_creator.rs b/protocols/v2/roles-logic-sv2/src/job_creator.rs index e52ee9b787..0e93e87d78 100644 --- a/protocols/v2/roles-logic-sv2/src/job_creator.rs +++ b/protocols/v2/roles-logic-sv2/src/job_creator.rs @@ -564,7 +564,7 @@ pub mod tests { let mut jobs_creators = JobsCreators::new(32); let job = jobs_creators - .on_new_template(template.borrow_mut(), false, vec![out]) + .on_new_template(template.borrow_mut(), false, vec![out], 0) .unwrap(); assert_eq!( @@ -588,7 +588,7 @@ pub mod tests { assert_eq!(jobs_creators.lasts_new_template.len(), 0); - let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out]); + let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out], 0); assert_eq!(jobs_creators.lasts_new_template.len(), 1); assert_eq!(jobs_creators.lasts_new_template[0], template); @@ -622,7 +622,7 @@ pub mod tests { let mut jobs_creators = JobsCreators::new(32); //Create a template - let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out]); + let _ = jobs_creators.on_new_template(template.borrow_mut(), false, vec![out], 0); let test_id = template.template_id; // Create a SetNewPrevHash with matching template_id diff --git a/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs b/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs index a0f4cb6172..780687efe0 100644 --- a/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs +++ b/protocols/v2/roles-logic-sv2/src/job_dispatcher.rs @@ -146,6 +146,7 @@ impl GroupChannelJobDispatcher { &mut self, extended: &NewExtendedMiningJob, channel: &StandardChannel, + additional_coinbase_script_data: &[u8], // should be changed to return a Result> ) -> Option> { if extended.is_future() { @@ -161,9 
+162,17 @@ impl GroupChannelJobDispatcher { let standard_job_id = self.ids.safe_lock(|ids| ids.next()).unwrap(); let extranonce: Vec = channel.extranonce.clone().into(); + let mut prefix: Vec = + Vec::with_capacity(extranonce.len() + additional_coinbase_script_data.len()); + for b in additional_coinbase_script_data { + prefix.push(*b); + } + for b in extranonce { + prefix.push(b); + } let new_mining_job_message = extended_to_standard_job_for_group_channel( extended, - &extranonce, + &prefix, channel.channel_id, standard_job_id, )?; @@ -310,12 +319,13 @@ mod tests { #[test] fn test_group_channel_job_dispatcher() { + let extranonce_len = 16; let out = TxOut { value: BLOCK_REWARD, script_pubkey: Script::new_p2pk(&new_pub_key()), }; - let pool_signature = "Stratum v2 SRI Pool".to_string(); - let mut jobs_creators = JobsCreators::new(32); + let pool_signature = "Stratum v2 SRI".to_string(); + let mut jobs_creators = JobsCreators::new(extranonce_len); let group_channel_id = 1; //Create a template let mut template = template_from_gen(&mut Gen::new(255)); @@ -331,8 +341,9 @@ mod tests { // create standard channel let target = Target::from(U256::try_from(utils::extranonce_gen()).unwrap()); let standard_channel_id = 2; - let extranonce = Extranonce::try_from(utils::extranonce_gen()) - .expect("Failed to convert bytes to extranonce"); + let extranonce = + Extranonce::try_from(utils::extranonce_gen()[0..extranonce_len as usize].to_vec()) + .expect("Failed to convert bytes to extranonce"); let standard_channel = StandardChannel { channel_id: standard_channel_id, group_id: group_channel_id, @@ -341,7 +352,11 @@ mod tests { }; // call target function (on_new_extended_mining_job) let new_mining_job = group_channel_dispatcher - .on_new_extended_mining_job(&extended_mining_job, &standard_channel) + .on_new_extended_mining_job( + &extended_mining_job, + &standard_channel, + &pool_signature.clone().into_bytes(), + ) .unwrap(); // on_new_extended_mining_job assertions @@ -351,6 +366,7 
@@ mod tests { &extended_mining_job, extranonce.clone(), standard_channel_id, + &pool_signature, ); // on_new_prev_hash assertions if extended_mining_job.is_future() { @@ -374,12 +390,21 @@ mod tests { extended_mining_job: &NewExtendedMiningJob, extranonce: Extranonce, standard_channel_id: u32, + pool_signature: &String, ) -> (u32, Vec) { + let extranonce: Vec = extranonce.clone().into(); + let mut prefix: Vec = Vec::new(); + for b in pool_signature.clone().into_bytes() { + prefix.push(b); + } + for b in extranonce { + prefix.push(b); + } // compute test merkle path let new_root = merkle_root_from_path( extended_mining_job.coinbase_tx_prefix.inner_as_ref(), extended_mining_job.coinbase_tx_suffix.inner_as_ref(), - extranonce.to_vec().as_slice(), + prefix.as_slice(), &extended_mining_job.merkle_path.inner_as_ref(), &[], ) diff --git a/protocols/v2/subprotocols/mining/Cargo.toml b/protocols/v2/subprotocols/mining/Cargo.toml index 020ab39ec9..f000187595 100644 --- a/protocols/v2/subprotocols/mining/Cargo.toml +++ b/protocols/v2/subprotocols/mining/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "mining_sv2" -version = "1.0.0" +version = "2.0.0" authors = ["The Stratum V2 Developers"] edition = "2018" readme = "README.md" @@ -28,4 +28,4 @@ no_std = [] with_serde = ["binary_sv2/with_serde", "serde"] [package.metadata.docs.rs] -all-features = true \ No newline at end of file +all-features = true diff --git a/protocols/v2/subprotocols/mining/src/lib.rs b/protocols/v2/subprotocols/mining/src/lib.rs index d71d5605ad..901ee671d1 100644 --- a/protocols/v2/subprotocols/mining/src/lib.rs +++ b/protocols/v2/subprotocols/mining/src/lib.rs @@ -328,12 +328,12 @@ impl Extranonce { None } else { let mut prefix = Vec::with_capacity(prefix_len + additional_coinbase_script_data.len()); - for i in 0..prefix_len { - prefix.push(self.extranonce[i]); - } for b in additional_coinbase_script_data { prefix.push(*b); } + for i in 0..prefix_len { + prefix.push(self.extranonce[i]); + } // unwrap is 
sage as prefix_len can not be greater than 32 cause is not possible to // contruct Extranonce with the inner vecto greater than 32. Some(prefix.try_into().unwrap()) diff --git a/roles/Cargo.lock b/roles/Cargo.lock index d3fb2df942..a8ade1d1af 100644 --- a/roles/Cargo.lock +++ b/roles/Cargo.lock @@ -1611,7 +1611,7 @@ dependencies = [ [[package]] name = "mining_sv2" -version = "1.0.0" +version = "2.0.0" dependencies = [ "binary_sv2", "const_sv2", @@ -2021,7 +2021,7 @@ dependencies = [ [[package]] name = "roles_logic_sv2" -version = "1.2.1" +version = "2.0.0" dependencies = [ "binary_sv2", "chacha20poly1305", diff --git a/roles/jd-client/Cargo.toml b/roles/jd-client/Cargo.toml index 1efd666b9b..5d138e9b4b 100644 --- a/roles/jd-client/Cargo.toml +++ b/roles/jd-client/Cargo.toml @@ -24,7 +24,7 @@ buffer_sv2 = { version = "^1.0.0", path = "../../utils/buffer" } codec_sv2 = { version = "^1.0.1", path = "../../protocols/v2/codec-sv2", features = ["noise_sv2", "with_buffer_pool"] } framing_sv2 = { version = "^2.0.0", path = "../../protocols/v2/framing-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features=["with_tokio", "with_buffer_pool"] } -roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } futures = "0.3.25" tokio = { version = "1", features = ["full"] } diff --git a/roles/jd-server/Cargo.toml b/roles/jd-server/Cargo.toml index 9e5ad8986a..08d566753a 100644 --- a/roles/jd-server/Cargo.toml +++ b/roles/jd-server/Cargo.toml @@ -26,7 +26,7 @@ const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features = ["with_tokio"] } noise_sv2 = { version = "1.1.0", path = "../../protocols/v2/noise-sv2" } rand = "0.8.4" 
-roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } tokio = { version = "1", features = ["full"] } ext-config = { version = "0.14.0", features = ["toml"], package = "config" } tracing = { version = "0.1" } @@ -38,4 +38,4 @@ serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = hashbrown = { version = "0.11", default-features = false, features = ["ahash", "serde"] } key-utils = { version = "^1.0.0", path = "../../utils/key-utils" } rpc_sv2 = { version = "1.0.0", path = "../roles-utils/rpc" } -hex = "0.4.3" \ No newline at end of file +hex = "0.4.3" diff --git a/roles/mining-proxy/Cargo.toml b/roles/mining-proxy/Cargo.toml index 175fca0392..431a1ec850 100644 --- a/roles/mining-proxy/Cargo.toml +++ b/roles/mining-proxy/Cargo.toml @@ -27,7 +27,7 @@ const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } futures = "0.3.19" network_helpers_sv2 = {version = "2.0.0", path = "../roles-utils/network-helpers", features = ["with_tokio","with_buffer_pool"] } once_cell = "1.12.0" -roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false } tokio = { version = "1", features = ["full"] } ext-config = { version = "0.14.0", features = ["toml"], package = "config" } diff --git a/roles/pool/Cargo.toml b/roles/pool/Cargo.toml index 95c21957bc..626ddbd25b 100644 --- a/roles/pool/Cargo.toml +++ b/roles/pool/Cargo.toml @@ -26,7 +26,7 @@ const_sv2 = { version = "^2.0.0", path = "../../protocols/v2/const-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features =["with_tokio","with_buffer_pool"] } noise_sv2 = { version = "1.1.0", path = "../../protocols/v2/noise-sv2" } rand = "0.8.4" 
-roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", features = ["derive", "alloc"], default-features = false } tokio = { version = "1", features = ["full"] } ext-config = { version = "0.14.0", features = ["toml"], package = "config" } diff --git a/roles/pool/src/lib/mining_pool/message_handler.rs b/roles/pool/src/lib/mining_pool/message_handler.rs index 8d8b1ce882..3c07c369c9 100644 --- a/roles/pool/src/lib/mining_pool/message_handler.rs +++ b/roles/pool/src/lib/mining_pool/message_handler.rs @@ -144,7 +144,10 @@ impl ParseDownstreamMiningMessages<(), NullDownstreamMiningSelector, NoRouting> Ok(SendTo::Respond(Mining::SubmitSharesSuccess(success))) }, }, - Err(_) => todo!(), + Err(e) => { + dbg!(e); + panic!("Internal Error: unexpected message from channel factory"); + } } } diff --git a/roles/test-utils/mining-device/Cargo.toml b/roles/test-utils/mining-device/Cargo.toml index 5fa557db08..b0262677f3 100644 --- a/roles/test-utils/mining-device/Cargo.toml +++ b/roles/test-utils/mining-device/Cargo.toml @@ -22,7 +22,7 @@ path = "src/lib/mod.rs" [dependencies] stratum-common = { version = "1.0.0", path = "../../../common" } codec_sv2 = { version = "^1.0.1", path = "../../../protocols/v2/codec-sv2", features=["noise_sv2"] } -roles_logic_sv2 = { version = "1.0.0", path = "../../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "2.0.0", path = "../../../protocols/v2/roles-logic-sv2" } const_sv2 = { version = "2.0.0", path = "../../../protocols/v2/const-sv2" } async-channel = "1.5.1" binary_sv2 = { version = "1.0.0", path = "../../../protocols/v2/binary-sv2/binary-sv2" } diff --git a/roles/translator/Cargo.toml b/roles/translator/Cargo.toml index cf58e40930..834b92a429 100644 --- a/roles/translator/Cargo.toml +++ b/roles/translator/Cargo.toml @@ -30,7 +30,7 @@ codec_sv2 = { version = "^1.0.1", path = 
"../../protocols/v2/codec-sv2", feature framing_sv2 = { version = "^2.0.0", path = "../../protocols/v2/framing-sv2" } network_helpers_sv2 = { version = "2.0.0", path = "../roles-utils/network-helpers", features=["async_std", "with_buffer_pool"] } once_cell = "1.12.0" -roles_logic_sv2 = { version = "^1.0.0", path = "../../protocols/v2/roles-logic-sv2" } +roles_logic_sv2 = { version = "^2.0.0", path = "../../protocols/v2/roles-logic-sv2" } serde = { version = "1.0.89", default-features = false, features = ["derive", "alloc"] } serde_json = { version = "1.0.64", default-features = false, features = ["alloc"] } futures = "0.3.25" diff --git a/test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.json b/test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.json index 24ae109c50..85cfd0b429 100644 --- a/test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.json +++ b/test/message-generator/test/pool-sri-test-1-standard/pool-sri-test-1-standard.json @@ -48,7 +48,7 @@ "type": "SubmitSharesStandard", "channel_id": 1, "sequence_number": 0, - "job_id": 0, + "job_id": 1, "nonce": 4035255480, "ntime": 1698941362, "version": 536870912 diff --git a/utils/message-generator/Cargo.toml b/utils/message-generator/Cargo.toml index 55f17d365e..120d03e99f 100644 --- a/utils/message-generator/Cargo.toml +++ b/utils/message-generator/Cargo.toml @@ -20,7 +20,7 @@ codec_sv2 = { version = "1.0.0", path = "../../protocols/v2/codec-sv2", features const_sv2 = { version = "2.0.0", path = "../../protocols/v2/const-sv2" } load_file = "1.0.1" network_helpers_sv2 = { version = "2.0.0", path = "../../roles/roles-utils/network-helpers", features = ["with_tokio","with_serde"] } -roles_logic_sv2 = { version = "1.0.0", path = "../../protocols/v2/roles-logic-sv2", features = ["with_serde"] } +roles_logic_sv2 = { version = "2.0.0", path = "../../protocols/v2/roles-logic-sv2", features = ["with_serde"] } v1 = { version = "^1.0.0", path = 
"../../protocols/v1", package="sv1_api" } serde = { version = "*", features = ["derive", "alloc"], default-features = false } serde_json = { version = "1.0", default-features = false, features = ["alloc"] } From f69753eb5b91f45083b7d011487f20e0dbde3a7d Mon Sep 17 00:00:00 2001 From: fi3 Date: Mon, 18 Nov 2024 11:52:26 +0100 Subject: [PATCH 24/27] Fix JDC do not use pool signature in config but the implicit one in the extranonce --- .../roles-logic-sv2/src/channel_logic/channel_factory.rs | 1 - roles/jd-client/src/lib/mod.rs | 1 - roles/jd-client/src/lib/upstream_sv2/upstream.rs | 7 +------ .../config-examples/jds-config-local-example.toml | 2 +- 4 files changed, 2 insertions(+), 9 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 7a627cb0cc..064b2f64c9 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -363,7 +363,6 @@ impl ChannelFactory { additional_coinbase_script_data.unwrap_or(&[]), ) .unwrap(); - dbg!(&extranonce_prefix); let success = OpenExtendedMiningChannelSuccess { request_id, channel_id, diff --git a/roles/jd-client/src/lib/mod.rs b/roles/jd-client/src/lib/mod.rs index 467ac52b54..b5b1fc0fa8 100644 --- a/roles/jd-client/src/lib/mod.rs +++ b/roles/jd-client/src/lib/mod.rs @@ -247,7 +247,6 @@ impl JobDeclaratorClient { upstream_addr, upstream_config.authority_pubkey, 0, // TODO - upstream_config.pool_signature.clone(), status::Sender::Upstream(tx_status.clone()), task_collector.clone(), Arc::new(Mutex::new(PoolChangerTrigger::new(timeout))), diff --git a/roles/jd-client/src/lib/upstream_sv2/upstream.rs b/roles/jd-client/src/lib/upstream_sv2/upstream.rs index 4461c40a09..ddae5ea56d 100644 --- a/roles/jd-client/src/lib/upstream_sv2/upstream.rs +++ b/roles/jd-client/src/lib/upstream_sv2/upstream.rs @@ -114,8 +114,6 @@ pub struct Upstream { pub 
min_extranonce_size: u16, #[allow(dead_code)] pub upstream_extranonce1_size: usize, - /// String be included in coinbase tx input scriptsig - pub pool_signature: String, /// Receives messages from the SV2 Upstream role pub receiver: Receiver, /// Sends messages to the SV2 Upstream role @@ -151,7 +149,6 @@ impl Upstream { address: SocketAddr, authority_public_key: Secp256k1PublicKey, min_extranonce_size: u16, - pool_signature: String, tx_status: status::Sender, task_collector: Arc>>, pool_chaneger_trigger: Arc>, @@ -189,7 +186,6 @@ impl Upstream { min_extranonce_size, upstream_extranonce1_size: 16, /* 16 is the default since that is the only value the * pool supports currently */ - pool_signature, tx_status, receiver, sender, @@ -564,7 +560,6 @@ impl ParseUpstreamMiningMessages Result, RolesLogicError> { info!("Receive open extended mining channel success"); let ids = Arc::new(Mutex::new(roles_logic_sv2::utils::GroupId::new())); - let pool_signature = self.pool_signature.clone().into(); let prefix_len = m.extranonce_prefix.to_vec().len(); let self_len = 0; let total_len = prefix_len + m.extranonce_size as usize; @@ -586,7 +581,7 @@ impl ParseUpstreamMiningMessages Date: Mon, 18 Nov 2024 12:36:19 +0100 Subject: [PATCH 25/27] Fix translator config file to use shorter extranonce --- .../config-examples/tproxy-config-local-jdc-example.toml | 4 ++-- roles/translator/src/lib/mod.rs | 1 + roles/translator/src/lib/upstream_sv2/upstream.rs | 6 +++--- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/roles/translator/config-examples/tproxy-config-local-jdc-example.toml b/roles/translator/config-examples/tproxy-config-local-jdc-example.toml index 5fe4a8eebd..1e18457ac2 100644 --- a/roles/translator/config-examples/tproxy-config-local-jdc-example.toml +++ b/roles/translator/config-examples/tproxy-config-local-jdc-example.toml @@ -19,8 +19,8 @@ min_supported_version = 2 # Minimum extranonce2 size for downstream # Max value: 16 (leaves 0 bytes for search space 
splitting of downstreams) # Max value for CGminer: 8 -# Min value: 2 -min_extranonce2_size = 8 +# Min value: 5 +min_extranonce2_size = 5 # Difficulty params [downstream_difficulty_config] diff --git a/roles/translator/src/lib/mod.rs b/roles/translator/src/lib/mod.rs index 7b47a40e6b..e225a133af 100644 --- a/roles/translator/src/lib/mod.rs +++ b/roles/translator/src/lib/mod.rs @@ -206,6 +206,7 @@ impl TranslatorSv2 { upstream.clone(), proxy_config.min_supported_version, proxy_config.max_supported_version, + proxy_config.min_extranonce2_size, ) .await { diff --git a/roles/translator/src/lib/upstream_sv2/upstream.rs b/roles/translator/src/lib/upstream_sv2/upstream.rs index 9b4a2a20c1..bdefe6373d 100644 --- a/roles/translator/src/lib/upstream_sv2/upstream.rs +++ b/roles/translator/src/lib/upstream_sv2/upstream.rs @@ -185,6 +185,7 @@ impl Upstream { self_: Arc>, min_version: u16, max_version: u16, + min_extranonce_size: u16, ) -> ProxyResult<'static, ()> { // Get the `SetupConnection` message with Mining Device information (currently hard coded) let setup_connection = Self::get_setup_connection_message(min_version, max_version, false)?; @@ -241,9 +242,8 @@ impl Upstream { request_id: 0, // TODO user_identity, // TODO nominal_hash_rate, - max_target: u256_from_int(u64::MAX), // TODO - min_extranonce_size: 8, /* 8 is the max extranonce2 size the braiins - * pool supports */ + max_target: u256_from_int(u64::MAX), + min_extranonce_size, }); // reset channel hashrate so downstreams can manage from now on out From 3875ee4748b5e2a44e74b4fef66452594b178ee7 Mon Sep 17 00:00:00 2001 From: fi3 Date: Tue, 19 Nov 2024 10:30:37 +0100 Subject: [PATCH 26/27] Fix MG tests --- .../src/channel_logic/channel_factory.rs | 60 +++++++++++++++---- test/config/pool-config-sri-tp.toml | 2 +- .../pool-mock-tp-standard-coverage.toml | 2 +- test/config/pool-mock-tp.toml | 2 +- .../pool-sri-test-extended_0.json | 4 +- .../pool-sri-test-extended_1.json | 2 +- .../standard-coverage-test.json | 6 +- 
.../translation-proxy-old-share.json | 2 +- 8 files changed, 57 insertions(+), 23 deletions(-) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 064b2f64c9..6bfa5e4c85 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -357,6 +357,18 @@ impl ChannelFactory { .extranonces .next_extended(max_extranonce_size as usize) .unwrap(); + let extranonce_with_stripped_data = extranonce + .into_prefix(self.extranonces.get_prefix_len(), &[]) + .unwrap(); + let success_with_stirpped_extranonce_add_data = OpenExtendedMiningChannelSuccess { + request_id, + channel_id, + target: target.clone(), + extranonce_size: max_extranonce_size, + extranonce_prefix: extranonce_with_stripped_data, + }; + self.extended_channels + .insert(channel_id, success_with_stirpped_extranonce_add_data); let extranonce_prefix = extranonce .into_prefix( self.extranonces.get_prefix_len(), @@ -370,7 +382,6 @@ impl ChannelFactory { extranonce_size: max_extranonce_size, extranonce_prefix, }; - self.extended_channels.insert(channel_id, success.clone()); let mut result = vec![Mining::OpenExtendedMiningChannelSuccess(success)]; if let Some((job, _)) = &self.get_last_valid_job() { let mut job = job.clone(); @@ -909,9 +920,7 @@ impl ChannelFactory { } => upstream_target.clone(), }; - let (downstream_target, extranonce) = self - .get_channel_specific_mining_info(&m) - .ok_or(Error::ShareDoNotMatchAnyChannel)?; + let (downstream_target, extranonce) = self.get_channel_specific_mining_info(&m)?; let extranonce_1_len = self.extranonces.get_range0_len(); let extranonce_2 = extranonce[extranonce_1_len..].to_vec(); match &mut m { @@ -1037,10 +1046,16 @@ impl ChannelFactory { } } /// Returns the downstream target and extranonce for the channel - fn get_channel_specific_mining_info(&self, m: &Share) -> 
Option<(mining_sv2::Target, Vec)> { + fn get_channel_specific_mining_info( + &self, + m: &Share, + ) -> Result<(mining_sv2::Target, Vec), Error> { match m { Share::Extended(share) => { - let channel = self.extended_channels.get(&m.get_channel_id())?; + let channel = self + .extended_channels + .get(&m.get_channel_id()) + .ok_or(Error::ShareDoNotMatchAnyChannel)?; let extranonce_prefix = channel.extranonce_prefix.to_vec(); let dowstream_target = channel.target.clone().into(); let extranonce = [&extranonce_prefix[..], &share.extranonce.to_vec()[..]] @@ -1052,8 +1067,9 @@ impl ChannelFactory { self.extranonces.get_len(), extranonce.len() ); + return Err(Error::InvalidCoinbase); } - Some((dowstream_target, extranonce)) + Ok((dowstream_target, extranonce)) } Share::Standard((share, group_id)) => match &self.kind { ExtendedChannelKind::Pool => { @@ -1066,9 +1082,16 @@ impl ChannelFactory { .standard_channels_for_hom_downstreams .get(&share.channel_id); }; - Some(( - channel?.target.clone(), - channel?.extranonce.clone().to_vec(), + Ok(( + channel + .ok_or(Error::ShareDoNotMatchAnyChannel)? + .target + .clone(), + channel + .ok_or(Error::ShareDoNotMatchAnyChannel)? + .extranonce + .clone() + .to_vec(), )) } ExtendedChannelKind::Proxy { .. } | ExtendedChannelKind::ProxyJd { .. } => { @@ -1081,9 +1104,16 @@ impl ChannelFactory { .standard_channels_for_hom_downstreams .get(&share.channel_id); }; - Some(( - channel?.target.clone(), - channel?.extranonce.clone().to_vec(), + Ok(( + channel + .ok_or(Error::ShareDoNotMatchAnyChannel)? + .target + .clone(), + channel + .ok_or(Error::ShareDoNotMatchAnyChannel)? + .extranonce + .clone() + .to_vec(), )) } }, @@ -1228,6 +1258,10 @@ impl PoolChannelFactory { // This initialise a PoolChannelFactory for a JDC that can not have // additional_coinbase_script_data as it is set only by the pool. 
assert!(self.additional_coinbase_script_data.is_empty()); + self.channel_to_additional_coinbase_script_data.insert( + channel_id, + (self.additional_coinbase_script_data.clone(), None), + ); self.inner.replicate_upstream_extended_channel_only_jd( target, extranonce, diff --git a/test/config/pool-config-sri-tp.toml b/test/config/pool-config-sri-tp.toml index 3d7c18baf7..be433b26aa 100644 --- a/test/config/pool-config-sri-tp.toml +++ b/test/config/pool-config-sri-tp.toml @@ -11,4 +11,4 @@ coinbase_outputs = [ { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, ] # Pool signature (string to be included in coinbase tx) -pool_signature = "Stratum v2 SRI Pool" \ No newline at end of file +pool_signature = "Stratum v2 SRI" diff --git a/test/config/pool-mock-tp-standard-coverage.toml b/test/config/pool-mock-tp-standard-coverage.toml index 181981c3ff..98e4a5cc6b 100644 --- a/test/config/pool-mock-tp-standard-coverage.toml +++ b/test/config/pool-mock-tp-standard-coverage.toml @@ -12,4 +12,4 @@ coinbase_outputs = [ { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, ] # Pool signature (string to be included in coinbase tx) -pool_signature = "Stratum v2 SRI Pool" \ No newline at end of file +pool_signature = "Stratum v2 SRI" diff --git a/test/config/pool-mock-tp.toml b/test/config/pool-mock-tp.toml index 252d0637a6..98e4a5cc6b 100644 --- a/test/config/pool-mock-tp.toml +++ b/test/config/pool-mock-tp.toml @@ -12,4 +12,4 @@ coinbase_outputs = [ { output_script_type = "P2WPKH", output_script_value = "036adc3bdf21e6f9a0f0fb0066bf517e5b7909ed1563d6958a10993849a7554075" }, ] # Pool signature (string to be included in coinbase tx) -pool_signature = "Stratum v2 SRI Pool" +pool_signature = "Stratum v2 SRI" diff --git a/test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.json 
b/test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.json index b8a5290174..0cbe477ba4 100644 --- a/test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.json +++ b/test/message-generator/test/pool-sri-test-extended_0/pool-sri-test-extended_0.json @@ -14,7 +14,7 @@ "user_identity": "", "nominal_hash_rate": 10, "max_target": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], - "min_extranonce_size": 16 + "min_extranonce_size": 8 }, "replace_fields": [["request_id", "ARBITRARY"]], "id": "open_extended_mining_channel" @@ -54,7 +54,7 @@ [ [ "extranonce_size", - {"U16": 16} + {"U16": 8} ] ] ] diff --git a/test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.json b/test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.json index 0567ae1e3f..ecd4fa743b 100644 --- a/test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.json +++ b/test/message-generator/test/pool-sri-test-extended_1/pool-sri-test-extended_1.json @@ -66,7 +66,7 @@ [ [ "coinbase_tx_prefix", - {"B064K": [2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 56, 3, 76, 163, 38, 0, 83, 116, 114, 97, 116, 117, 109, 32, 118, 50, 32, 83, 82, 73, 32, 80, 111, 111, 108]} + {"B064K": [2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 32, 3, 76, 163, 38, 0]} ], [ diff --git a/test/message-generator/test/standard-coverage-test/standard-coverage-test.json b/test/message-generator/test/standard-coverage-test/standard-coverage-test.json index a9a8daa93b..e11d2d7263 100644 --- a/test/message-generator/test/standard-coverage-test/standard-coverage-test.json +++ b/test/message-generator/test/standard-coverage-test/standard-coverage-test.json @@ -54,7 +54,7 @@ "type": "SubmitSharesStandard", "channel_id": 1, 
"sequence_number": 0, - "job_id": 0, + "job_id": 1, "nonce": 927894720, "ntime": 1671039088, "version": 536870912 @@ -66,8 +66,8 @@ "type": "SubmitSharesStandard", "channel_id": 1, "sequence_number": 0, - "job_id": 0, - "nonce": 1751, + "job_id": 1, + "nonce": 1752, "ntime": 1671116742, "version": 536870912 }, diff --git a/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json index 79833f2c04..52ada3a763 100644 --- a/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json +++ b/test/message-generator/test/translation-proxy-old-share/translation-proxy-old-share.json @@ -22,7 +22,7 @@ "message": { "id": 0, "method": "mining.submit", - "params": ["username", "0", "0000000000000000", "641577b0", "7a600640"] + "params": ["username", "0", "0000000000", "641577b0", "7a600640"] }, "id": "mining.submit" } From 66f61f1211a3398c1c5e16fd3b7c3c029b7cf960 Mon Sep 17 00:00:00 2001 From: fi3 Date: Mon, 16 Dec 2024 09:32:36 +0100 Subject: [PATCH 27/27] Fix ChannelFactory add_job --- .../v2/roles-logic-sv2/src/channel_logic/channel_factory.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs index 6bfa5e4c85..57d38c13e6 100644 --- a/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs +++ b/protocols/v2/roles-logic-sv2/src/channel_logic/channel_factory.rs @@ -259,6 +259,7 @@ impl ChannelFactory { self.id_to_job.retain(|_, v| *v != to_remove as u8); self.id_to_job.insert(job.job_id, to_remove as u8); self.last_valid_jobs[to_remove] = Some((job, group_ids)); + self.added_elements += 1; } _ => panic!("Internal error: invalid last_valid_jobs state"), }