From 89effd9dd636779058d31c71606fad9d34d6bc6f Mon Sep 17 00:00:00 2001 From: sinitcin Date: Fri, 1 Oct 2021 01:09:08 +0300 Subject: [PATCH 01/53] Removed the old header.rs, now TxOutputHeader turn into Enum --- pallets/utxo/src/header.rs | 317 ------------------------------------- pallets/utxo/src/lib.rs | 148 +++++++++-------- pallets/utxo/src/tests.rs | 34 ++++ 3 files changed, 118 insertions(+), 381 deletions(-) delete mode 100644 pallets/utxo/src/header.rs diff --git a/pallets/utxo/src/header.rs b/pallets/utxo/src/header.rs deleted file mode 100644 index 6a71482..0000000 --- a/pallets/utxo/src/header.rs +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright (c) 2021 RBB S.r.l -// opensource@mintlayer.org -// SPDX-License-Identifier: MIT -// Licensed under the MIT License; -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://spdx.org/licenses/MIT -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// Author(s): C. Yap, Anton Sinitsyn - -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_core::sp_std::convert::TryFrom; - -use codec::{Decode, Encode}; - -pub type TXOutputHeader = u128; -pub type TokenID = u64; - -// Check one bit in a number -#[inline(always)] -fn check_bit(number: u128, pos: u32) -> bool { - (number & (1u128.overflowing_shl(pos).0)) != 0 -} - -#[inline(always)] -fn set_bit(number: u128, pos: u32) -> u128 { - number | (1u128.overflowing_shl(pos).0) -} - -// Copy number to bits field -fn fit_in_bits(number: u128, pos: u32, length: u32) -> u128 { - let mut result = 0u128; - for i in pos..pos + length { - if check_bit(number, i) { - result = set_bit(result, i - pos); - } - } - result -} - -fn move_bits(from: u128, f_offset: u32, f_length: u32, to_offset: u32) -> u128 { - let mut result = 0u128; - for i in f_offset..f_offset + f_length { - if check_bit(from, i) { - result = set_bit(result, i - f_offset + to_offset); - } - } - result -} - -#[derive(Debug)] -struct BitsField { - length: u32, - offset: u32, - pub data: u128, -} - -// Size of bit fields, total 72 bits -const TOKEN_TYPE_SIZE: u32 = 3; -const TOKEN_ID_SIZE: u32 = 64; -const VERSION_SIZE: u32 = 5; - -#[derive(Debug)] -pub struct OutputHeaderData { - token_type: BitsField, - token_id: BitsField, - version: BitsField, - reserve: BitsField, -} - -impl OutputHeaderData { - pub fn new(header: u128) -> OutputHeaderData { - let mut offset = 0; - - // Signature method - let token_type = BitsField { - length: TOKEN_TYPE_SIZE, - offset, - data: fit_in_bits(header, offset, TOKEN_TYPE_SIZE), - }; - offset += TOKEN_TYPE_SIZE; - - // Token ID - let token_id = BitsField { - length: TOKEN_ID_SIZE, - offset, - data: fit_in_bits(header, offset, TOKEN_ID_SIZE), - }; - offset += TOKEN_ID_SIZE; - - // Version number - let version = BitsField { - length: VERSION_SIZE, - offset, - data: fit_in_bits(header, offset, VERSION_SIZE), - }; - offset += VERSION_SIZE; - - // You can add another field here. 
Just do not forget to add offset - OutputHeaderData { - token_type, - token_id, - version, - reserve: BitsField { - length: u128::BITS - offset, - offset, - data: fit_in_bits(header, offset, u128::BITS - offset), - }, - } - } - - pub fn as_u128(&self) -> u128 { - // Easy one because these bits have a concrete place - let mut result = 0u128; - let mut offset = 0; - result += move_bits(self.token_type.data, 0, TOKEN_TYPE_SIZE, offset); - offset += TOKEN_TYPE_SIZE; - result += move_bits(self.token_id.data, 0, TOKEN_ID_SIZE, offset); - offset += TOKEN_ID_SIZE; - result += move_bits(self.version.data, 0, VERSION_SIZE, offset); - - result - } - - pub fn token_type(&self) -> Option { - TryFrom::try_from(self.token_type.data).ok() - } - - pub fn set_token_type(&mut self, token_id: TokenType) { - self.token_type.data = token_id as u128; - } - - pub fn token_id(&self) -> TokenID { - self.token_id.data as u64 - } - - pub fn set_token_id(&mut self, token_id: TokenID) { - self.token_id.data = token_id as u128; - } - - pub fn version(&self) -> u128 { - self.version.data - } - - pub fn set_version(&mut self, version: u64) { - self.version.data = version as u128; - } - - pub fn validate(&self) -> bool { - self.token_type().is_some() - } -} - -pub trait OutputHeaderHelper { - fn as_tx_output_header(&self) -> OutputHeaderData; -} - -impl OutputHeaderHelper for TXOutputHeader { - fn as_tx_output_header(&self) -> OutputHeaderData { - OutputHeaderData::new(*self) - } -} - -// https://stackoverflow.com/posts/57578431/revisions from Shepmaster -// whenever a new type/variant is supported, we don't have to code a lot of 'matches' boilerplate. -macro_rules! u128_to_enum { - ($(#[$meta:meta])* $vis:vis enum $name:ident { - $($(#[$vmeta:meta])* $vname:ident $(= $val:expr)?,)* - }) => { - $(#[$meta])* - $vis enum $name { - $($(#[$vmeta])* $vname $(= $val)?,)* - } - - impl TryFrom for $name { - type Error = &'static str; - - fn try_from(v: u128) -> Result { - match v { - $(x if x == $name::$vname as u128 => Ok($name::$vname),)* - _ => { - Err(stringify!(unsupported $name)) - }, - } - } - } - } -} - -u128_to_enum! { -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, Hash, Debug)] -pub enum TokenType { - MLT = 0, - Normal = 1, - CT = 2, - NFT = 3, - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn validate() { - // improper sig meth - assert_eq!(OutputHeaderData::new(0b11111_111u128).validate(), false); - // improper token type - assert_eq!(OutputHeaderData::new(0b11000_100u128).validate(), false); - - // Proper header - assert!(OutputHeaderData::new( - 0b10_0000000000000000000000000000000000000000000000000000000000000000_010u128 - ) - .validate()); - assert!(OutputHeaderData::new( - 0b01_0000000000000000000000000000000000000000000000000000000000000001_000u128 - ) - .validate()); - assert!(OutputHeaderData::new(0u128).validate()); - } - - #[test] - fn token_types() { - let x = 0b11011_000u128; // last 3 bits are 000, so token_type should be 0 or MLT. 
- let header = OutputHeaderData::new(x); - assert!(header.token_type().is_some()); - assert_eq!(header.token_type().unwrap(), TokenType::MLT); - - let x = 0b0000100_001; // last 3 bits are 001, so token_type should be Normal - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::Normal - ); - - let x = 0b111110_010; // last 3 bits are 010, so token_type should be CT - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::CT - ); - - let x = 0b111110_011; // last 3 bits are 011, so token_type should be NFT - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::NFT - ); - - let x = 0b10_111; // last 3 bits is are, and it's not yet supported. - assert_eq!(OutputHeaderData::new(x).token_type(), None); - - // last 3 bits are 001. Convert to 000 for MLT. - let mut header = OutputHeaderData::new(185u128); - header.set_token_type(TokenType::MLT); - assert_eq!(header.as_u128(), 184); - - // last 3 bits of header are 000. Convert to 010 for CT. - header.set_token_type(TokenType::CT); - assert_eq!(header.as_u128(), 186); - } - - #[allow(dead_code)] - fn print_bits(number: u128) { - let mut space = 0; - for i in 0..128 { - if check_bit(number, 127 - i) { - print!("1"); - } else { - print!("0"); - } - space += 1; - if space == 4 { - space = 0; - print!("_"); - } - } - println!(""); - } - - #[test] - fn token_ids() { - const TOKENID_TEST_0: u64 = 0; - const TOKENID_TEST_1: u64 = 1; - const TOKENID_TEST_2: u64 = 2; - - // the middle 64 bits are 000000, so type is TOKENID_TEST_0. - let header = OutputHeaderData::new( - 0b1010_0000000000000000000000000000000000000000000000000000000000000000_110, - ); - assert_eq!(header.token_id(), TOKENID_TEST_0); - - // the middle 64 bits are 000001, so type is TOKENID_TEST_1. - let header = OutputHeaderData::new( - 0b1010_0000000000000000000000000000000000000000000000000000000000000001_110, - ); - assert_eq!(header.token_id(), TOKENID_TEST_1); - - // the first 64 bits are 000010, so type is TOKENID_TEST_1. 
-        assert_eq!(
-            OutputHeaderData::new(0b000001_101).token_id(),
-            TOKENID_TEST_1
-        );
-        assert_eq!(OutputHeaderData::new(3u128).token_id(), TOKENID_TEST_0);
-
-        let mut improper_header = OutputHeaderData::new(u128::MAX);
-        improper_header.set_token_id(TOKENID_TEST_2);
-        assert_eq!(improper_header.token_id(), TOKENID_TEST_2);
-    }
-}
diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs
index 0d492ea..3714b79 100644
--- a/pallets/utxo/src/lib.rs
+++ b/pallets/utxo/src/lib.rs
@@ -17,7 +17,6 @@
 
 #![cfg_attr(not(feature = "std"), no_std)]
 
-pub use header::*;
 pub use pallet::*;
 
 #[cfg(test)]
@@ -29,14 +28,28 @@ mod tests;
 #[cfg(feature = "runtime-benchmarks")]
 mod benchmarking;
 
-mod header;
 mod script;
 pub mod weights;
 
+// Pure MLT without any tokens
+const MLT_ID: u64 = 0;
+pub const SR25519: sp_runtime::KeyTypeId = sp_runtime::KeyTypeId(*b"sr25");
+
+pub type TokenId = u64;
+pub type NftId = u64;
+pub type Value = u128;
+pub type String = Vec<u8>;
+
+pub struct Mlt(Value);
+impl Mlt {
+    pub fn to_munit(&self) -> Value {
+        self.0 * 1_000 * 100_000_000
+    }
+}
+
 #[frame_support::pallet]
 pub mod pallet {
-    use crate::TXOutputHeader;
-    use crate::{OutputHeaderData, OutputHeaderHelper, TokenID};
+    use crate::{Mlt, NftId, String, TokenId, Value, MLT_ID, SR25519};
     use chainscript::Script;
     use codec::{Decode, Encode};
     use core::convert::TryInto;
@@ -55,12 +68,10 @@ pub mod pallet {
     use pp_api::ProgrammablePoolApi;
     #[cfg(feature = "std")]
     use serde::{Deserialize, Serialize};
-    use sp_core::sr25519::Public;
     use sp_core::{
         sp_std::collections::btree_map::BTreeMap,
         sp_std::{str, vec},
         sr25519::{Public as SR25Pub, Signature as SR25Sig},
-        testing::SR25519,
         H256, H512,
     };
     use sp_runtime::traits::{
@@ -68,16 +79,6 @@ pub mod pallet {
     };
     use sp_runtime::DispatchErrorWithPostInfo;
 
-    pub type Value = u128;
-    pub type String = Vec<u8>;
-
-    pub struct Mlt(Value);
-    impl Mlt {
-        pub fn to_munit(&self) -> Value {
-            self.0 * 1_000 * 100_000_000
-        }
-    }
-
     #[pallet::error]
     pub enum Error<T> {
         /// Account balance must be greater than or equal to the transfer amount.
@@ -230,12 +231,42 @@ pub mod pallet {
         }
     }
 
+    #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+    #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug, Hash)]
+    pub enum TxHeaderAndExtraData {
+        NormalTx {
+            token_id: TokenId,
+        },
+        ConfidentialTx, /* not implemented yet */
+        NFT {
+            id: NftId,
+            data: [u8; 32],
+            creator: SR25Pub,
+        },
+    }
+
+    impl TxHeaderAndExtraData {
+        pub fn token_id(&self) -> Option<TokenId> {
+            match self {
+                Self::NormalTx { token_id } => Some(*token_id),
+                _ => None,
+            }
+        }
+        pub fn nft_id(&self) -> Option<NftId> {
+            match self {
+                Self::NFT { id, .. } => Some(*id),
+                _ => None,
+            }
+        }
+    }
+
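The variants above make the output header self-describing: a fungible-token output carries its token id, while NFT data lives directly in the header. A self-contained sketch of how the accessors are meant to be consumed downstream (plain u64 and array types stand in for TokenId, NftId and SR25Pub; this mirrors the patch's enum, it is not the pallet code itself):

type TokenId = u64;
type NftId = u64;

#[derive(Clone, Debug, PartialEq)]
enum TxHeaderAndExtraData {
    NormalTx { token_id: TokenId },
    ConfidentialTx,
    NFT { id: NftId, data: [u8; 32], creator: [u8; 32] },
}

impl TxHeaderAndExtraData {
    fn token_id(&self) -> Option<TokenId> {
        match self {
            Self::NormalTx { token_id } => Some(*token_id),
            _ => None,
        }
    }
}

fn main() {
    let headers = vec![
        TxHeaderAndExtraData::NormalTx { token_id: 7 },
        TxHeaderAndExtraData::ConfidentialTx,
        TxHeaderAndExtraData::NFT { id: 1, data: [0u8; 32], creator: [0u8; 32] },
    ];
    // Only fungible-token outputs carry a token id; the compiler forces the
    // NFT and confidential cases to be handled explicitly instead of being
    // decoded out of a bit field.
    let ids: Vec<TokenId> = headers.iter().filter_map(|h| h.token_id()).collect();
    assert_eq!(ids, vec![7]);
}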
    /// Output of a transaction
    #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
    #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug, Hash)]
+    #[repr(C)]
     pub struct TransactionOutput<AccountId> {
+        pub(crate) header: TxHeaderAndExtraData,
         pub(crate) value: Value,
-        pub(crate) header: TXOutputHeader,
         pub(crate) destination: Destination<AccountId>,
     }
 
@@ -246,8 +277,8 @@ pub mod pallet {
         /// functions are available in TXOutputHeaderImpls to update the header.
         pub fn new_pubkey(value: Value, pub_key: H256) -> Self {
             Self {
+                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
                 value,
-                header: 0,
                 destination: Destination::Pubkey(pub_key),
             }
         }
 
         /// Create a new output to create a smart contract.
         pub fn new_create_pp(value: Value, code: Vec<u8>, data: Vec<u8>) -> Self {
             Self {
+                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
                 value,
-                header: 0,
                 destination: Destination::CreatePP(code, data),
             }
         }
 
@@ -264,19 +295,16 @@ pub mod pallet {
         /// Create a new output to call a smart contract routine.
         pub fn new_call_pp(value: Value, dest_account: AccountId, input: Vec<u8>) -> Self {
             Self {
+                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
                 value,
-                header: 0,
                 destination: Destination::CallPP(dest_account, input),
             }
         }
 
-        pub fn new_token(token_id: TokenID, value: Value, pub_key: H256) -> Self {
-            let mut header = OutputHeaderData::new(0);
-            header.set_token_id(token_id);
-            let header = header.as_u128();
+        pub fn new_token(token_id: TokenId, value: Value, pub_key: H256) -> Self {
             Self {
                 value,
-                header,
+                header: TxHeaderAndExtraData::NormalTx { token_id },
                 destination: Destination::Pubkey(pub_key),
             }
         }
 
         /// Create a new output to given script hash.
         pub fn new_script_hash(value: Value, hash: H256) -> Self {
             Self {
+                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
                 value,
-                header: 0,
                 destination: Destination::ScriptHash(hash),
             }
         }
 
         /// Create a new output to given pubkey hash
         pub fn new_pubkey_hash(value: Value, script: Script) -> Self {
             Self {
+                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
                 value,
-                header: 0,
                 destination: Destination::PubkeyHash(script.into_bytes()),
             }
         }
     }
 
-    impl<AccountId> TransactionOutput<AccountId> {
-        fn validate_header(&self) -> Result<(), &'static str> {
-            // Check signature and token id
-            self.header
-                .as_tx_output_header()
-                .validate()
-                .then(|| ())
-                .ok_or("Incorrect header")
-        }
-    }
-
     #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
     #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash, Default)]
     pub struct Transaction<AccountId> {
@@ -352,7 +369,7 @@ pub mod pallet {
 
     #[pallet::storage]
     #[pallet::getter(fn tokens_higher_id)]
-    pub(super) type TokensHigherID<T> = StorageValue<_, TokenID, ValueQuery>;
+    pub(super) type TokensHigherID<T> = StorageValue<_, TokenId, ValueQuery>;
 
     #[pallet::storage]
     #[pallet::getter(fn reward_total)]
@@ -478,31 +495,41 @@ pub mod pallet {
             );
         }
 
         let full_inputs: Vec<(crate::TokenID, TransactionOutputFor<T>)> = tx
             .inputs
             .iter()
             .filter_map(|input| <UtxoStore<T>>::get(&input.outpoint))
-            .map(|output| (OutputHeaderData::new(output.header).token_id(), output))
+            .filter_map(|output| {
+                if let TxHeaderAndExtraData::NormalTx { token_id } = &output.header {
+                    Some((*token_id, output))
+                } else {
+                    None
+                }
+            })
             .collect();
 
         let input_vec: Vec<(crate::TokenID, Value)> =
             full_inputs.iter().map(|output| (output.0, output.1.value)).collect();
 
         let out_vec: Vec<(crate::TokenID, Value)> = tx
             .outputs
             .iter()
-            .map(|output| {
-                (
-                    OutputHeaderData::new(output.header).token_id(),
-                    output.value,
-                )
+            .filter_map(|output| {
+                if let TxHeaderAndExtraData::NormalTx { token_id } = &output.header {
+                    Some((*token_id, output.value))
+                } else {
+                    None
+                }
             })
             .collect();
 
         // Check for token creation
         let tokens_list = <TokenList<T>>::get();
         for output in tx.outputs.iter() {
-            let tid = OutputHeaderData::new(output.header).token_id();
+            let tid = match output.header {
+                TxHeaderAndExtraData::NormalTx { token_id } => token_id,
+                _ => continue,
+            };
             // If we have input and output for the same token it's not a problem
             if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() {
                 continue;
             } else {
                 // But when we don't have an input for token but token id exist in TokenList
                 ensure!(
                     tokens_list.iter().find(|&x| x.id == tid).is_none(),
                     "no inputs for the token id"
                 );
             }
         }
@@ -579,13 +606,6 @@ pub mod pallet {
 
         // Check that outputs are valid
         for (output_index, output) in tx.enumerate_outputs()? {
-            // Check the header is valid
-            let res = output.validate_header();
-            if let Err(e) = res {
-                log::error!("Header error: {}", e);
-            }
-            ensure!(res.is_ok(), "header error. Please check the logs.");
-
             match output.destination {
                 Destination::Pubkey(_)
                 | Destination::ScriptHash(_)
@@ -607,8 +627,8 @@ pub mod pallet {
             // if no race condition, check the math
             if missing_utxos.is_empty() {
                 // We have to check sum of input tokens is less or equal to output tokens.
-                let mut inputs_sum: BTreeMap<crate::TokenID, Value> = BTreeMap::new();
-                let mut outputs_sum: BTreeMap<crate::TokenID, Value> = BTreeMap::new();
+                let mut inputs_sum: BTreeMap<TokenId, Value> = BTreeMap::new();
+                let mut outputs_sum: BTreeMap<TokenId, Value> = BTreeMap::new();
 
                 for x in input_vec {
                     let value =
@@ -642,14 +662,14 @@ pub mod pallet {
             }
 
             // Reward at the moment only in MLT
-            reward = if inputs_sum.contains_key(&(crate::TokenType::MLT as TokenID))
-                && outputs_sum.contains_key(&(crate::TokenType::MLT as TokenID))
+            reward = if inputs_sum.contains_key(&(MLT_ID as TokenId))
+                && outputs_sum.contains_key(&(MLT_ID as TokenId))
             {
-                inputs_sum[&(crate::TokenType::MLT as TokenID)]
-                    .checked_sub(outputs_sum[&(crate::TokenType::MLT as TokenID)])
+                inputs_sum[&(MLT_ID as TokenId)]
+                    .checked_sub(outputs_sum[&(MLT_ID as TokenId)])
                     .ok_or("reward underflow")?
             } else {
-                *inputs_sum.get(&(crate::TokenType::MLT as TokenID)).ok_or("fee doesn't exist")?
+                *inputs_sum.get(&(MLT_ID as TokenId)).ok_or("fee doesn't exist")?
             }
         }
@@ -864,7 +884,7 @@ pub mod pallet {
             }
         };
 
-        let pubkey: Public = Public::from_raw(pubkey_raw);
+        let pubkey: SR25Pub = SR25Pub::from_raw(pubkey_raw);
         let wit_prog = match bech32::wit_prog::WitnessProgram::from_address(
             address[..2].to_vec(),
             address,
diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs
index bf86962..b56a8a1 100644
--- a/pallets/utxo/src/tests.rs
+++ b/pallets/utxo/src/tests.rs
@@ -516,3 +516,37 @@ fn test_send_to_address() {
         ));
     })
 }
+
+#[test]
+fn nft_test() {
+    execute_with_alice(|alice_pub_key| {
+        let token_id = 100;
+        let empty = b"0000000000000000000000000000000000000000000000000000000000000000";
+        let data = b"0101010101010101010101010101010101010101010101010101010101010101";
+        // let data_token = ERC1948.new();
+        // dataToken.mint(accounts[0], firstTokenId);
+
+        // const ERC1948 = artifacts.require('./ERC1948.sol');
+        //
+        // contract('ERC1948', (accounts) => {
+        //     const firstTokenId = 100;
+        //     const empty = '0x0000000000000000000000000000000000000000000000000000000000000000';
+        //     const data = '0x0101010101010101010101010101010101010101010101010101010101010101';
+        //     let dataToken;
+        //
+        //     beforeEach(async () => {
+        //         dataToken = await ERC1948.new();
+        //         await dataToken.mint(accounts[0], firstTokenId);
+        //     });
+        //
+        //     it('should allow to write and read', async () => {
+        //         let rsp = await dataToken.readData(firstTokenId);
+        //         assert.equal(rsp, empty);
+        //         await dataToken.writeData(firstTokenId, data);
+        //         rsp = await dataToken.readData(firstTokenId);
+        //         assert.equal(rsp, data);
+        //     });
+        //
+        //     });
+    });
+}
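For contrast with the enum this patch introduces, the deleted header.rs packed everything into a u128: bits 0..3 token type, bits 3..67 token id, bits 67..72 version. A standalone sketch of the same decoding, condensing the bit-by-bit fit_in_bits loop into a single shift-and-mask (equivalent for these in-range fields):

// Extract `length` bits starting at `offset` from a packed header.
fn extract(header: u128, offset: u32, length: u32) -> u128 {
    (header >> offset) & ((1u128 << length) - 1)
}

fn main() {
    // token type = 0b010 (CT), token id = 1, version = 1.
    let header: u128 =
        0b1_0000000000000000000000000000000000000000000000000000000000000001_010;
    assert_eq!(extract(header, 0, 3), 0b010); // token type field
    assert_eq!(extract(header, 3, 64), 1); // token id field
    assert_eq!(extract(header, 67, 5), 1); // version field
}

Every u128 is a syntactically valid header, which is why the old code needed a runtime validate() step; the enum that replaces it makes invalid headers unrepresentable.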
From f42ac625803908e5136babd5e899ba9923bf9337 Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Mon, 4 Oct 2021 08:49:14 +0300
Subject: [PATCH 02/53] Added the possibility to create an NFT, and removed the
 old TxHeader

Signed-off-by: sinitcin

---
 Cargo.lock                     |   1 +
 pallets/utxo/src/lib.rs        | 239 ++++++++++++++++++++++++---------
 pallets/utxo/src/tests.rs      |  77 ++++++-----
 pallets/utxo/tokens/Cargo.toml |   6 +
 pallets/utxo/tokens/src/lib.rs |  55 ++++++--
 5 files changed, 271 insertions(+), 107 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 50601f6..5514764 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4190,6 +4190,7 @@ dependencies = [
  "log",
  "parity-scale-codec",
  "serde",
+ "sp-core",
 ]
 
 [[package]]
diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs
index 3714b79..d300db9 100644
--- a/pallets/utxo/src/lib.rs
+++ b/pallets/utxo/src/lib.rs
@@ -31,12 +31,9 @@ mod benchmarking;
 mod script;
 pub mod weights;
 
-// Pure MLT without any tokens
-const MLT_ID: u64 = 0;
 pub const SR25519: sp_runtime::KeyTypeId = sp_runtime::KeyTypeId(*b"sr25");
 
-pub type TokenId = u64;
-pub type NftId = u64;
+pub type TokenId = H256;
 pub type Value = u128;
 pub type String = Vec<u8>;
 
@@ -49,7 +46,7 @@ impl Mlt {
 
 #[frame_support::pallet]
 pub mod pallet {
-    use crate::{Mlt, NftId, String, TokenId, Value, MLT_ID, SR25519};
+    use crate::{Mlt, String, TokenId, Value, SR25519};
     use chainscript::Script;
     use codec::{Decode, Encode};
     use core::convert::TryInto;
@@ -64,7 +61,7 @@ pub mod pallet {
     };
     use frame_system::pallet_prelude::*;
     use hex_literal::hex;
-    use pallet_utxo_tokens::TokenListData;
+    use pallet_utxo_tokens::{TokenInstance, TokenListData};
     use pp_api::ProgrammablePoolApi;
     #[cfg(feature = "std")]
     use serde::{Deserialize, Serialize};
@@ -105,6 +102,8 @@ pub mod pallet {
         Unapproved,
         /// The source account would not survive the transfer and it needs to stay alive.
         WouldDie,
+        /// Thrown when there is an attempt to mint a duplicate collection.
+        NftCollectionExists,
     }
 
     #[pallet::pallet]
@@ -235,27 +234,38 @@ pub mod pallet {
     #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug, Hash)]
     pub enum TxHeaderAndExtraData {
         NormalTx {
-            token_id: TokenId,
+            // Normal token ID
+            id: TokenId,
         },
         ConfidentialTx, /* not implemented yet */
-        NFT {
-            id: NftId,
-            data: [u8; 32],
-            creator: SR25Pub,
+        Nft {
+            id: TokenId,
+            data_hash: [u8; 32],
+            data_url: String,
+            creator_pkh: H256,
         },
     }
 
     impl TxHeaderAndExtraData {
-        pub fn token_id(&self) -> Option<TokenId> {
+        pub fn id(&self) -> Option<TokenId> {
             match self {
-                Self::NormalTx { token_id } => Some(*token_id),
+                Self::NormalTx { id } => Some(*id),
+                Self::Nft { id, .. } => Some(*id),
                 _ => None,
             }
         }
-        pub fn nft_id(&self) -> Option<NftId> {
+
+        pub fn is_normal(&self) -> bool {
             match self {
-                Self::NFT { id, .. } => Some(*id),
-                _ => None,
+                Self::NormalTx { .. } => true,
+                _ => false,
+            }
+        }
+
+        pub fn is_nft(&self) -> bool {
+            match self {
+                Self::Nft { .. } => true,
+                _ => false,
+            }
         }
     }
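Predicate pairs like the two above are exactly where a stale wildcard arm can silently invert a result; the `matches!` macro expresses the same checks with no catch-all arm to get out of sync. A minimal self-contained sketch of that alternative form:

enum Header { NormalTx, ConfidentialTx, Nft }

impl Header {
    // Same behaviour as the match-based predicates, one expression each.
    fn is_normal(&self) -> bool { matches!(self, Header::NormalTx) }
    fn is_nft(&self) -> bool { matches!(self, Header::Nft) }
}

fn main() {
    assert!(Header::Nft.is_nft());
    assert!(!Header::ConfidentialTx.is_nft());
    assert!(Header::NormalTx.is_normal());
}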
 
@@ -277,7 +287,7 @@ pub mod pallet {
         /// functions are available in TXOutputHeaderImpls to update the header.
         pub fn new_pubkey(value: Value, pub_key: H256) -> Self {
             Self {
-                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
+                header: TxHeaderAndExtraData::NormalTx { id: H256::zero() },
                 value,
                 destination: Destination::Pubkey(pub_key),
             }
         }
 
         /// Create a new output to create a smart contract.
         pub fn new_create_pp(value: Value, code: Vec<u8>, data: Vec<u8>) -> Self {
             Self {
-                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
+                header: TxHeaderAndExtraData::NormalTx { id: H256::zero() },
                 value,
                 destination: Destination::CreatePP(code, data),
             }
         }
 
         /// Create a new output to call a smart contract routine.
         pub fn new_call_pp(value: Value, dest_account: AccountId, input: Vec<u8>) -> Self {
             Self {
-                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
+                header: TxHeaderAndExtraData::NormalTx { id: H256::zero() },
                 value,
                 destination: Destination::CallPP(dest_account, input),
             }
         }
 
-        pub fn new_token(token_id: TokenId, value: Value, pub_key: H256) -> Self {
+        pub fn new_token(id: TokenId, value: Value, pub_key: H256) -> Self {
             Self {
                 value,
-                header: TxHeaderAndExtraData::NormalTx { token_id },
+                header: TxHeaderAndExtraData::NormalTx { id },
                 destination: Destination::Pubkey(pub_key),
             }
         }
 
+        pub fn new_nft(id: TokenId, data_hash: [u8; 32], data_url: String, creator: H256) -> Self {
+            Self {
+                value: 0,
+                header: TxHeaderAndExtraData::Nft {
+                    id,
+                    data_hash,
+                    creator_pkh: creator.clone(),
+                    data_url,
+                },
+                destination: Destination::Pubkey(creator),
+            }
+        }
+
         /// Create a new output to given script hash.
         pub fn new_script_hash(value: Value, hash: H256) -> Self {
             Self {
-                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
+                header: TxHeaderAndExtraData::NormalTx { id: H256::zero() },
                 value,
                 destination: Destination::ScriptHash(hash),
             }
         }
 
         /// Create a new output to given pubkey hash
         pub fn new_pubkey_hash(value: Value, script: Script) -> Self {
             Self {
-                header: TxHeaderAndExtraData::NormalTx { token_id: MLT_ID },
+                header: TxHeaderAndExtraData::NormalTx { id: H256::zero() },
                 value,
                 destination: Destination::PubkeyHash(script.into_bytes()),
             }
         }
     }
 
@@ -368,8 +391,9 @@ pub mod pallet {
 
     #[pallet::storage]
     #[pallet::getter(fn token_list)]
     pub(super) type TokenList<T> = StorageValue<_, TokenListData, ValueQuery>;
 
     #[pallet::storage]
-    #[pallet::getter(fn tokens_higher_id)]
-    pub(super) type TokensHigherID<T> = StorageValue<_, TokenId, ValueQuery>;
+    #[pallet::getter(fn owner_nft)]
+    pub(super) type OwnerNft<T> =
+        StorageMap<_, Identity, TokenId, /* PKH */ Option<H256>, ValueQuery>;
 
     #[pallet::storage]
     #[pallet::getter(fn reward_total)]
@@ -384,7 +408,8 @@ pub mod pallet {
     #[pallet::generate_deposit(pub(super) fn deposit_event)]
     #[pallet::metadata(T::AccountId = "AccountId")]
     pub enum Event<T: Config> {
-        TokenCreated(u64, T::AccountId),
+        TokenCreated(H256, T::AccountId),
+        Minted(H256, T::AccountId, String),
         TransactionSuccess(TransactionFor<T>),
     }
 
@@ -426,7 +451,6 @@ pub mod pallet {
 
         for authority in auths {
             // TODO: where do we get the header info?
-            // TODO: are the rewards always of MLT token type?
             let utxo = TransactionOutput::new_pubkey(share_value, *authority);
 
             let hash = {
@@ -500,8 +524,8 @@ pub mod pallet {
             .iter()
             .filter_map(|input| <UtxoStore<T>>::get(&input.outpoint))
             .filter_map(|output| {
-                if let TxHeaderAndExtraData::NormalTx { token_id } = &output.header {
-                    Some((*token_id, output))
+                if let TxHeaderAndExtraData::NormalTx { id } = &output.header {
+                    Some((*id, output))
                 } else {
                     None
                 }
@@ -515,8 +539,8 @@ pub mod pallet {
             .outputs
             .iter()
             .filter_map(|output| {
-                if let TxHeaderAndExtraData::NormalTx { token_id } = &output.header {
-                    Some((*token_id, output.value))
+                if let TxHeaderAndExtraData::NormalTx { id } = &output.header {
+                    Some((*id, output.value))
                 } else {
                     None
                 }
             })
             .collect();
 
         // Check for token creation
         let tokens_list = <TokenList<T>>::get();
         for output in tx.outputs.iter() {
             let tid = match output.header {
-                TxHeaderAndExtraData::NormalTx { token_id } => token_id,
+                TxHeaderAndExtraData::NormalTx { id } => id,
                 _ => continue,
             };
             // If we have input and output for the same token it's not a problem
             if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() {
                 continue;
             } else {
                 // But when we don't have an input for token but token id exist in TokenList
                 ensure!(
-                    tokens_list.iter().find(|&x| x.id == tid).is_none(),
+                    tokens_list
+                        .iter()
+                        .find(|&x| match x {
+                            crate::TokenInstance::Normal { id, .. }
+                            | crate::TokenInstance::Nft { id, .. } => id,
+                        } == &tid)
+                        .is_none(),
                     "no inputs for the token id"
                 );
             }
         }
 
         // Check that outputs are valid
         for (output_index, output) in tx.enumerate_outputs()? {
             match output.destination {
                 Destination::Pubkey(_)
                 | Destination::ScriptHash(_)
                 | Destination::PubkeyHash(_) => {
-                    ensure!(output.value > 0, "output value must be nonzero");
+                    if output.header.is_normal() {
+                        ensure!(output.value > 0, "output value must be nonzero");
+                    }
                     let hash = tx.outpoint(output_index);
                     ensure!(!<UtxoStore<T>>::contains_key(hash), "output already exists");
                     new_utxos.push(hash.as_fixed_bytes().to_vec());
 
@@ -662,14 +694,14 @@ pub mod pallet {
             }
 
             // Reward at the moment only in MLT
-            reward = if inputs_sum.contains_key(&(MLT_ID as TokenId))
-                && outputs_sum.contains_key(&(MLT_ID as TokenId))
+            reward = if inputs_sum.contains_key(&H256::zero())
+                && outputs_sum.contains_key(&H256::zero())
             {
-                inputs_sum[&(MLT_ID as TokenId)]
-                    .checked_sub(outputs_sum[&(MLT_ID as TokenId)])
+                inputs_sum[&H256::zero()]
+                    .checked_sub(outputs_sum[&H256::zero()])
                     .ok_or("reward underflow")?
             } else {
-                *inputs_sum.get(&(MLT_ID as TokenId)).ok_or("fee doesn't exist")?
+                *inputs_sum.get(&H256::zero()).ok_or("fee doesn't exist")?
             }
         }
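The reward rule above reduces to three cases: if MLT (the zero token id) appears on both sides, the reward is inputs minus outputs; if it appears only on the input side, the whole input sum is the fee; otherwise the transaction has no fee and is rejected. A worked example with std maps standing in for the collected sums (a [u8; 32] array stands in for sp_core::H256; note Mlt(x).to_munit() is x * 1_000 * 100_000_000):

use std::collections::BTreeMap;

type TokenId = [u8; 32]; // stand-in for sp_core::H256
const MLT: TokenId = [0u8; 32];

fn reward(inputs: &BTreeMap<TokenId, u128>, outputs: &BTreeMap<TokenId, u128>) -> Option<u128> {
    match (inputs.get(&MLT), outputs.get(&MLT)) {
        // MLT on both sides: the difference is the reward, None on underflow.
        (Some(i), Some(o)) => i.checked_sub(*o),
        // MLT only on the input side: the whole input sum is the fee.
        (Some(i), None) => Some(*i),
        // No MLT input at all: "fee doesn't exist".
        _ => None,
    }
}

fn main() {
    let mut inputs = BTreeMap::new();
    let mut outputs = BTreeMap::new();
    inputs.insert(MLT, 1_000);
    outputs.insert(MLT, 900);
    assert_eq!(reward(&inputs, &outputs), Some(100)); // 100 munits left as fee
    outputs.insert(MLT, 2_000);
    assert_eq!(reward(&inputs, &outputs), None); // would underflow: rejected
}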
 
         fn token_create(
             caller: &T::AccountId,
             public: H256,
             input_for_fee: TransactionInput,
             token_name: String,
             token_ticker: String,
             supply: Value,
-        ) -> Result<u64, Error<T>> {
+        ) -> Result<TokenId, Error<T>> {
             ensure!(token_name.len() <= 25, Error::<T>::Unapproved);
             ensure!(token_ticker.len() <= 5, Error::<T>::Unapproved);
             ensure!(!supply.is_zero(), Error::<T>::MinBalanceZero);
 
-            // Take a free TokenID
-            let token_id =
-                <TokensHigherID<T>>::get().checked_add(1).ok_or("All tokens IDs has taken")?;
-
             // Input with MLT FEE
             let fee = UtxoStore::<T>::get(input_for_fee.outpoint).ok_or(Error::<T>::Unapproved)?.value;
             ensure!(fee >= Mlt(100).to_munit(), Error::<T>::Unapproved);
 
             // Save in UTXO
-            let instance = crate::TokenInstance::new(token_id, token_name, token_ticker, supply);
+            let instance = crate::TokenInstance::new_normal(
+                BlakeTwo256::hash_of(&(&token_name, &token_ticker)),
+                token_name,
+                token_ticker,
+                supply,
+            );
+
             let mut tx = Transaction {
                 inputs: crate::vec![
                     // Fee an input equal 100 MLT
                     input_for_fee,
                 ],
                 outputs: crate::vec![
                     // Output a new tokens
-                    TransactionOutput::new_token(token_id, supply, public),
+                    TransactionOutput::new_token(*instance.id(), supply, public),
                 ],
             };
 
@@ -773,21 +807,88 @@ pub mod pallet {
                 ));
             }
 
+            let sig = crypto::sr25519_sign(
+                SR25519,
+                &sp_core::sr25519::Public::from_h256(public),
+                &tx.encode(),
+            )
+            .unwrap();
+            for i in 0..tx.inputs.len() {
+                tx.inputs[i].witness = sig.0.to_vec();
+            }
+            // Success
+            spend::<T>(caller, &tx)?;
+
             // Save in Store
             <TokenList<T>>::mutate(|x| {
-                if x.iter().find(|&x| x.id == token_id).is_none() {
+                if x.iter().find(|&x| x.id() == instance.id()).is_none() {
                     x.push(instance.clone())
                 } else {
                     panic!("the token has already existed with the same id")
                 }
             });
 
-            // Success
-            spend::<T>(caller, &tx)?;
-            Ok(token_id)
+            Ok(*instance.id())
+        }
+
+        fn mint(
+            caller: &T::AccountId,
+            creator_pubkey: sp_core::sr25519::Public,
+            data_url: String,
+            data_hash: [u8; 32],
+        ) -> Result<TokenId, Error<T>> {
+            let (fee, inputs_hashes) = pick_utxo::<T>(caller, Mlt(100).to_munit());
+            ensure!(fee >= Mlt(100).to_munit(), Error::<T>::Unapproved);
+
+            let instance = TokenInstance::new_nft(
+                BlakeTwo256::hash_of(&data_hash),
+                data_hash,
+                data_url.clone(),
+                creator_pubkey,
+            );
+
+            let inputs_for_fee = inputs_hashes
+                .iter()
+                .filter_map(|x| <UtxoStore<T>>::get(&x))
+                .map(|output| TransactionInput::new_empty(BlakeTwo256::hash_of(&(&output, 0 as u64))))
+                .collect();
+
+            ensure!(
+                !OwnerNft::<T>::contains_key(instance.id()),
+                Error::<T>::NftCollectionExists
+            );
+
+            let mut tx = Transaction {
+                inputs: inputs_for_fee,
+                outputs: crate::vec![
+                    // Output a new tokens
+                    TransactionOutput::new_nft(
+                        *instance.id(),
+                        data_hash,
+                        data_url,
+                        H256::from(creator_pubkey)
+                    ),
+                ],
+            };
+
+            let sig = crypto::sr25519_sign(SR25519, &creator_pubkey, &tx.encode()).unwrap();
+            for i in 0..tx.inputs.len() {
+                tx.inputs[i].witness = sig.0.to_vec();
+            }
+            // Success
+            spend::<T>(caller, &tx)?;
+
+            // Save in Store
+            <TokenList<T>>::mutate(|x| {
+                if x.iter().find(|&x| x.id() == instance.id()).is_none() {
+                    x.push(instance.clone())
+                } else {
+                    panic!("the token has already existed with the same id")
+                }
+            });
+            Ok(*instance.id())
         }
 
-        /// Pick the UTXOs of `caller` from UtxoStore that satify request `value`
+        /// Pick the UTXOs of `caller` from UtxoStore that satisfy request `value`
         ///
         /// Return a list of UTXOs that satisfy the request
         /// Return empty vector if caller doesn't have enough UTXO
 
             let mut total = 0;
             for (hash, utxo) in UtxoStore::<T>::iter() {
-                let utxo = utxo.unwrap();
-
-                match utxo.destination {
-                    Destination::Pubkey(pubkey) => {
-                        if caller.encode() == pubkey.encode() {
-                            utxos.push(hash);
-                            total += utxo.value;
-
-                            if utxo.value >= value {
-                                break;
-                            }
-                            value -= utxo.value;
+                if let Some(utxo) = utxo {
+                    match utxo.destination {
+                        Destination::Pubkey(pubkey) => {
+                            if caller.encode() == pubkey.encode() {
+                                utxos.push(hash);
+                                total += utxo.value;
+
+                                if utxo.value >= value {
+                                    break;
+                                }
+                                value -= utxo.value;
+                            }
                         }
+                        _ => {}
                     }
-                    _ => {}
                 }
             }
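pick_utxo above is a greedy selector: it walks the caller's UTXOs keeping a running total and a shrinking `value` (the amount still missing), and stops as soon as one UTXO covers the remainder. The same loop in isolation, with u32 stand-ins for the storage hashes:

fn pick_utxo(mut value: u128, owned: &[(u32, u128)]) -> (u128, Vec<u32>) {
    let mut total = 0;
    let mut picked = Vec::new();
    for (hash, utxo_value) in owned {
        picked.push(*hash);
        total += utxo_value;
        if *utxo_value >= value {
            break; // this UTXO covers what was still missing
        }
        value -= utxo_value; // still short: reduce the remaining request
    }
    (total, picked)
}

fn main() {
    let owned = [(1, 40), (2, 70), (3, 500)];
    let (total, picked) = pick_utxo(100, &owned);
    assert!(total >= 100); // caller had enough: total covers the request
    assert_eq!(picked, vec![1, 2]); // the third UTXO was never touched
}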
 
             Ok(().into())
         }
 
+        /// Create a new NFT from the provided NFT info and identify the specified
+        /// account as its owner. The ID of the new NFT will be equal to the hash of the info
+        /// that defines it, as calculated by the runtime system's hashing algorithm.
+        #[pallet::weight(10_000)]
+        pub fn mint(
+            origin: OriginFor<T>,
+            creator_pubkey: sp_core::sr25519::Public,
+            data_url: String,
+            data_hash: [u8; 32],
+        ) -> DispatchResultWithPostInfo {
+            let caller = &ensure_signed(origin)?;
+            let nft_id = mint::<T>(caller, creator_pubkey, data_url.clone(), data_hash)?;
+            Self::deposit_event(Event::<T>::Minted(nft_id, caller.clone(), data_url));
+            Ok(().into())
+        }
+
         #[pallet::weight(T::WeightInfo::send_to_address(16_u32.saturating_add(address.len() as u32)))]
         pub fn send_to_address(
             origin: OriginFor<T>,
diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs
index b56a8a1..bd636d0 100644
--- a/pallets/utxo/src/tests.rs
+++ b/pallets/utxo/src/tests.rs
@@ -350,20 +350,20 @@ fn test_script() {
 
 #[test]
 fn test_tokens() {
-    use crate::TokensHigherID;
-
     let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys();
     test_ext.execute_with(|| {
         // Let's create a new test token
-        let token_id = <TokensHigherID<Test>>::get()
-            .checked_add(1)
-            .ok_or("All tokens IDs has taken")
-            .unwrap();
+        let token_id = BlakeTwo256::hash_of(&b"TEST");
+        let supply = 1000;
 
         // Let's make a tx for a new token:
         // * We need at least one input for the fee and one output for a new token.
         // * TokenID for a new token has to be unique.
-        let instance =
-            TokenInstance::new(token_id, b"New token test".to_vec(), b"NTT".to_vec(), 1000);
+        let instance = TokenInstance::new_normal(
+            token_id,
+            b"New token test".to_vec(),
+            b"NTT".to_vec(),
+            supply,
+        );
         let mut first_tx = Transaction {
             inputs: vec![
                 // 100 MLT
                 tx_input_gen_no_signature(),
             ],
             outputs: vec![
                 // 100 a new tokens
-                TransactionOutput::new_token(token_id, instance.supply, H256::from(alice_pub_key)),
+                TransactionOutput::new_token(token_id, supply, H256::from(alice_pub_key)),
                 // 20 MLT to be paid as a fee, 80 MLT returning
                 TransactionOutput::new_pubkey(80, H256::from(alice_pub_key)),
             ],
         };
 
         assert_ok!(Utxo::spend(Origin::signed(H256::zero()), first_tx.clone()));
         // Store a new TokenInstance to the Storage
         <TokenList<Test>>::mutate(|x| {
-            if x.iter().find(|&x| x.id == token_id).is_none() {
+            if x.iter().find(|&x| x.id() == &token_id).is_none() {
                 x.push(instance.clone())
             } else {
                 panic!("the token has already existed with the same id")
             }
         });
@@ -520,33 +520,44 @@ fn test_send_to_address() {
 
 #[test]
 fn nft_test() {
     execute_with_alice(|alice_pub_key| {
-        let token_id = 100;
-        let empty = b"0000000000000000000000000000000000000000000000000000000000000000";
-        let data = b"0101010101010101010101010101010101010101010101010101010101010101";
-        // let data_token = ERC1948.new();
-        // dataToken.mint(accounts[0], firstTokenId);
-
-        // const ERC1948 = artifacts.require('./ERC1948.sol');
-        //
-        // contract('ERC1948', (accounts) => {
-        //     const firstTokenId = 100;
-        //     const empty = '0x0000000000000000000000000000000000000000000000000000000000000000';
-        //     const data = '0x0101010101010101010101010101010101010101010101010101010101010101';
-        //     let dataToken;
-        //
-        //     beforeEach(async () => {
-        //         dataToken = await ERC1948.new();
-        //         await dataToken.mint(accounts[0], firstTokenId);
-        //     });
-        //
-        //     it('should allow to write and read', async () => {
+        // Let's create a new test nft
+        let nft_id = BlakeTwo256::hash_of(&b"TEST");
+        let instance = TokenInstance::new_nft(
+            nft_id,
+            *b"01010101010101010101010101010101",
+            b"http://facebook.com".to_vec(),
+            alice_pub_key,
+        );
+
+        if let TokenInstance::Nft {
+            id,
+            data_hash,
+            data_url,
+            creator_pubkey: alice_pub_key,
+        } = instance
+        {
+            let mut tx = Transaction {
+                inputs: vec![
+                    // 100 MLT
+                    tx_input_gen_no_signature(),
+                ],
+                outputs: vec![TransactionOutput::new_nft(
+                    id,
+                    data_hash,
+                    data_url,
+                    H256::from(alice_pub_key),
+                )],
+            };
+            let alice_sig = crypto::sr25519_sign(SR25519, &alice_pub_key, &tx.encode()).unwrap();
+            tx.inputs[0].witness = alice_sig.0.to_vec();
+            assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone()));
+        }
+
+        // it should allow to write and read ?
+        // let rsp = await dataToken.readData(firstTokenId);
+        // assert.equal(rsp, empty);
+        // await dataToken.writeData(firstTokenId, data);
+        // rsp = await dataToken.readData(firstTokenId);
+        // assert.equal(rsp, data);
     });
 }
diff --git a/pallets/utxo/tokens/Cargo.toml b/pallets/utxo/tokens/Cargo.toml
index f78e558..f97a4da 100644
--- a/pallets/utxo/tokens/Cargo.toml
+++ b/pallets/utxo/tokens/Cargo.toml
@@ -23,3 +23,9 @@ package = "parity-scale-codec"
 version = "2.0.0"
 default-features = false
 features = ["derive"]
+
+[dependencies.sp-core]
+default-features = false
+git = 'https://github.com/paritytech/substrate.git'
+version = '4.0.0-dev'
+branch = "master"
\ No newline at end of file
diff --git a/pallets/utxo/tokens/src/lib.rs b/pallets/utxo/tokens/src/lib.rs
index 8c08575..b7d7d84 100644
--- a/pallets/utxo/tokens/src/lib.rs
+++ b/pallets/utxo/tokens/src/lib.rs
@@ -5,24 +5,33 @@ use serde::{Deserialize, Serialize};
 
 use codec::{Decode, Encode};
 use frame_support::{dispatch::Vec, RuntimeDebug};
+use sp_core::{sr25519::Public, H256};
 
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
 #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)]
-pub struct TokenInstance {
-    pub id: u64,
-    pub name: Vec<u8>,
-    pub ticker: Vec<u8>,
-    pub supply: u128,
-    // We can add another fields like:
-    // pub number_format: NumberFormat,
-    // pub image: UUID,
-    // pub transaction: XXX,
+pub enum TokenInstance {
+    Normal {
+        id: H256,
+        name: Vec<u8>,
+        ticker: Vec<u8>,
+        supply: u128,
+        // We can add other fields like:
+        // pub number_format: NumberFormat,
+        // pub image: UUID,
+        // pub transaction: XXX,
+    },
+    Nft {
+        id: H256,
+        data_hash: [u8; 32],
+        data_url: Vec<u8>,
+        creator_pubkey: Public,
+    },
 }
 
 impl Default for TokenInstance {
     fn default() -> Self {
-        Self {
-            id: 0,
+        Self::Normal {
+            id: H256::zero(),
             name: Vec::new(),
             ticker: Vec::new(),
             supply: 0,
         }
     }
 }
 
 impl TokenInstance {
-    pub fn new(id: u64, name: Vec<u8>, ticker: Vec<u8>, supply: u128) -> Self {
-        Self {
+    pub fn new_normal(id: H256, name: Vec<u8>, ticker: Vec<u8>, supply: u128) -> Self {
+        Self::Normal {
             id,
             name,
             ticker,
             supply,
         }
     }
+
+    pub fn new_nft(
+        id: H256,
+        data_hash: [u8; 32],
+        data_url: Vec<u8>,
+        creator_pubkey: Public,
+    ) -> Self {
+        Self::Nft {
+            id,
+            data_hash,
+            data_url,
+            creator_pubkey,
+        }
+    }
+
+    pub fn id(&self) -> &H256 {
+        match self {
+            Self::Normal { id, .. } => id,
+            Self::Nft { id, .. } => id,
+        }
+    }
 }
 
 pub type TokenListData = Vec<TokenInstance>;
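token_create in this patch derives the new H256 token id as BlakeTwo256 over the SCALE-encoded (name, ticker) pair, so the same pair always yields the same id and duplicates can be rejected by id alone. A sketch of that derivation, assuming the sp-core and parity-scale-codec crates:

use codec::Encode;
use sp_core::{hashing::blake2_256, H256};

fn token_id(name: &[u8], ticker: &[u8]) -> H256 {
    // BlakeTwo256::hash_of(x) is blake2_256 over the SCALE encoding of x.
    H256::from(blake2_256(&(name.to_vec(), ticker.to_vec()).encode()))
}

fn main() {
    let id = token_id(b"New token test", b"NTT");
    // Deterministic: the same (name, ticker) pair always maps to the same id.
    assert_eq!(id, token_id(b"New token test", b"NTT"));
    assert_ne!(id, token_id(b"Other token", b"OTT"));
}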
From aad0bc34f77a243d26ce5f0912bb5ea00ec4bec1 Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Mon, 4 Oct 2021 09:48:00 +0300
Subject: [PATCH 03/53] RPC for reading an NFT has been added

Signed-off-by: sinitcin

---
 Cargo.lock                              |  2 ++
 pallets/utxo/rpc/Cargo.toml             |  6 +++++
 pallets/utxo/rpc/runtime-api/Cargo.toml |  6 +++++
 pallets/utxo/rpc/runtime-api/src/lib.rs |  5 ++++-
 pallets/utxo/rpc/src/lib.rs             | 30 +++++++++++++++++++++++--
 pallets/utxo/src/lib.rs                 | 19 +++++++++++++---
 runtime/src/lib.rs                      | 10 +++++++--
 7 files changed, 70 insertions(+), 8 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 5514764..ca38b6e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4165,6 +4165,7 @@ dependencies = [
  "serde",
  "sp-api",
  "sp-blockchain",
+ "sp-core",
  "sp-runtime",
 ]
 
@@ -4178,6 +4179,7 @@ dependencies = [
  "serde",
  "serde_json",
  "sp-api",
+ "sp-core",
  "sp-runtime",
 ]
 
diff --git a/pallets/utxo/rpc/Cargo.toml b/pallets/utxo/rpc/Cargo.toml
index d609b1e..bc19661 100644
--- a/pallets/utxo/rpc/Cargo.toml
+++ b/pallets/utxo/rpc/Cargo.toml
@@ -44,3 +44,9 @@ default-features = false
 git = 'https://github.com/paritytech/substrate.git'
 version = '4.0.0-dev'
 branch = "master"
+
+[dependencies.sp-core]
+default-features = false
+git = 'https://github.com/paritytech/substrate.git'
+version = '4.0.0-dev'
+branch = "master"
\ No newline at end of file
diff --git a/pallets/utxo/rpc/runtime-api/Cargo.toml b/pallets/utxo/rpc/runtime-api/Cargo.toml
index b7c739e..ef3d2f6 100644
--- a/pallets/utxo/rpc/runtime-api/Cargo.toml
+++ b/pallets/utxo/rpc/runtime-api/Cargo.toml
@@ -36,6 +36,12 @@ git = 'https://github.com/paritytech/substrate.git'
 version = '4.0.0-dev'
 branch = "master"
 
+[dependencies.sp-core]
+default-features = false
+git = 'https://github.com/paritytech/substrate.git'
+version = '4.0.0-dev'
+branch = "master"
+
 [dev-dependencies]
 serde_json = "1.0.48"
 
diff --git a/pallets/utxo/rpc/runtime-api/src/lib.rs b/pallets/utxo/rpc/runtime-api/src/lib.rs
index ebaa307..aa9f354 100644
--- a/pallets/utxo/rpc/runtime-api/src/lib.rs
+++ b/pallets/utxo/rpc/runtime-api/src/lib.rs
@@ -17,6 +17,7 @@
 #![cfg_attr(not(feature = "std"), no_std)]
 
 use frame_support::inherent::Vec;
+use sp_core::H256;
 
 sp_api::decl_runtime_apis! {
     pub trait UtxoApi {
@@ -25,6 +26,8 @@ sp_api::decl_runtime_apis! {
         // At the moment we have some problems with using serde in RPC: we can serialize and
         // deserialize only simple types. This approach allows us to return Vec<(TokenId, TokenName)>
         // instead of pallet_utxo_tokens::TokenListData
-        fn tokens_list() -> Vec<(u64, Vec<u8>)>;
+        fn tokens_list() -> Vec<(H256, Vec<u8>)>;
+        // Getting NFT data
+        fn nft_read(id: H256) -> Option<(/* Data url */ Vec<u8>, /* Data hash */ [u8; 32])>;
     }
 }
diff --git a/pallets/utxo/rpc/src/lib.rs b/pallets/utxo/rpc/src/lib.rs
index 6945bad..3c81329 100644
--- a/pallets/utxo/rpc/src/lib.rs
+++ b/pallets/utxo/rpc/src/lib.rs
@@ -20,6 +20,7 @@ use jsonrpc_derive::rpc;
 pub use pallet_utxo_rpc_runtime_api::UtxoApi as UtxoRuntimeApi;
 use sp_api::ProvideRuntimeApi;
 use sp_blockchain::HeaderBackend;
+use sp_core::H256;
 use sp_runtime::{generic::BlockId, traits::Block as BlockT};
 use std::sync::Arc;
 
@@ -30,7 +31,14 @@ pub trait UtxoApi<BlockHash> {
 
     // What does Vec<(u64, Vec<u8>)> mean? Have a look at utxo/rpc/runtime-api/src/lib.rs
     #[rpc(name = "tokens_list")]
-    fn tokens_list(&self, at: Option<BlockHash>) -> Result<Vec<(u64, Vec<u8>)>>;
+    fn tokens_list(&self, at: Option<BlockHash>) -> Result<Vec<(H256, Vec<u8>)>>;
+
+    #[rpc(name = "nft_read")]
+    fn nft_read(
+        &self,
+        at: Option<BlockHash>,
+        id: H256,
+    ) -> Result<Option<(/* Data url */ Vec<u8>, /* Data hash */ [u8; 32])>>;
 }
 
 /// A struct that implements the [`UtxoApi`].
@@ -80,7 +88,7 @@ where
         })
     }
 
-    fn tokens_list(&self, at: Option<<Block as BlockT>::Hash>) -> Result<Vec<(u64, Vec<u8>)>> {
+    fn tokens_list(&self, at: Option<<Block as BlockT>::Hash>) -> Result<Vec<(H256, Vec<u8>)>> {
         let api = self.client.runtime_api();
         let at = BlockId::hash(at.unwrap_or_else(||
             // If the block hash is not supplied assume the best block.
             self.client.info().best_hash));
 
         let runtime_api_result = api.tokens_list(&at);
         runtime_api_result.map_err(|e| RpcError {
             code: ErrorCode::ServerError(Error::StorageError as i64),
             message: "Something went wrong".into(),
             data: Some(format!("{:?}", e).into()),
         })
     }
+
+    fn nft_read(
+        &self,
+        at: Option<<Block as BlockT>::Hash>,
+        id: H256,
+    ) -> Result<Option<(/* Data url */ Vec<u8>, /* Data hash */ [u8; 32])>> {
+        let api = self.client.runtime_api();
+        let at = BlockId::hash(at.unwrap_or_else(||
+            // If the block hash is not supplied assume the best block.
+            self.client.info().best_hash));
+
+        let runtime_api_result = api.nft_read(&at, id);
+        runtime_api_result.map_err(|e| RpcError {
+            code: ErrorCode::ServerError(Error::StorageError as i64),
+            message: "Something went wrong".into(),
+            data: Some(format!("{:?}", e).into()),
+        })
+    }
 }
diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs
index d300db9..b63e3aa 100644
--- a/pallets/utxo/src/lib.rs
+++ b/pallets/utxo/src/lib.rs
@@ -388,12 +388,12 @@ pub mod pallet {
 
     #[pallet::storage]
     #[pallet::getter(fn token_list)]
+    // todo: Soon it will turn into StorageMap
     pub(super) type TokenList<T> = StorageValue<_, TokenListData, ValueQuery>;
 
     #[pallet::storage]
     #[pallet::getter(fn owner_nft)]
-    pub(super) type OwnerNft<T> =
-        StorageMap<_, Identity, TokenId, /* PKH */ Option<H256>, ValueQuery>;
+    pub(super) type Nft<T> = StorageMap<_, Identity, TokenId, Option<TokenInstance>, ValueQuery>;
 
     #[pallet::storage]
     #[pallet::getter(fn reward_total)]
@@ -838,6 +838,7 @@ pub mod pallet {
             let (fee, inputs_hashes) = pick_utxo::<T>(caller, Mlt(100).to_munit());
             ensure!(fee >= Mlt(100).to_munit(), Error::<T>::Unapproved);
+            ensure!(data_url.len() <= 50, Error::<T>::Unapproved);
 
             let instance = TokenInstance::new_nft(
                 BlakeTwo256::hash_of(&data_hash),
@@ -853,7 +854,7 @@ pub mod pallet {
             .collect();
 
             ensure!(
-                !OwnerNft::<T>::contains_key(instance.id()),
+                !Nft::<T>::contains_key(instance.id()),
                 Error::<T>::NftCollectionExists
             );
@@ -878,6 +879,7 @@ pub mod pallet {
             spend::<T>(caller, &tx)?;
 
             // Save in Store
+            Nft::<T>::insert(instance.id(), Some(instance.clone()));
             <TokenList<T>>::mutate(|x| {
@@ -1069,6 +1071,17 @@ impl<T: Config> crate::Pallet<T> {
     pub fn tokens_list() -> TokenListData {
         <TokenList<T>>::get()
     }
+
+    pub fn nft_read(id: H256) -> Option<(/* Data url */ Vec<u8>, /* Data hash */ [u8; 32])> {
+        match Nft::<T>::get(id)? {
+            TokenInstance::Nft {
+                data_hash,
+                data_url,
+                ..
+            } => Some((data_url, data_hash)),
+            _ => None,
+        }
+    }
 }
 
 use frame_support::pallet_prelude::DispatchResultWithPostInfo;
diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs
index d328db1..1cff40c 100644
--- a/runtime/src/lib.rs
+++ b/runtime/src/lib.rs
@@ -573,10 +573,16 @@ impl_runtime_apis! {
         }
 
         // What does Vec<(u64, Vec<u8>)> mean? Have a look at utxo/rpc/runtime-api/src/lib.rs
-        fn tokens_list() -> Vec<(u64, Vec<u8>)> {
+        fn tokens_list() -> Vec<(H256, Vec<u8>)> {
+            use pallet_utxo_tokens::TokenInstance;
             let list = Utxo::tokens_list();
-            list.into_iter().map(|x| (x.id, x.name)).collect()
+            list.into_iter().filter_map(|x| if let TokenInstance::Normal { id, name, .. } = x { Some((id, name)) } else { None }).collect()
         }
+
+        fn nft_read(id: H256) -> Option<(Vec<u8>, [u8; 32])> {
+            Utxo::nft_read(id)
+        }
+
     }
 
     impl pallet_contracts_rpc_runtime_api::ContractsApi<
From 9a10db2d0fb0b8ff9fea224b18bd695bce050bda Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Mon, 4 Oct 2021 10:18:38 +0300
Subject: [PATCH 04/53] Fixed compilation error

Signed-off-by: sinitcin

---
 pallets/utxo/src/lib.rs        | 47 +++++++++++++++++++---------------
 pallets/utxo/tokens/src/lib.rs |  4 +--
 2 files changed, 28 insertions(+), 23 deletions(-)

diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs
index b63e3aa..1baed47 100644
--- a/pallets/utxo/src/lib.rs
+++ b/pallets/utxo/src/lib.rs
@@ -33,20 +33,9 @@ pub mod weights;
 
 pub const SR25519: sp_runtime::KeyTypeId = sp_runtime::KeyTypeId(*b"sr25");
 
-pub type TokenId = H256;
-pub type Value = u128;
-pub type String = Vec<u8>;
-
-pub struct Mlt(Value);
-impl Mlt {
-    pub fn to_munit(&self) -> Value {
-        self.0 * 1_000 * 100_000_000
-    }
-}
-
 #[frame_support::pallet]
 pub mod pallet {
-    use crate::{Mlt, String, TokenId, Value, SR25519};
+    use crate::SR25519;
     use chainscript::Script;
     use codec::{Decode, Encode};
     use core::convert::TryInto;
@@ -76,6 +65,17 @@ pub mod pallet {
     };
     use sp_runtime::DispatchErrorWithPostInfo;
 
+    pub type TokenId = H256;
+    pub type Value = u128;
+    pub type String = Vec<u8>;
+
+    pub struct Mlt(Value);
+    impl Mlt {
+        pub fn to_munit(&self) -> Value {
+            self.0 * 1_000 * 100_000_000
+        }
+    }
+
     #[pallet::error]
     pub enum Error<T> {
         /// Account balance must be greater than or equal to the transfer amount.
@@ -1063,6 +1063,14 @@ pub mod pallet {
 
 use pallet_utxo_tokens::{TokenInstance, TokenListData};
 
+use frame_support::pallet_prelude::DispatchResultWithPostInfo;
+use sp_core::{
+    crypto::UncheckedFrom,
+    {H256, H512},
+};
+use sp_runtime::sp_std::vec;
+use utxo_api::UtxoApi;
+
 impl<T: Config> crate::Pallet<T> {
     pub fn send() -> u32 {
         1337
@@ -1080,7 +1088,12 @@ impl<T: Config> crate::Pallet<T> {
         <TokenList<T>>::get()
     }
 
-    pub fn nft_read(id: H256) -> Option<(/* Data url */ Vec<u8>, /* Data hash */ [u8; 32])> {
+    pub fn nft_read(
+        id: H256,
+    ) -> Option<(
+        /* Data url */ frame_support::inherent::Vec<u8>,
+        /* Data hash */ [u8; 32],
+    )> {
         match Nft::<T>::get(id)? {
             TokenInstance::Nft {
                 data_hash,
                 data_url,
                 ..
             } => Some((data_url, data_hash)),
             _ => None,
         }
     }
 }
 
-use frame_support::pallet_prelude::DispatchResultWithPostInfo;
-use sp_core::{
-    crypto::UncheckedFrom,
-    {H256, H512},
-};
-use sp_runtime::sp_std::vec;
-use utxo_api::UtxoApi;
-
 impl<T: Config> UtxoApi for Pallet<T>
 where
     T::AccountId: UncheckedFrom<T::Hash> + AsRef<[u8]>,
diff --git a/pallets/utxo/tokens/src/lib.rs b/pallets/utxo/tokens/src/lib.rs
index b7d7d84..5339dc0 100644
--- a/pallets/utxo/tokens/src/lib.rs
+++ b/pallets/utxo/tokens/src/lib.rs
@@ -24,7 +24,7 @@ pub enum TokenInstance {
         id: H256,
         data_hash: [u8; 32],
         data_url: Vec<u8>,
-        creator_pubkey: Public,
+        creator_pubkey: [u8; 32],
     },
 }
 
@@ -58,7 +58,7 @@ impl TokenInstance {
             id,
             data_hash,
             data_url,
-            creator_pubkey,
+            creator_pubkey: creator_pubkey.0,
         }
     }
 
From 7e0cd950ecf7e57d3f92975c923edfd78b12118d Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Mon, 4 Oct 2021 10:22:52 +0300
Subject: [PATCH 05/53] Fixed test

Signed-off-by: sinitcin

---
 pallets/utxo/src/tests.rs | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs
index bd636d0..99b4372 100644
--- a/pallets/utxo/src/tests.rs
+++ b/pallets/utxo/src/tests.rs
@@ -548,7 +548,12 @@ fn nft_test() {
                     H256::from(alice_pub_key),
                 )],
             };
-            let alice_sig = crypto::sr25519_sign(SR25519, &alice_pub_key, &tx.encode()).unwrap();
+            let alice_sig = crypto::sr25519_sign(
+                SR25519,
+                &sp_core::sr25519::Public::from_raw(alice_pub_key),
+                &tx.encode(),
+            )
+            .unwrap();
             tx.inputs[0].witness = alice_sig.0.to_vec();
             assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone()));
         }
From 9441a768b08321a62829a139feb23d92959f5d67 Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Tue, 5 Oct 2021 07:51:25 +0300
Subject: [PATCH 06/53] Added serialization of fields related to NFT

Signed-off-by: sinitcin

---
 pallets/utxo/rpc/runtime-api/src/lib.rs |  2 +-
 pallets/utxo/rpc/src/lib.rs             |  4 +-
 pallets/utxo/src/lib.rs                 | 35 ++++------
 pallets/utxo/src/tests.rs               | 20 +++---
 pallets/utxo/src/weights.rs             |  5 +-
 pallets/utxo/tokens/src/lib.rs          | 92 ++++++++++++++++++++++---
 runtime/src/lib.rs                      |  2 +-
 7 files changed, 110 insertions(+), 50 deletions(-)

diff --git a/pallets/utxo/rpc/runtime-api/src/lib.rs b/pallets/utxo/rpc/runtime-api/src/lib.rs
index aa9f354..9f97736 100644
--- a/pallets/utxo/rpc/runtime-api/src/lib.rs
+++ b/pallets/utxo/rpc/runtime-api/src/lib.rs
@@ -28,6 +28,6 @@ sp_api::decl_runtime_apis! {
         // pallet_utxo_tokens::TokenListData
         fn tokens_list() -> Vec<(H256, Vec<u8>)>;
         // Getting NFT data
-        fn nft_read(id: H256) -> Option<(/* Data url */ Vec<u8>, /* Data hash */ [u8; 32])>;
+        fn nft_read(id: H256) -> Option<(/* Data url */ Vec<u8>, /* Data hash */ Vec<u8>)>;
     }
 }
diff --git a/pallets/utxo/rpc/src/lib.rs b/pallets/utxo/rpc/src/lib.rs
index 3c81329..76b835b 100644
--- a/pallets/utxo/rpc/src/lib.rs
+++ b/pallets/utxo/rpc/src/lib.rs
@@ -38,7 +38,7 @@ pub trait UtxoApi<BlockHash> {
         &self,
         at: Option<BlockHash>,
         id: H256,
-    ) -> Result<Option<(/* Data url */ Vec<u8>, /* Data hash */ [u8; 32])>>;
+    ) -> Result<Option<(/* Data url */ Vec<u8>, /* Data hash */ Vec<u8>)>>;
 }
 
 /// A struct that implements the [`UtxoApi`].
@@ -106,7 +106,7 @@ where
         &self,
         at: Option<<Block as BlockT>::Hash>,
         id: H256,
-    ) -> Result<Option<(/* Data url */ Vec<u8>, /* Data hash */ [u8; 32])>> {
+    ) -> Result<Option<(/* Data url */ Vec<u8>, /* Data hash */ Vec<u8>)>> {
         let api = self.client.runtime_api();
         let at = BlockId::hash(at.unwrap_or_else(||
             // If the block hash is not supplied assume the best block.
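A plausible reason this patch widens the NFT hash fields from fixed arrays to Vec<u8> is serde: derived Serialize exists for [T; N] only up to N = 32, so a 64-byte hash variant cannot be carried as a plain array, while Vec<u8> serializes at any length. A sketch of the constraint, assuming serde 1.x with serde_json for display:

use serde::Serialize;

#[derive(Serialize)]
struct WorksFine {
    data: Vec<u8>, // serializes at any length
}

// #[derive(Serialize)]      // does not compile with plain serde:
// struct DoesNotDerive {    // there is no Serialize impl for [u8; 64]
//     data: [u8; 64],
// }

fn main() {
    let v = WorksFine { data: vec![1; 64] };
    println!("{}", serde_json::to_string(&v).unwrap());
}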
diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs
index 1baed47..318970e 100644
--- a/pallets/utxo/src/lib.rs
+++ b/pallets/utxo/src/lib.rs
@@ -240,7 +240,7 @@ pub mod pallet {
         ConfidentialTx, /* not implemented yet */
         Nft {
             id: TokenId,
-            data_hash: [u8; 32],
+            data: Vec<u8>,
             data_url: String,
             creator_pkh: H256,
         },
@@ -273,7 +273,6 @@ pub mod pallet {
     /// Output of a transaction
     #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
     #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug, Hash)]
-    #[repr(C)]
     pub struct TransactionOutput<AccountId> {
         pub(crate) header: TxHeaderAndExtraData,
         pub(crate) value: Value,
@@ -319,12 +318,12 @@ pub mod pallet {
         }
     }
 
-        pub fn new_nft(id: TokenId, data_hash: [u8; 32], data_url: String, creator: H256) -> Self {
+        pub fn new_nft(id: TokenId, data: Vec<u8>, data_url: String, creator: H256) -> Self {
             Self {
                 value: 0,
                 header: TxHeaderAndExtraData::Nft {
                     id,
-                    data_hash,
+                    data,
                     creator_pkh: creator.clone(),
                     data_url,
                 },
                 destination: Destination::Pubkey(creator),
             }
         }
@@ -834,17 +833,17 @@ pub mod pallet {
             caller: &T::AccountId,
             creator_pubkey: sp_core::sr25519::Public,
             data_url: String,
-            data_hash: [u8; 32],
+            data: Vec<u8>,
         ) -> Result<TokenId, Error<T>> {
             let (fee, inputs_hashes) = pick_utxo::<T>(caller, Mlt(100).to_munit());
             ensure!(fee >= Mlt(100).to_munit(), Error::<T>::Unapproved);
             ensure!(data_url.len() <= 50, Error::<T>::Unapproved);
 
             let instance = TokenInstance::new_nft(
-                BlakeTwo256::hash_of(&data_hash),
-                data_hash,
+                BlakeTwo256::hash_of(&data),
+                data.clone(),
                 data_url.clone(),
-                creator_pubkey,
+                creator_pubkey.to_vec(),
             );
@@ -863,7 +863,7 @@ pub mod pallet {
                     // Output a new tokens
                     TransactionOutput::new_nft(
                         *instance.id(),
-                        data_hash,
+                        data,
                         data_url,
                         H256::from(creator_pubkey)
                     ),
@@ -966,10 +965,10 @@ pub mod pallet {
             origin: OriginFor<T>,
             creator_pubkey: sp_core::sr25519::Public,
             data_url: String,
-            data_hash: [u8; 32],
+            data: Vec<u8>,
         ) -> DispatchResultWithPostInfo {
             let caller = &ensure_signed(origin)?;
-            let nft_id = mint::<T>(caller, creator_pubkey, data_url.clone(), data_hash)?;
+            let nft_id = mint::<T>(caller, creator_pubkey, data_url.clone(), data)?;
             Self::deposit_event(Event::<T>::Minted(nft_id, caller.clone(), data_url));
             Ok(().into())
         }
@@ -1063,6 +1062,7 @@ pub mod pallet {
 
 use pallet_utxo_tokens::{TokenInstance, TokenListData};
 
+use frame_support::inherent::Vec;
 use frame_support::pallet_prelude::DispatchResultWithPostInfo;
 use sp_core::{
     crypto::UncheckedFrom,
     {H256, H512},
 };
@@ -1080,18 +1080,9 @@ impl<T: Config> crate::Pallet<T> {
         <TokenList<T>>::get()
     }
 
-    pub fn nft_read(
-        id: H256,
-    ) -> Option<(
-        /* Data url */ frame_support::inherent::Vec<u8>,
-        /* Data hash */ [u8; 32],
-    )> {
+    pub fn nft_read(id: H256) -> Option<(/* Data url */ Vec<u8>, /* Data hash */ Vec<u8>)> {
         match Nft::<T>::get(id)? {
-            TokenInstance::Nft {
-                data_hash,
-                data_url,
-                ..
-            } => Some((data_url, data_hash)),
+            TokenInstance::Nft { data, data_url, .. } => Some((data_url, data.to_vec())),
             _ => None,
         }
     }
diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs
index 99b4372..58538c9 100644
--- a/pallets/utxo/src/tests.rs
+++ b/pallets/utxo/src/tests.rs
@@ -524,16 +524,17 @@ fn nft_test() {
         let nft_id = BlakeTwo256::hash_of(&b"TEST");
         let instance = TokenInstance::new_nft(
             nft_id,
-            *b"01010101010101010101010101010101",
+            (*b"01010101010101010101010101010101").to_vec(),
             b"http://facebook.com".to_vec(),
-            alice_pub_key,
+            alice_pub_key.to_vec(),
         );
 
         if let TokenInstance::Nft {
             id,
-            data_hash,
+            data,
             data_url,
-            creator_pubkey: alice_pub_key,
+            creator_pubkey,
+            ..
         } = instance
         {
             let mut tx = Transaction {
                 inputs: vec![
                     // 100 MLT
                     tx_input_gen_no_signature(),
                 ],
                 outputs: vec![TransactionOutput::new_nft(
                     id,
-                    data_hash,
+                    data.to_vec(),
                     data_url,
-                    H256::from(alice_pub_key),
+                    H256::from_slice(creator_pubkey.as_slice()),
                 )],
             };
-            let alice_sig = crypto::sr25519_sign(
-                SR25519,
-                &sp_core::sr25519::Public::from_raw(alice_pub_key),
-                &tx.encode(),
-            )
-            .unwrap();
+            let alice_sig = crypto::sr25519_sign(SR25519, &alice_pub_key, &tx.encode()).unwrap();
             tx.inputs[0].witness = alice_sig.0.to_vec();
             assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone()));
         }
diff --git a/pallets/utxo/src/weights.rs b/pallets/utxo/src/weights.rs
index aae1adf..4e06f8d 100644
--- a/pallets/utxo/src/weights.rs
+++ b/pallets/utxo/src/weights.rs
@@ -43,11 +43,12 @@ impl crate::WeightInfo for WeightInfo {
 
     fn token_create(u: u32) -> Weight {
         // Under construction
-        (u as Weight).saturating_add((100 as Weight))
+        (u as Weight)
+            .saturating_add((100 as Weight))
             .saturating_add(T::DbWeight::get().reads(3 as Weight))
             .saturating_add(T::DbWeight::get().writes(3 as Weight))
     }
-
+
     fn send_to_address(s: u32) -> Weight {
         (348_270_000 as Weight)
             // Standard Error: 2_000
diff --git a/pallets/utxo/tokens/src/lib.rs b/pallets/utxo/tokens/src/lib.rs
index 5339dc0..569d851 100644
--- a/pallets/utxo/tokens/src/lib.rs
+++ b/pallets/utxo/tokens/src/lib.rs
@@ -7,11 +7,77 @@ use codec::{Decode, Encode};
 use frame_support::{dispatch::Vec, RuntimeDebug};
 use sp_core::{sr25519::Public, H256};
 
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)]
+pub struct NftDataRaw {
+    inner: Vec<u8>,
+}
+
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)]
+pub enum NftData {
+    Hash32([u8; 32]),
+    Hash64([u8; 64]),
+    Raw(Vec<u8>),
+    // Or any type that you want to implement
+}
+
+impl NftDataRaw {
+    pub fn new(data: Vec<u8>) -> NftDataRaw {
+        Self { inner: data }
+    }
+
+    pub fn into_data(&mut self) -> Option<NftData> {
+        NftData::decode(&mut self.as_slice()).ok()
+    }
+
+    pub fn to_vec(&self) -> Vec<u8> {
+        self.inner.clone()
+    }
+
+    pub fn as_slice(&self) -> &[u8] {
+        self.inner.as_slice()
+    }
+}
+
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)]
+pub struct NftOwnerRaw {
+    inner: Vec<u8>,
+}
+
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
+#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)]
+pub enum NftOwner {
+    Sr25519(Public),
+    Raw(Vec<u8>),
+    // Or any type that you want to implement
+}
+
+impl NftOwnerRaw {
+    pub fn new(data: Vec<u8>) -> Self {
+        Self { inner: data }
+    }
+
+    pub fn into_data(&mut self) -> Option<NftOwner> {
+        NftOwner::decode(&mut self.as_slice()).ok()
+    }
+
+    pub fn to_vec(&self) -> Vec<u8> {
+        self.inner.clone()
+    }
+
+    pub fn as_slice(&self) -> &[u8] {
+        self.inner.as_slice()
+    }
+}
+
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
 #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)]
 pub enum TokenInstance {
     Normal {
         id: H256,
+        version: u16,
         name: Vec<u8>,
         ticker: Vec<u8>,
         supply: u128,
@@ -22,9 +88,10 @@ pub enum TokenInstance {
     },
     Nft {
         id: H256,
-        data_hash: [u8; 32],
+        version: u16,
+        data: NftDataRaw,
         data_url: Vec<u8>,
-        creator_pubkey: [u8; 32],
+        creator_pubkey: NftOwnerRaw,
     },
 }
 
@@ -32,6 +99,7 @@ impl Default for TokenInstance {
     fn default() -> Self {
         Self::Normal {
             id: H256::zero(),
+            version: 0,
             name: Vec::new(),
             ticker: Vec::new(),
             supply: 0,
@@ -43,22 +111,19 @@ impl TokenInstance {
     pub fn new_normal(id: H256, name: Vec<u8>, ticker: Vec<u8>, supply: u128) -> Self {
         Self::Normal {
             id,
+            version: 0,
             name,
             ticker,
             supply,
         }
     }
 
-    pub fn new_nft(
-        id: H256,
-        data_hash: [u8; 32],
-        data_url: Vec<u8>,
-        creator_pubkey: Public,
-    ) -> Self {
+    pub fn new_nft(id: H256, data: Vec<u8>, data_url: Vec<u8>, creator_pubkey: Vec<u8>) -> Self {
         Self::Nft {
             id,
-            data_hash,
+            version: 0,
+            data: NftDataRaw::new(data),
             data_url,
-            creator_pubkey: creator_pubkey.0,
+            creator_pubkey: NftOwnerRaw::new(creator_pubkey),
         }
     }
 
@@ -68,6 +133,13 @@ impl TokenInstance {
             Self::Nft { id, .. } => id,
         }
     }
+
+    pub fn version(&self) -> u16 {
+        *match self {
+            Self::Normal { version, .. } => version,
+            Self::Nft { version, .. } => version,
+        }
+    }
 }
 
 pub type TokenListData = Vec<TokenInstance>;
diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs
index 1cff40c..0bf66de 100644
--- a/runtime/src/lib.rs
+++ b/runtime/src/lib.rs
@@ -579,7 +579,7 @@ impl_runtime_apis! {
             list.into_iter().filter_map(|x| if let TokenInstance::Normal { id, name, .. } = x { Some((id, name)) } else { None }).collect()
         }
 
-        fn nft_read(id: H256) -> Option<(Vec<u8>, [u8; 32])> {
+        fn nft_read(id: H256) -> Option<(Vec<u8>, Vec<u8>)> {
             Utxo::nft_read(id)
         }
 
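The NftDataRaw wrapper above keeps opaque SCALE bytes on chain and only re-types them on read via into_data(). A round-trip sketch assuming parity-scale-codec, mirroring the NftData enum from the patch in a standalone program:

use codec::{Decode, Encode};

#[derive(Clone, Encode, Decode, PartialEq, Debug)]
enum NftData {
    Hash32([u8; 32]),
    Hash64([u8; 64]),
    Raw(Vec<u8>),
}

fn main() {
    // Encode a typed value into the raw byte form the storage keeps...
    let raw: Vec<u8> = NftData::Hash32([7u8; 32]).encode();
    // ...and decode it back when the NFT is read.
    let decoded = NftData::decode(&mut raw.as_slice()).ok();
    assert_eq!(decoded, Some(NftData::Hash32([7u8; 32])));
    // Bytes that are not a valid encoding simply yield None, which is why
    // into_data() can return Option instead of panicking.
    assert_eq!(NftData::decode(&mut &[0xFFu8][..]).ok(), None);
}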
+111,19 @@ impl TokenInstance { pub fn new_normal(id: H256, name: Vec, ticker: Vec, supply: u128) -> Self { Self::Normal { id, + version: 0, name, ticker, supply, } } - pub fn new_nft( - id: H256, - data_hash: [u8; 32], - data_url: Vec, - creator_pubkey: Public, - ) -> Self { + pub fn new_nft(id: H256, data: Vec, data_url: Vec, creator_pubkey: Vec) -> Self { Self::Nft { id, - data_hash, + version: 0, + data: NftDataRaw::new(data), data_url, - creator_pubkey: creator_pubkey.0, + creator_pubkey: NftOwnerRaw::new(creator_pubkey), } } @@ -68,6 +133,13 @@ impl TokenInstance { Self::Nft { id, .. } => id, } } + + pub fn version(&self) -> u16 { + *match self { + Self::Normal { version, .. } => version, + Self::Nft { version, .. } => version, + } + } } pub type TokenListData = Vec; diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 1cff40c..0bf66de 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -579,7 +579,7 @@ impl_runtime_apis! { list.into_iter().filter_map(|x|if let TokenInstance::Normal{id, name, ..} = x { Some((id, name)) } else {None} ).collect() } - fn nft_read(id: H256) -> Option<(Vec, [u8; 32])> { + fn nft_read(id: H256) -> Option<(Vec, Vec)> { Utxo::nft_read(id) } From b3d42474471bdf95b02f42f95a14ec24927e15b1 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 5 Oct 2021 07:59:41 +0300 Subject: [PATCH 07/53] Removed unwraps from sr25519_sign fn Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 318970e..29af0cf 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -811,7 +811,7 @@ pub mod pallet { &sp_core::sr25519::Public::from_h256(public), &tx.encode(), ) - .unwrap(); + .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; for i in 0..tx.inputs.len() { tx.inputs[i].witness = sig.0.to_vec(); } @@ -870,7 +870,8 @@ pub mod pallet { ], }; - let sig = crypto::sr25519_sign(SR25519, &creator_pubkey, &tx.encode()).unwrap(); + let sig = crypto::sr25519_sign(SR25519, &creator_pubkey, &tx.encode()) + .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; for i in 0..tx.inputs.len() { tx.inputs[i].witness = sig.0.to_vec(); } @@ -1025,7 +1026,8 @@ pub mod pallet { ], }; - let sig = crypto::sr25519_sign(SR25519, &pubkey, &tx.encode()).unwrap(); + let sig = crypto::sr25519_sign(SR25519, &pubkey, &tx.encode()) + .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; for i in 0..tx.inputs.len() { tx.inputs[i].witness = sig.0.to_vec(); } From 847bf4d56c2ce9af75a61afc3cfe00a5ed59c0d3 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 5 Oct 2021 18:14:46 +0300 Subject: [PATCH 08/53] Removed RPC calls `tokens_list` and `nft_read`. TokenList now turned into StorageMap Signed-off-by: sinitcin --- pallets/utxo/rpc/runtime-api/src/lib.rs | 10 ----- pallets/utxo/rpc/src/lib.rs | 44 --------------------- pallets/utxo/src/lib.rs | 52 +++++++++---------------- pallets/utxo/src/tests.rs | 10 +---- runtime/src/lib.rs | 12 ------ 5 files changed, 21 insertions(+), 107 deletions(-) diff --git a/pallets/utxo/rpc/runtime-api/src/lib.rs b/pallets/utxo/rpc/runtime-api/src/lib.rs index 9f97736..40b77ab 100644 --- a/pallets/utxo/rpc/runtime-api/src/lib.rs +++ b/pallets/utxo/rpc/runtime-api/src/lib.rs @@ -16,18 +16,8 @@ // Author(s): A. Altonen, Anton Sinitsyn #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::inherent::Vec; -use sp_core::H256; - sp_api::decl_runtime_apis! 
{ pub trait UtxoApi { fn send() -> u32; - // What means Vec<(u64, Vec)> ? - // At the moment we have some problems with use serde in RPC, we can serialize and deserialize - // only simple types. This approach allow us to return Vec<(TokenId, TokenName)> instead of - // pallet_utxo_tokens::TokenListData - fn tokens_list() -> Vec<(H256, Vec)>; - // Getting NFT data - fn nft_read(id: H256) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)>; } } diff --git a/pallets/utxo/rpc/src/lib.rs b/pallets/utxo/rpc/src/lib.rs index 76b835b..5bb25cb 100644 --- a/pallets/utxo/rpc/src/lib.rs +++ b/pallets/utxo/rpc/src/lib.rs @@ -20,7 +20,6 @@ use jsonrpc_derive::rpc; pub use pallet_utxo_rpc_runtime_api::UtxoApi as UtxoRuntimeApi; use sp_api::ProvideRuntimeApi; use sp_blockchain::HeaderBackend; -use sp_core::H256; use sp_runtime::{generic::BlockId, traits::Block as BlockT}; use std::sync::Arc; @@ -28,17 +27,6 @@ use std::sync::Arc; pub trait UtxoApi { #[rpc(name = "utxo_send")] fn send(&self, at: Option) -> Result; - - // What means Vec<(u64, Vec)> ? Have a look at utxo/rpc/runtime-api/src/lib.rs - #[rpc(name = "tokens_list")] - fn tokens_list(&self, at: Option) -> Result)>>; - - #[rpc(name = "nft_read")] - fn nft_read( - &self, - at: Option, - id: H256, - ) -> Result, /* Data hash */ Vec)>>; } /// A struct that implements the [`UtxoApi`]. @@ -87,36 +75,4 @@ where data: Some(format!("{:?}", e).into()), }) } - - fn tokens_list(&self, at: Option<::Hash>) -> Result)>> { - let api = self.client.runtime_api(); - let at = BlockId::hash(at.unwrap_or_else(|| - // If the block hash is not supplied assume the best block. - self.client.info().best_hash)); - - let runtime_api_result = api.tokens_list(&at); - runtime_api_result.map_err(|e| RpcError { - code: ErrorCode::ServerError(Error::StorageError as i64), - message: "Something wrong".into(), - data: Some(format!("{:?}", e).into()), - }) - } - - fn nft_read( - &self, - at: Option<::Hash>, - id: H256, - ) -> Result, /* Data hash */ Vec)>> { - let api = self.client.runtime_api(); - let at = BlockId::hash(at.unwrap_or_else(|| - // If the block hash is not supplied assume the best block. 
- self.client.info().best_hash)); - - let runtime_api_result = api.nft_read(&at, id); - runtime_api_result.map_err(|e| RpcError { - code: ErrorCode::ServerError(Error::StorageError as i64), - message: "Something wrong".into(), - data: Some(format!("{:?}", e).into()), - }) - } } diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 29af0cf..bb65d60 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -50,7 +50,7 @@ pub mod pallet { }; use frame_system::pallet_prelude::*; use hex_literal::hex; - use pallet_utxo_tokens::{TokenInstance, TokenListData}; + use pallet_utxo_tokens::TokenInstance; use pp_api::ProgrammablePoolApi; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -388,11 +388,8 @@ pub mod pallet { #[pallet::storage] #[pallet::getter(fn token_list)] // todo: Soon it will turn into StorageMap - pub(super) type TokenList = StorageValue<_, TokenListData, ValueQuery>; - - #[pallet::storage] - #[pallet::getter(fn owner_nft)] - pub(super) type Nft = StorageMap<_, Identity, TokenId, Option, ValueQuery>; + pub(super) type TokenList = + StorageMap<_, Identity, TokenId, Option, ValueQuery>; #[pallet::storage] #[pallet::getter(fn reward_total)] @@ -547,7 +544,6 @@ pub mod pallet { .collect(); // Check for token creation - let tokens_list = >::get(); for output in tx.outputs.iter() { let tid = match output.header { TxHeaderAndExtraData::NormalTx { id } => id, @@ -559,13 +555,7 @@ pub mod pallet { } else { // But when we don't have an input for token but token id exist in TokenList ensure!( - tokens_list - .iter() - .find(|&x| match x { - crate::TokenInstance::Normal { id, .. } - | crate::TokenInstance::Nft { id, .. } => id, - } == &tid) - .is_none(), + !>::contains_key(tid), "no inputs for the token id" ); } @@ -786,6 +776,12 @@ pub mod pallet { token_ticker, supply, ); + let token_id = *instance.id(); + + ensure!( + !>::contains_key(instance.id()), + Error::::InUse + ); let mut tx = Transaction { inputs: crate::vec![ @@ -819,14 +815,8 @@ pub mod pallet { spend::(caller, &tx)?; // Save in Store - >::mutate(|x| { - if x.iter().find(|&x| x.id() == instance.id()).is_none() { - x.push(instance.clone()) - } else { - panic!("the token has already existed with the same id") - } - }); - Ok(*instance.id()) + >::insert(token_id, Some(instance)); + Ok(token_id) } fn mint( @@ -853,7 +843,7 @@ pub mod pallet { .collect(); ensure!( - !Nft::::contains_key(instance.id()), + !TokenList::::contains_key(instance.id()), Error::::NftCollectionExists ); @@ -879,14 +869,7 @@ pub mod pallet { spend::(caller, &tx)?; // Save in Store - Nft::::insert(instance.id(), Some(instance.clone())); - >::mutate(|x| { - if x.iter().find(|&x| x.id() == instance.id()).is_none() { - x.push(instance.clone()) - } else { - panic!("the token has already existed with the same id") - } - }); + TokenList::::insert(instance.id(), Some(instance.clone())); Ok(*instance.id()) } @@ -1079,11 +1062,14 @@ impl crate::Pallet { } pub fn tokens_list() -> TokenListData { - >::get() + >::iter() + .enumerate() + .filter_map(|(_, instance)| instance.1) + .collect() } pub fn nft_read(id: H256) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { - match Nft::::get(id)? { + match TokenList::::get(id)? { TokenInstance::Nft { data, data_url, .. 
} => Some((data_url, data.to_vec())), _ => None, } diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 58538c9..7d1c66c 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -380,14 +380,8 @@ fn test_tokens() { first_tx.inputs[0].witness = alice_sig.0.to_vec(); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), first_tx.clone())); // Store a new TokenInstance to the Storage - >::mutate(|x| { - if x.iter().find(|&x| x.id() == &token_id).is_none() { - x.push(instance.clone()) - } else { - panic!("the token has already existed with the same id") - } - }); - dbg!(&>::get()); + >::insert(token_id, Some(instance.clone())); + dbg!(&>::get(token_id)); // alice sends 1000 tokens to karl and the rest back to herself 10 tokens let utxo_hash_mlt = BlakeTwo256::hash_of(&(&first_tx, 0 as u64)); diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 0bf66de..f469987 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -571,18 +571,6 @@ impl_runtime_apis! { fn send() -> u32 { Utxo::send() } - - // What means Vec<(u64, Vec)> ? Have a look at utxo/rpc/runtime-api/src/lib.rs - fn tokens_list() -> Vec<(H256, Vec)> { - use pallet_utxo_tokens::TokenInstance; - let list = Utxo::tokens_list(); - list.into_iter().filter_map(|x|if let TokenInstance::Normal{id, name, ..} = x { Some((id, name)) } else {None} ).collect() - } - - fn nft_read(id: H256) -> Option<(Vec, Vec)> { - Utxo::nft_read(id) - } - } impl pallet_contracts_rpc_runtime_api::ContractsApi< From fa89a24700e748ec3e9ce7bef5186514d3a6646d Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 19 Oct 2021 22:38:02 +0300 Subject: [PATCH 09/53] Merge remote-tracking branch 'origin/staging' into nft Signed-off-by: sinitcin # Conflicts: # pallets/utxo/src/lib.rs # pallets/utxo/src/tests.rs --- pallets/utxo/src/lib.rs | 210 +++++++++++++++++----------------------- 1 file changed, 90 insertions(+), 120 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 52a1fde..2f45daf 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -55,8 +55,8 @@ pub mod pallet { use serde::{Deserialize, Serialize}; use sp_core::{ sp_std::collections::btree_map::BTreeMap, - sr25519::{Public as SR25Pub, Signature as SR25Sig}, sp_std::{convert::TryInto, str, vec}, + sr25519::{Public as SR25Pub, Signature as SR25Sig}, testing::SR25519, H256, H512, }; @@ -202,7 +202,7 @@ pub mod pallet { #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] pub enum Destination { /// Plain pay-to-pubkey - Pubkey(sr25519::Public), + Pubkey(SR25Pub), /// Pay to fund a new programmable pool. Takes code and data. CreatePP(Vec, Vec), /// Pay to an existing contract. Takes a destination account and input data. 
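The hunk above only swaps `sr25519::Public` for the `SR25Pub` import alias; the spending rule for `Destination::Pubkey` is unchanged. As a minimal stand-alone sketch of what that rule amounts to, assuming only `sp_core` and a placeholder message in place of the pallet's real signature-stripped encoding:

```rust
use sp_core::{sr25519, Pair};

fn main() {
    // Hypothetical owner key; the pallet stores only the public half
    // inside Destination::Pubkey.
    let (pair, _seed) = sr25519::Pair::generate();

    // Placeholder for the signature-stripped transaction bytes.
    let simple_tx = b"signature-stripped transaction bytes".to_vec();

    // The owner fills each input witness with a signature over those bytes...
    let witness = pair.sign(&simple_tx);

    // ...and spend validation reduces to this sr25519 verification.
    assert!(sr25519::Pair::verify(&witness, &simple_tx, &pair.public()));
}
```

The pallet's own check, visible in the validation code this patch reworks, performs the same verification with `crypto::sr25519_verify` over the `simple_tx` bytes.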
@@ -287,7 +287,6 @@ pub mod pallet { Self { header: TxHeaderAndExtraData::NormalTx { id: H256::zero() }, value, - destination: Destination::Pubkey(pub_key), destination: Destination::Pubkey(pubkey.into()), } } @@ -310,15 +309,17 @@ pub mod pallet { } } - pub fn new_token(id: TokenId, value: Value, pub_key: H256) -> Self { + pub fn new_token(id: TokenId, value: Value, pubkey: H256) -> Self { + let pubkey = sp_core::sr25519::Public::from_h256(pubkey); Self { value, header: TxHeaderAndExtraData::NormalTx { id }, - destination: Destination::Pubkey(pub_key), + destination: Destination::Pubkey(pubkey), } } pub fn new_nft(id: TokenId, data: Vec<u8>, data_url: String, creator: H256) -> Self { + let pubkey = sp_core::sr25519::Public::from_h256(creator); Self { value: 0, header: TxHeaderAndExtraData::Nft { @@ -327,7 +328,7 @@ pub mod pallet { creator_pkh: creator.clone(), data_url, }, - destination: Destination::Pubkey(creator), + destination: Destination::Pubkey(pubkey), } } @@ -339,15 +340,6 @@ pub mod pallet { destination: Destination::ScriptHash(hash), } } - - /// Create a new output to given pubkey hash - pub fn new_pubkey_hash(value: Value, script: Script) -> Self { - Self { - header: TxHeaderAndExtraData::NormalTx { id: H256::zero() }, - value, - destination: Destination::PubkeyHash(script.into_bytes()), - } - } } #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] @@ -368,7 +360,7 @@ pub mod pallet { mut self, utxos: &[TransactionOutput], index: usize, - pk: &sr25519::Public, + pk: &SR25Pub, ) -> Option<Self> { let msg = crate::sign::TransactionSigMsg::construct( Default::default(), @@ -522,6 +514,32 @@ pub mod pallet { "each output should be used once" ); } + let simple_tx = get_simple_transaction(tx); + let mut reward = 0; + // Resolve the transaction inputs by looking up UTXOs being spent by them. + // + // This will contain one of the following: + // * Ok(utxos): a vector of UTXOs each input spends.
+ // * Err(missing): a vector of outputs missing from the store + let input_utxos = { + let mut missing = Vec::new(); + let mut resolved: Vec> = Vec::new(); + + for input in &tx.inputs { + if let Some(input_utxo) = >::get(&input.outpoint) { + let lock_commitment = input_utxo.destination.lock_commitment(); + ensure!( + input.lock_hash() == *lock_commitment, + "Lock hash does not match" + ); + resolved.push(input_utxo); + } else { + missing.push(input.outpoint.clone().as_fixed_bytes().to_vec()); + } + } + + missing.is_empty().then(|| resolved).ok_or(missing) + }; let full_inputs: Vec<(TokenId, TransactionOutputFor)> = tx .inputs @@ -570,62 +588,10 @@ pub mod pallet { } let mut new_utxos = Vec::new(); - let mut reward = 0; - - // Check that inputs are valid - for input in tx.inputs.iter() { - if let Some(input_utxo) = >::get(&input.outpoint) { - let lock_commitment = input_utxo.destination.lock_commitment(); - ensure!( - input.lock_hash() == *lock_commitment, - "Lock hash does not match" - ); - - match input_utxo.destination { - Destination::Pubkey(pubkey) => { - let sig = (&input.witness[..]) - .try_into() - .map_err(|_| "signature length incorrect")?; - ensure!( - crypto::sr25519_verify( - &SR25Sig::from_raw(sig), - &simple_tx, - &SR25Pub::from_h256(pubkey) - ), - "signature must be valid" - ); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate spending of OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate spending of OP_CALL"); - } - Destination::ScriptHash(_hash) => { - use crate::script::verify; - ensure!( - verify(&simple_tx, input.witness.clone(), input.lock.clone()).is_ok(), - "script verification failed" - ); - } - Destination::PubkeyHash(script) => { - use crate::script::verify; - ensure!( - verify(&simple_tx, input.witness.clone(), script).is_ok(), - "pubkeyhash verification failed" - ); - } - } - } else { - missing_utxos.push(input.outpoint.clone().as_fixed_bytes().to_vec()); - } - } - // Check that outputs are valid - for (output_index, output) in tx.enumerate_outputs()? { + for (output_index, output) in tx.outputs.iter().enumerate() { match output.destination { - Destination::Pubkey(_) - | Destination::ScriptHash(_) => { + Destination::Pubkey(_) | Destination::ScriptHash(_) => { ensure!(output.value > 0, "output value must be nonzero"); let hash = tx.outpoint(output_index as u64); ensure!(!>::contains_key(hash), "output already exists"); @@ -717,7 +683,7 @@ pub mod pallet { .ok_or("reward underflow")? } else { *inputs_sum.get(&(H256::zero() as TokenId)).ok_or("fee doesn't exist")? 
- } + }; } Ok(ValidTransaction { @@ -849,52 +815,55 @@ pub mod pallet { data_url: String, data: Vec, ) -> Result> { - let (fee, inputs_hashes) = pick_utxo::(caller, Mlt(100).to_munit()); - ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); - ensure!(data_url.len() <= 50, Error::::Unapproved); - - let instance = TokenInstance::new_nft( - BlakeTwo256::hash_of(&data), - data.clone(), - data_url.clone(), - creator_pubkey.to_vec(), - ); - - let inputs_for_fee = inputs_hashes - .iter() - .filter_map(|x| >::get(&x)) - .map(|output| TransactionInput::new_empty(BlakeTwo256::hash_of(&(&output, 0 as u64)))) - .collect(); - - ensure!( - !TokenList::::contains_key(instance.id()), - Error::::NftCollectionExists - ); - - let mut tx = Transaction { - inputs: inputs_for_fee, - outputs: crate::vec![ - // Output a new tokens - TransactionOutput::new_nft( - *instance.id(), - data, - data_url, - H256::from(creator_pubkey) - ), - ], - }; - - let sig = crypto::sr25519_sign(SR25519, &creator_pubkey, &tx.encode()) - .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; - for i in 0..tx.inputs.len() { - tx.inputs[i].witness = sig.0.to_vec(); - } - // Success - spend::(caller, &tx)?; - - // Save in Store - TokenList::::insert(instance.id(), Some(instance.clone())); - Ok(*instance.id()) + /* let (fee, inputs_hashes) = pick_utxo::(caller, Mlt(100).to_munit()); + ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); + ensure!(data_url.len() <= 50, Error::::Unapproved); + + let instance = TokenInstance::new_nft( + BlakeTwo256::hash_of(&data), + data.clone(), + data_url.clone(), + creator_pubkey.to_vec(), + ); + + let inputs_for_fee = inputs_hashes + .iter() + .filter_map(|x| >::get(&x)) + .map(|output| TransactionInput::new_empty(BlakeTwo256::hash_of(&(&output, 0 as u64)))) + .collect(); + + ensure!( + !TokenList::::contains_key(instance.id()), + Error::::NftCollectionExists + ); + + let mut tx = Transaction { + inputs: inputs_for_fee, + outputs: crate::vec![ + // Output a new tokens + TransactionOutput::new_nft( + *instance.id(), + data, + data_url, + H256::from(creator_pubkey) + ), + ], + }; + + let sig = crypto::sr25519_sign(SR25519, &creator_pubkey, &tx.encode()) + .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; + for i in 0..tx.inputs.len() { + tx.inputs[i].witness = sig.0.to_vec(); + } + // Success + spend::(caller, &tx)?; + + // Save in Store + TokenList::::insert(instance.id(), Some(instance.clone())); + Ok(*instance.id()) + + */ + unimplemented!() } /// Pick the UTXOs of `caller` from UtxoStore that satisfy request `value` @@ -927,8 +896,8 @@ pub mod pallet { break; } } - _ => {} } + _ => {} } } @@ -1035,7 +1004,8 @@ pub mod pallet { TransactionOutput { value, destination: dest, - header: Default::default(), + // todo: We need to check what kind of token over here + header: TxHeaderAndExtraData::NormalTx { id: H256::zero() }, }, TransactionOutput::new_pubkey(total - value, H256::from(pubkey_raw)), ], @@ -1043,7 +1013,7 @@ pub mod pallet { for i in 0..tx.inputs.len() { tx = tx - .sign(&utxos, i, &sr25519::Public(pubkey_raw)) + .sign(&utxos, i, &SR25Pub(pubkey_raw)) .ok_or(DispatchError::Other("Failed to sign the transaction"))?; } From 1a77c9c2911673631c23741fd889052fb289de1e Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 21 Oct 2021 16:39:27 +0300 Subject: [PATCH 10/53] Added the new data field Signed-off-by: sinitcin --- pallets/utxo/Cargo.toml | 2 +- pallets/utxo/src/lib.rs | 281 +++++++++++++++++++++----------------- pallets/utxo/src/tests.rs | 184 
++++++++++++------------- 3 files changed, 250 insertions(+), 217 deletions(-) diff --git a/pallets/utxo/Cargo.toml b/pallets/utxo/Cargo.toml index d0d6fb5..65b3d8f 100644 --- a/pallets/utxo/Cargo.toml +++ b/pallets/utxo/Cargo.toml @@ -36,7 +36,7 @@ version = '0.1.0' [dependencies.codec] default-features = false -features = ['derive'] +features = ["derive", "chain-error"] package = 'parity-scale-codec' version = '2.0.0' diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 2f45daf..ec30915 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -56,7 +56,7 @@ pub mod pallet { use sp_core::{ sp_std::collections::btree_map::BTreeMap, sp_std::{convert::TryInto, str, vec}, - sr25519::{Public as SR25Pub, Signature as SR25Sig}, + sr25519::Public as SR25Pub, testing::SR25519, H256, H512, }; @@ -228,53 +228,54 @@ pub mod pallet { } } + /// Output of a transaction #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] - #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug, Hash)] - pub enum TxHeaderAndExtraData { - NormalTx { - // Normal token ID - id: TokenId, - }, - ConfidentialTx, /* not implemented yet */ - Nft { - id: TokenId, - data: Vec, - data_url: String, - creator_pkh: H256, - }, + #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] + pub struct TransactionOutput { + pub(crate) value: Value, + pub(crate) destination: Destination, + pub(crate) data: Option, } - impl TxHeaderAndExtraData { - pub fn id(&self) -> Option { - match self { - Self::NormalTx { id } => Some(*id), - Self::Nft { id, .. } => Some(*id), - _ => None, - } - } - - pub fn is_normal(&self) -> bool { - match self { - Self::NormalTx { .. } => true, - _ => false, - } - } - - pub fn is_nft(&self) -> bool { - match self { - Self::Nft { .. } => true, - _ => true, - } - } + #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] + #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] + pub enum TxData { + // TokenTransfer data to another user. If it is a token, then the token data must also be transferred to the recipient. + #[codec(index = 1)] + TokenTransferV1 { token_id: TokenId, amount: Value }, + // A new token creation + #[codec(index = 2)] + TokenIssuanceV1 { + token_id: TokenId, + token_ticker: Vec, + amount_to_issue: Value, + // Should be not more than 18 numbers + number_of_decimals: u8, + metadata_URI: Vec, + }, + // Burning a token or NFT + #[codec(index = 3)] + TokenBurnV1 { + token_id: TokenId, + amount_to_burn: Value, + }, + // A new NFT creation + #[codec(index = 4)] + NftMintV1 { + token_id: TokenId, + data_hash: NftDataHash, + metadata_URI: Vec, + }, } - /// Output of a transaction #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] - pub struct TransactionOutput { - pub(crate) header: TxHeaderAndExtraData, - pub(crate) value: Value, - pub(crate) destination: Destination, + pub enum NftDataHash { + #[codec(index = 1)] + Hash32([u8; 32]), + #[codec(index = 2)] + Raw(Vec), + // Or any type that you want to implement } impl TransactionOutput { @@ -285,36 +286,50 @@ pub mod pallet { pub fn new_pubkey(value: Value, pubkey: H256) -> Self { let pubkey = sp_core::sr25519::Public::from_h256(pubkey); Self { - header: TxHeaderAndExtraData::NormalTx { id: H256::zero() }, value, destination: Destination::Pubkey(pubkey.into()), + data: None, } } /// Create a new output to create a smart contract. 
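A note on the `#[codec(index = N)]` attributes carried by the new `TxData` variants above: they pin each variant's SCALE discriminant, so variants can later be added, deprecated, or reordered without silently changing the wire format; the `V1` suffixes anticipate exactly that kind of evolution. A self-contained sketch of the effect, using a hypothetical `Demo` enum (any enum deriving `Encode`/`Decode` from `parity-scale-codec` behaves this way):

```rust
use codec::{Decode, Encode};

// Hypothetical enum; TxData uses the same attribute for the same reason.
#[derive(Encode, Decode, PartialEq, Debug)]
enum Demo {
    #[codec(index = 1)]
    TransferV1 { amount: u128 },
    #[codec(index = 4)]
    MintV1 { id: u8 },
}

fn main() {
    let bytes = Demo::MintV1 { id: 7 }.encode();
    // The first byte is the pinned discriminant (4), not the 0-based
    // declaration position (which would be 1).
    assert_eq!(bytes[0], 4);
    // Round trip restores the variant.
    assert_eq!(Demo::decode(&mut &bytes[..]).unwrap(), Demo::MintV1 { id: 7 });
}
```

Without explicit indices, SCALE numbers variants by declaration order, and inserting a variant in the middle would re-map the encoding of every variant after it.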
pub fn new_create_pp(value: Value, code: Vec, data: Vec) -> Self { Self { - header: TxHeaderAndExtraData::NormalTx { id: H256::zero() }, value, destination: Destination::CreatePP(code, data), + data: None, } } /// Create a new output to call a smart contract routine. pub fn new_call_pp(value: Value, dest_account: AccountId, input: Vec) -> Self { Self { - header: TxHeaderAndExtraData::NormalTx { id: H256::zero() }, value, destination: Destination::CallPP(dest_account, input), + data: None, } } - pub fn new_token(id: TokenId, value: Value, pubkey: H256) -> Self { + pub fn new_token( + token_id: TokenId, + token_ticker: Vec, + amount_to_issue: Value, + number_of_decimals: u8, + metadata_URI: Vec, + pubkey: H256, + ) -> Self { let pubkey = sp_core::sr25519::Public::from_h256(pubkey); Self { - value, - header: TxHeaderAndExtraData::NormalTx { id }, + value: 0, destination: Destination::Pubkey(pubkey), + data: Some(TxData::TokenIssuanceV1 { + token_id, + token_ticker, + amount_to_issue, + // Should be not more than 18 numbers + number_of_decimals, + metadata_URI, + }), } } @@ -322,22 +337,17 @@ pub mod pallet { let pubkey = sp_core::sr25519::Public::from_h256(creator); Self { value: 0, - header: TxHeaderAndExtraData::Nft { - id, - data, - creator_pkh: creator.clone(), - data_url, - }, destination: Destination::Pubkey(pubkey), + data: None, } } /// Create a new output to given script hash. pub fn new_script_hash(value: Value, hash: H256) -> Self { Self { - header: TxHeaderAndExtraData::NormalTx { id: H256::zero() }, value, destination: Destination::ScriptHash(hash), + data: None, } } } @@ -385,7 +395,6 @@ pub mod pallet { #[pallet::storage] #[pallet::getter(fn token_list)] - // todo: Soon it will turn into StorageMap pub(super) type TokenList = StorageMap<_, Identity, TokenId, Option, ValueQuery>; @@ -545,12 +554,27 @@ pub mod pallet { .inputs .iter() .filter_map(|input| >::get(&input.outpoint)) - .filter_map(|output| { - if let TxHeaderAndExtraData::NormalTx { id } = &output.header { - Some((*id, output)) - } else { - None - } + .filter_map(|output| match output.data { + Some(ref data) => match data { + TxData::TokenTransferV1 { token_id, amount } => Some((*token_id, output)), + TxData::TokenIssuanceV1 { + token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_URI, + } => Some((*token_id, output)), + TxData::TokenBurnV1 { .. } => { + // frame_support::fail!("Token gone forever, we can't use it anymore").ok(); + None + } + TxData::NftMintV1 { + token_id, + data_hash, + metadata_URI, + } => Some((*token_id, output)), + }, + None => Some((H256::zero(), output)), }) .collect(); @@ -561,18 +585,26 @@ pub mod pallet { .outputs .iter() .filter_map(|output| { - if let TxHeaderAndExtraData::NormalTx { id } = &output.header { - Some((*id, output.value)) - } else { - None + match output.data { + Some(TxData::TokenTransferV1 { token_id, amount }) => Some((token_id, amount)), + Some(TxData::TokenIssuanceV1 { + token_id, + amount_to_issue, + .. + }) => Some((token_id, amount_to_issue)), + Some(TxData::NftMintV1 { token_id, .. }) => Some((token_id, 1)), + // Token gone forever, we can't use it anymore + Some(TxData::TokenBurnV1 { .. }) => None, + None => Some((H256::zero(), output.value)), } }) .collect(); // Check for token creation for output in tx.outputs.iter() { - let tid = match output.header { - TxHeaderAndExtraData::NormalTx { id } => id, + let tid = match output.data { + Some(TxData::TokenTransferV1 { token_id, .. }) => token_id, + Some(TxData::TokenIssuanceV1 { token_id, .. 
}) => token_id, _ => continue, }; // If we have input and output for the same token it's not a problem @@ -751,62 +783,63 @@ pub mod pallet { token_ticker: String, supply: Value, ) -> Result> { - ensure!(token_name.len() <= 25, Error::::Unapproved); - ensure!(token_ticker.len() <= 5, Error::::Unapproved); - ensure!(!supply.is_zero(), Error::::MinBalanceZero); - - // Input with MLT FEE - let fee = UtxoStore::::get(input_for_fee.outpoint).ok_or(Error::::Unapproved)?.value; - ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); - - // Save in UTXO - let instance = crate::TokenInstance::new_normal( - BlakeTwo256::hash_of(&(&token_name, &token_ticker)), - token_name, - token_ticker, - supply, - ); - let token_id = *instance.id(); - - ensure!( - !>::contains_key(instance.id()), - Error::::InUse - ); - - let mut tx = Transaction { - inputs: crate::vec![ - // Fee an input equal 100 MLT - input_for_fee, - ], - outputs: crate::vec![ - // Output a new tokens - TransactionOutput::new_token(*instance.id(), supply, public), - ], - }; - - // We shall make an output to return odd funds - if fee > Mlt(100).to_munit() { - tx.outputs.push(TransactionOutput::new_pubkey( - fee - Mlt(100).to_munit(), - public, - )); - } - - let sig = crypto::sr25519_sign( - SR25519, - &sp_core::sr25519::Public::from_h256(public), - &tx.encode(), - ) - .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; - for i in 0..tx.inputs.len() { - tx.inputs[i].witness = sig.0.to_vec(); - } - // Success - spend::(caller, &tx)?; - - // Save in Store - >::insert(token_id, Some(instance)); - Ok(token_id) + // ensure!(token_name.len() <= 25, Error::::Unapproved); + // ensure!(token_ticker.len() <= 5, Error::::Unapproved); + // ensure!(!supply.is_zero(), Error::::MinBalanceZero); + // + // // Input with MLT FEE + // let fee = UtxoStore::::get(input_for_fee.outpoint).ok_or(Error::::Unapproved)?.value; + // ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); + // + // // Save in UTXO + // let instance = crate::TokenInstance::new_normal( + // BlakeTwo256::hash_of(&(&token_name, &token_ticker)), + // token_name, + // token_ticker, + // supply, + // ); + // let token_id = *instance.id(); + // + // ensure!( + // !>::contains_key(instance.id()), + // Error::::InUse + // ); + // + // let mut tx = Transaction { + // inputs: crate::vec![ + // // Fee an input equal 100 MLT + // input_for_fee, + // ], + // outputs: crate::vec![ + // // Output a new tokens + // TransactionOutput::new_token(*instance.id(), supply, public), + // ], + // }; + // + // // We shall make an output to return odd funds + // if fee > Mlt(100).to_munit() { + // tx.outputs.push(TransactionOutput::new_pubkey( + // fee - Mlt(100).to_munit(), + // public, + // )); + // } + // + // let sig = crypto::sr25519_sign( + // SR25519, + // &sp_core::sr25519::Public::from_h256(public), + // &tx.encode(), + // ) + // .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; + // for i in 0..tx.inputs.len() { + // tx.inputs[i].witness = sig.0.to_vec(); + // } + // // Success + // spend::(caller, &tx)?; + // + // // Save in Store + // >::insert(token_id, Some(instance)); + // Ok(token_id) + unimplemented!(); } fn mint( @@ -1005,7 +1038,7 @@ pub mod pallet { value, destination: dest, // todo: We need to check what kind of token over here - header: TxHeaderAndExtraData::NormalTx { id: H256::zero() }, + data: None, }, TransactionOutput::new_pubkey(total - value, H256::from(pubkey_raw)), ], diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 
35e2c98..3d5e6a5 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -350,56 +350,56 @@ fn test_script() { #[test] fn test_tokens() { - let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); - test_ext.execute_with(|| { - // Let's create a new test token - let token_id = BlakeTwo256::hash_of(&b"TEST"); - let supply = 1000; - // Let's make a tx for a new token: - // * We need at least one input for the fee and one output for a new token. - // * TokenID for a new token has to be unique. - let instance = TokenInstance::new_normal( - token_id, - b"New token test".to_vec(), - b"NTT".to_vec(), - supply, - ); - let mut first_tx = Transaction { - inputs: vec![ - // 100 MLT - tx_input_gen_no_signature(), - ], - outputs: vec![ - // 100 a new tokens - TransactionOutput::new_token(token_id, supply, H256::from(alice_pub_key)), - // 20 MLT to be paid as a fee, 80 MLT returning - TransactionOutput::new_pubkey(80, H256::from(alice_pub_key)), - ], - } - .sign_unchecked(&[utxo0], 0, &alice_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), first_tx.clone())); - - // Store a new TokenInstance to the Storage - >::insert(token_id, Some(instance.clone())); - dbg!(&>::get(token_id)); - - // alice sends 1000 tokens to karl and the rest back to herself 10 tokens - let utxo_hash_mlt = first_tx.outpoint(1); - let utxo_hash_token = first_tx.outpoint(0); - let prev_utxos = [first_tx.outputs[1].clone(), first_tx.outputs[0].clone()]; - - let tx = Transaction { - inputs: vec![ - TransactionInput::new_empty(utxo_hash_mlt), - TransactionInput::new_empty(utxo_hash_token), - ], - outputs: vec![TransactionOutput::new_token(token_id, 10, H256::from(karl_pub_key))], - } - .sign_unchecked(&prev_utxos, 0, &alice_pub_key) - .sign_unchecked(&prev_utxos, 1, &alice_pub_key); - - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - }); + // let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); + // test_ext.execute_with(|| { + // // Let's create a new test token + // let token_id = BlakeTwo256::hash_of(&b"TEST"); + // let supply = 1000; + // // Let's make a tx for a new token: + // // * We need at least one input for the fee and one output for a new token. + // // * TokenID for a new token has to be unique. 
+ // let instance = TokenInstance::new_normal( + // token_id, + // b"New token test".to_vec(), + // b"NTT".to_vec(), + // supply, + // ); + // let mut first_tx = Transaction { + // inputs: vec![ + // // 100 MLT + // tx_input_gen_no_signature(), + // ], + // outputs: vec![ + // // 100 a new tokens + // TransactionOutput::new_token(token_id, supply, H256::from(alice_pub_key)), + // // 20 MLT to be paid as a fee, 80 MLT returning + // TransactionOutput::new_pubkey(80, H256::from(alice_pub_key)), + // ], + // } + // .sign_unchecked(&[utxo0], 0, &alice_pub_key); + // assert_ok!(Utxo::spend(Origin::signed(H256::zero()), first_tx.clone())); + // + // // Store a new TokenInstance to the Storage + // >::insert(token_id, Some(instance.clone())); + // dbg!(&>::get(token_id)); + // + // // alice sends 1000 tokens to karl and the rest back to herself 10 tokens + // let utxo_hash_mlt = first_tx.outpoint(1); + // let utxo_hash_token = first_tx.outpoint(0); + // let prev_utxos = [first_tx.outputs[1].clone(), first_tx.outputs[0].clone()]; + // + // let tx = Transaction { + // inputs: vec![ + // TransactionInput::new_empty(utxo_hash_mlt), + // TransactionInput::new_empty(utxo_hash_token), + // ], + // outputs: vec![TransactionOutput::new_token(token_id, 10, H256::from(karl_pub_key))], + // } + // .sign_unchecked(&prev_utxos, 0, &alice_pub_key) + // .sign_unchecked(&prev_utxos, 1, &alice_pub_key); + // + // assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + // }); } #[test] @@ -534,46 +534,46 @@ fn test_send_to_address() { #[test] fn nft_test() { - execute_with_alice(|alice_pub_key| { - // Let's create a new test nft - let nft_id = BlakeTwo256::hash_of(&b"TEST"); - let instance = TokenInstance::new_nft( - nft_id, - (*b"01010101010101010101010101010101").to_vec(), - b"http://facebook.com".to_vec(), - alice_pub_key.to_vec(), - ); - - if let TokenInstance::Nft { - id, - data, - data_url, - creator_pubkey, - .. - } = instance - { - let mut tx = Transaction { - inputs: vec![ - // 100 MLT - tx_input_gen_no_signature(), - ], - outputs: vec![TransactionOutput::new_nft( - id, - data.to_vec(), - data_url, - H256::from_slice(creator_pubkey.as_slice()), - )], - }; - let alice_sig = crypto::sr25519_sign(SR25519, &alice_pub_key, &tx.encode()).unwrap(); - tx.inputs[0].witness = alice_sig.0.to_vec(); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - } - - // it should allow to write and read ? - // let rsp = await dataToken.readData(firstTokenId); - // assert.equal(rsp, empty); - // await dataToken.writeData(firstTokenId, data); - // rsp = await dataToken.readData(firstTokenId); - // assert.equal(rsp, data); - }); + // execute_with_alice(|alice_pub_key| { + // // Let's create a new test nft + // let nft_id = BlakeTwo256::hash_of(&b"TEST"); + // let instance = TokenInstance::new_nft( + // nft_id, + // (*b"01010101010101010101010101010101").to_vec(), + // b"http://facebook.com".to_vec(), + // alice_pub_key.to_vec(), + // ); + // + // if let TokenInstance::Nft { + // id, + // data, + // data_url, + // creator_pubkey, + // .. 
+ // } = instance + // { + // let mut tx = Transaction { + // inputs: vec![ + // // 100 MLT + // tx_input_gen_no_signature(), + // ], + // outputs: vec![TransactionOutput::new_nft( + // id, + // data.to_vec(), + // data_url, + // H256::from_slice(creator_pubkey.as_slice()), + // )], + // }; + // let alice_sig = crypto::sr25519_sign(SR25519, &alice_pub_key, &tx.encode()).unwrap(); + // tx.inputs[0].witness = alice_sig.0.to_vec(); + // assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + // } + // + // // it should allow to write and read ? + // // let rsp = await dataToken.readData(firstTokenId); + // // assert.equal(rsp, empty); + // // await dataToken.writeData(firstTokenId, data); + // // rsp = await dataToken.readData(firstTokenId); + // // assert.equal(rsp, data); + // }); } From 717600cc6aad8f42715c0ae47533e1a1b5cf688b Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 21 Oct 2021 19:29:01 +0300 Subject: [PATCH 11/53] I've been making refactoring, removed TokenList, added a new storage PointerToIssueToken Signed-off-by: sinitcin --- Cargo.lock | 17 --- node/Cargo.toml | 1 - node/src/chain_spec.rs | 2 +- pallets/utxo/Cargo.toml | 1 - pallets/utxo/rpc/Cargo.toml | 1 - pallets/utxo/rpc/runtime-api/Cargo.toml | 3 - pallets/utxo/src/lib.rs | 120 ++++++-------------- pallets/utxo/src/tests.rs | 6 +- pallets/utxo/src/tokens.rs | 59 ++++++++++ pallets/utxo/tokens/Cargo.toml | 31 ----- pallets/utxo/tokens/Readme.md | 24 ---- pallets/utxo/tokens/src/lib.rs | 145 ------------------------ runtime/Cargo.toml | 3 - 13 files changed, 96 insertions(+), 317 deletions(-) create mode 100644 pallets/utxo/src/tokens.rs delete mode 100644 pallets/utxo/tokens/Cargo.toml delete mode 100644 pallets/utxo/tokens/Readme.md delete mode 100644 pallets/utxo/tokens/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 55cb396..cd5272b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3387,7 +3387,6 @@ dependencies = [ "pallet-transaction-payment-rpc", "pallet-utxo-rpc", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "sc-authority-discovery", "sc-basic-authorship", "sc-cli", @@ -3664,7 +3663,6 @@ dependencies = [ "pallet-transaction-payment-rpc-runtime-api", "pallet-utxo", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "parity-scale-codec", "sp-api", "sp-block-builder", @@ -4138,7 +4136,6 @@ dependencies = [ "log", "pallet-aura", "pallet-timestamp", - "pallet-utxo-tokens", "parity-scale-codec", "pp-api", "serde", @@ -4160,7 +4157,6 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "parity-scale-codec", "serde", "sp-api", @@ -4174,7 +4170,6 @@ name = "pallet-utxo-rpc-runtime-api" version = "0.1.0" dependencies = [ "frame-support", - "pallet-utxo-tokens", "parity-scale-codec", "serde", "serde_json", @@ -4183,18 +4178,6 @@ dependencies = [ "sp-runtime", ] -[[package]] -name = "pallet-utxo-tokens" -version = "0.1.0" -dependencies = [ - "frame-support", - "hex-literal 0.2.1", - "log", - "parity-scale-codec", - "serde", - "sp-core", -] - [[package]] name = "parity-db" version = "0.3.1" diff --git a/node/Cargo.toml b/node/Cargo.toml index 4a3ca5d..6f3a026 100644 --- a/node/Cargo.toml +++ b/node/Cargo.toml @@ -24,7 +24,6 @@ structopt = '0.3.8' node-template-runtime = {version = '3.0.0', path = '../runtime'} pallet-utxo-rpc = { path = "../pallets/utxo/rpc" } pallet-utxo-rpc-runtime-api = { path = "../pallets/utxo/rpc/runtime-api" } -pallet-utxo-tokens = { path = "../pallets/utxo/tokens" } log = "0.4.8" ureq = "2.2.0" diff --git 
a/node/src/chain_spec.rs b/node/src/chain_spec.rs index cec7851..c35de75 100644 --- a/node/src/chain_spec.rs +++ b/node/src/chain_spec.rs @@ -164,7 +164,7 @@ fn testnet_genesis( .map(|x| { // may need to create a const variable to represent 1_000 and 100_000_000 pallet_utxo::TransactionOutput::new_pubkey( - 1_000 * 100_000_000 * 400_000_000 as pallet_utxo::Value, + 1_000 * 100_000_000 * 400_000_000 as pallet_utxo::tokens::Value, H256::from_slice(x.as_slice()), ) }) diff --git a/pallets/utxo/Cargo.toml b/pallets/utxo/Cargo.toml index 65b3d8f..490a2c0 100644 --- a/pallets/utxo/Cargo.toml +++ b/pallets/utxo/Cargo.toml @@ -21,7 +21,6 @@ std = [ hex-literal = "0.2.1" log = "0.4.8" serde = '1.0.119' -pallet-utxo-tokens = { path = "./tokens" } variant_count = '1.1' [dependencies.bech32] diff --git a/pallets/utxo/rpc/Cargo.toml b/pallets/utxo/rpc/Cargo.toml index bc19661..21bc854 100644 --- a/pallets/utxo/rpc/Cargo.toml +++ b/pallets/utxo/rpc/Cargo.toml @@ -6,7 +6,6 @@ edition = "2018" [dependencies] pallet-utxo-rpc-runtime-api = { path = "./runtime-api" } -pallet-utxo-tokens = {path = "../tokens"} jsonrpc-core = "18.0.0" jsonrpc-core-client = "18.0.0" jsonrpc-derive = "18.0.0" diff --git a/pallets/utxo/rpc/runtime-api/Cargo.toml b/pallets/utxo/rpc/runtime-api/Cargo.toml index ef3d2f6..02587b7 100644 --- a/pallets/utxo/rpc/runtime-api/Cargo.toml +++ b/pallets/utxo/rpc/runtime-api/Cargo.toml @@ -4,9 +4,6 @@ version = "0.1.0" authors = ["RBB Lab"] edition = "2018" -[dependencies] -pallet-utxo-tokens = { path = "../../tokens" } - [dependencies.serde] version = "1.0.104" optional = true diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index ec30915..a97bfe3 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -19,22 +19,21 @@ pub use pallet::*; -#[cfg(test)] -mod mock; - -#[cfg(test)] -mod tests; - #[cfg(feature = "runtime-benchmarks")] mod benchmarking; - +#[cfg(test)] +mod mock; mod script; mod sign; +#[cfg(test)] +mod tests; +pub mod tokens; pub mod weights; #[frame_support::pallet] pub mod pallet { use crate::sign::{self, Scheme}; + use crate::tokens::{Mlt, TokenId, TxData, Value}; use bech32; use chainscript::Script; use codec::{Decode, Encode}; @@ -49,7 +48,6 @@ pub mod pallet { }; use frame_system::pallet_prelude::*; use hex_literal::hex; - use pallet_utxo_tokens::TokenInstance; use pp_api::ProgrammablePoolApi; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -65,17 +63,6 @@ pub mod pallet { }; use sp_runtime::DispatchErrorWithPostInfo; - pub type TokenId = H256; - pub type Value = u128; - pub type String = Vec; - - pub struct Mlt(Value); - impl Mlt { - pub fn to_munit(&self) -> Value { - self.0 * 1_000 * 100_000_000 - } - } - #[pallet::error] pub enum Error { /// Account balance must be greater than or equal to the transfer amount. @@ -237,47 +224,6 @@ pub mod pallet { pub(crate) data: Option, } - #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] - #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] - pub enum TxData { - // TokenTransfer data to another user. If it is a token, then the token data must also be transferred to the recipient. 
- #[codec(index = 1)] - TokenTransferV1 { token_id: TokenId, amount: Value }, - // A new token creation - #[codec(index = 2)] - TokenIssuanceV1 { - token_id: TokenId, - token_ticker: Vec, - amount_to_issue: Value, - // Should be not more than 18 numbers - number_of_decimals: u8, - metadata_URI: Vec, - }, - // Burning a token or NFT - #[codec(index = 3)] - TokenBurnV1 { - token_id: TokenId, - amount_to_burn: Value, - }, - // A new NFT creation - #[codec(index = 4)] - NftMintV1 { - token_id: TokenId, - data_hash: NftDataHash, - metadata_URI: Vec, - }, - } - - #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] - #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] - pub enum NftDataHash { - #[codec(index = 1)] - Hash32([u8; 32]), - #[codec(index = 2)] - Raw(Vec), - // Or any type that you want to implement - } - impl TransactionOutput { /// By default the header is 0: /// token type for both the value and fee is MLT, @@ -333,7 +279,7 @@ pub mod pallet { } } - pub fn new_nft(id: TokenId, data: Vec, data_url: String, creator: H256) -> Self { + pub fn new_nft(id: TokenId, data: Vec, data_url: Vec, creator: H256) -> Self { let pubkey = sp_core::sr25519::Public::from_h256(creator); Self { value: 0, @@ -393,11 +339,6 @@ pub mod pallet { #[allow(type_alias_bounds)] pub type TransactionFor = Transaction; - #[pallet::storage] - #[pallet::getter(fn token_list)] - pub(super) type TokenList = - StorageMap<_, Identity, TokenId, Option, ValueQuery>; - #[pallet::storage] #[pallet::getter(fn reward_total)] pub(super) type RewardTotal = StorageValue<_, Value, ValueQuery>; @@ -407,12 +348,17 @@ pub mod pallet { pub(super) type UtxoStore = StorageMap<_, Identity, H256, Option>, ValueQuery>; + #[pallet::storage] + #[pallet::getter(fn pointer_to_issue_token)] + pub(super) type PointerToIssueToken = + StorageMap<_, Identity, TokenId, /* UTXO */ H256, ValueQuery>; + #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { TokenCreated(H256, T::AccountId), - Minted(H256, T::AccountId, String), + Minted(H256, T::AccountId, Vec), TransactionSuccess(TransactionFor), } @@ -423,6 +369,11 @@ pub mod pallet { } } + pub(crate) fn get_utxo_by_tid(token_id: TokenId) -> Option> { + let utxo_id = PointerToIssueToken::::get(token_id); + UtxoStore::::get(utxo_id) + } + // Strips a transaction of its Signature fields by replacing value with ZERO-initialized fixed hash. 
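The storage refactoring above replaces the duplicated `TokenList` data with an indirection: `PointerToIssueToken` maps a `TokenId` to the hash of its issuing UTXO, and `get_utxo_by_tid` chains that map with `UtxoStore`. A rough sketch of the same two-step lookup, with plain `HashMap`s standing in for the two storage maps (types simplified, names hypothetical):

```rust
use std::collections::HashMap;

type TokenId = u64;
type UtxoHash = u64;

fn lookup_issuance<'a>(
    pointer_to_issue_token: &HashMap<TokenId, UtxoHash>,
    utxo_store: &'a HashMap<UtxoHash, String>,
    token_id: TokenId,
) -> Option<&'a String> {
    // Two chained reads; a miss at either step yields None.
    let utxo_id = pointer_to_issue_token.get(&token_id)?;
    utxo_store.get(utxo_id)
}

fn main() {
    let mut pointers = HashMap::new();
    let mut store = HashMap::new();
    pointers.insert(1, 100);
    store.insert(100, "issuing output".to_string());

    assert_eq!(
        lookup_issuance(&pointers, &store, 1).map(String::as_str),
        Some("issuing output")
    );
    assert_eq!(lookup_issuance(&pointers, &store, 2), None);
}
```

The benefit of the pointer is that token data lives in exactly one place, the issuing output, so the chain state never holds two divergent copies of it.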
pub fn get_simple_transaction( tx: &Transaction, @@ -613,7 +564,7 @@ pub mod pallet { } else { // But when we don't have an input for token but token id exist in TokenList ensure!( - !>::contains_key(tid), + !>::contains_key(tid), "no inputs for the token id" ); } @@ -779,8 +730,8 @@ pub mod pallet { caller: &T::AccountId, public: H256, input_for_fee: TransactionInput, - token_name: String, - token_ticker: String, + token_name: Vec, + token_ticker: Vec, supply: Value, ) -> Result> { // ensure!(token_name.len() <= 25, Error::::Unapproved); @@ -845,7 +796,7 @@ pub mod pallet { fn mint( caller: &T::AccountId, creator_pubkey: sp_core::sr25519::Public, - data_url: String, + data_url: Vec, data: Vec, ) -> Result> { /* let (fee, inputs_hashes) = pick_utxo::(caller, Mlt(100).to_munit()); @@ -954,8 +905,8 @@ pub mod pallet { origin: OriginFor, public: H256, input_for_fee: TransactionInput, - token_name: String, - token_ticker: String, + token_name: Vec, + token_ticker: Vec, supply: Value, ) -> DispatchResultWithPostInfo { let caller = &ensure_signed(origin)?; @@ -978,7 +929,7 @@ pub mod pallet { pub fn mint( origin: OriginFor, creator_pubkey: sp_core::sr25519::Public, - data_url: String, + data_url: Vec, data: Vec, ) -> DispatchResultWithPostInfo { let caller = &ensure_signed(origin)?; @@ -1080,13 +1031,11 @@ pub mod pallet { } } -use pallet_utxo_tokens::{TokenInstance, TokenListData}; - use frame_support::inherent::Vec; use frame_support::pallet_prelude::DispatchResultWithPostInfo; use sp_core::{ crypto::UncheckedFrom, - {H256, H512}, + Encode, {H256, H512}, }; use sp_runtime::sp_std::vec; use utxo_api::UtxoApi; @@ -1096,16 +1045,13 @@ impl crate::Pallet { 1337 } - pub fn tokens_list() -> TokenListData { - >::iter() - .enumerate() - .filter_map(|(_, instance)| instance.1) - .collect() - } - - pub fn nft_read(id: H256) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { - match TokenList::::get(id)? { - TokenInstance::Nft { data, data_url, .. } => Some((data_url, data.to_vec())), + pub fn nft_read(nft_id: H256) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { + match crate::pallet::get_utxo_by_tid::(nft_id)?.data { + Some(crate::tokens::TxData::NftMintV1 { + token_id, + data_hash, + metadata_URI, + }) => Some((metadata_URI, data_hash.encode())), _ => None, } } diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 3d5e6a5..c9a72bf 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -16,8 +16,8 @@ // Author(s): C. 
Yap use crate::{ - mock::*, Destination, RewardTotal, TokenList, Transaction, TransactionInput, TransactionOutput, - UtxoStore, Value, + mock::*, tokens::Value, Destination, RewardTotal, Transaction, TransactionInput, + TransactionOutput, UtxoStore, }; use chainscript::{opcodes::all as opc, Builder}; use codec::Encode; @@ -26,7 +26,7 @@ use frame_support::{ sp_io::crypto, sp_runtime::traits::{BlakeTwo256, Hash}, }; -use pallet_utxo_tokens::TokenInstance; + use sp_core::{sp_std::vec, sr25519::Public, testing::SR25519, H256, H512}; fn tx_input_gen_no_signature() -> (TransactionOutput, TransactionInput) { diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs new file mode 100644 index 0000000..b53300b --- /dev/null +++ b/pallets/utxo/src/tokens.rs @@ -0,0 +1,59 @@ +#![cfg_attr(not(feature = "std"), no_std)] + +use codec::{Decode, Encode}; +use frame_support::{dispatch::Vec, RuntimeDebug}; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use sp_core::{sr25519::Public, H256}; + +pub type Value = u128; + +pub struct Mlt(Value); +impl Mlt { + pub fn to_munit(&self) -> Value { + self.0 * 1_000 * 100_000_000 + } +} + +pub type TokenId = H256; + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +pub enum TxData { + // TokenTransfer data to another user. If it is a token, then the token data must also be transferred to the recipient. + #[codec(index = 1)] + TokenTransferV1 { token_id: TokenId, amount: u128 }, + // A new token creation + #[codec(index = 2)] + TokenIssuanceV1 { + token_id: TokenId, + token_ticker: Vec, + amount_to_issue: u128, + // Should be not more than 18 numbers + number_of_decimals: u8, + metadata_URI: Vec, + }, + // Burning a token or NFT + #[codec(index = 3)] + TokenBurnV1 { + token_id: TokenId, + amount_to_burn: u128, + }, + // A new NFT creation + #[codec(index = 4)] + NftMintV1 { + token_id: TokenId, + data_hash: NftDataHash, + metadata_URI: Vec, + }, +} + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +pub enum NftDataHash { + #[codec(index = 1)] + Hash32([u8; 32]), + #[codec(index = 2)] + Raw(Vec), + // Or any type that you want to implement +} diff --git a/pallets/utxo/tokens/Cargo.toml b/pallets/utxo/tokens/Cargo.toml deleted file mode 100644 index f97a4da..0000000 --- a/pallets/utxo/tokens/Cargo.toml +++ /dev/null @@ -1,31 +0,0 @@ -[package] -name = "pallet-utxo-tokens" -version = "0.1.0" -authors = ["RBB Lab"] -edition = "2018" - -[dependencies] -hex-literal = "0.2.1" -log = "0.4.8" - -[dependencies.frame-support] -default-features = false -git = 'https://github.com/paritytech/substrate.git' -version = '4.0.0-dev' -branch = "master" - -[dependencies.serde] -version = "1.0.104" -features = ["derive"] - -[dependencies.codec] -package = "parity-scale-codec" -version = "2.0.0" -default-features = false -features = ["derive"] - -[dependencies.sp-core] -default-features = false -git = 'https://github.com/paritytech/substrate.git' -version = '4.0.0-dev' -branch = "master" \ No newline at end of file diff --git a/pallets/utxo/tokens/Readme.md b/pallets/utxo/tokens/Readme.md deleted file mode 100644 index 412d360..0000000 --- a/pallets/utxo/tokens/Readme.md +++ /dev/null @@ -1,24 +0,0 @@ -# Token creation - -Call the extrinsic: -```bash -* Creator - Alice -* Pubkey - 0x2e1e60ac02d5a716b300e83b04bb4ddd48360ea119f5024f0ea7b2b1c1578a52 -* Input - we will take Fee over 
here -* Token name - any value -* Token ticker - any value -* Supply - any value -``` - -# Request the tokens list - -Call the RPC: - -```bash -curl http://localhost:9933 -H "Content-Type:application/json;charset=utf-8" -d '{ - "jsonrpc":"2.0", - "id":1, - "method":"tokens_list", - "params": [] -}' -``` \ No newline at end of file diff --git a/pallets/utxo/tokens/src/lib.rs b/pallets/utxo/tokens/src/lib.rs deleted file mode 100644 index 569d851..0000000 --- a/pallets/utxo/tokens/src/lib.rs +++ /dev/null @@ -1,145 +0,0 @@ -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; - -use codec::{Decode, Encode}; -use frame_support::{dispatch::Vec, RuntimeDebug}; -use sp_core::{sr25519::Public, H256}; - -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)] -pub struct NftDataRaw { - inner: Vec, -} - -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)] -pub enum NftData { - Hash32([u8; 32]), - Hash64([u8; 64]), - Raw(Vec), - // Or any type that you want to implement -} - -impl NftDataRaw { - pub fn new(data: Vec) -> NftDataRaw { - Self { inner: data } - } - - pub fn into_data(&mut self) -> Option { - NftData::decode(&mut self.as_slice()).ok() - } - - pub fn to_vec(&self) -> Vec { - self.inner.clone() - } - - pub fn as_slice(&self) -> &[u8] { - self.inner.as_slice() - } -} - -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)] -pub struct NftOwnerRaw { - inner: Vec, -} - -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] -pub enum NftOwner { - Sr25519(Public), - Raw(Vec), - // Or any type that you want to implement -} - -impl NftOwnerRaw { - pub fn new(data: Vec) -> Self { - Self { inner: data } - } - - pub fn into_data(&mut self) -> Option { - NftOwner::decode(&mut self.as_slice()).ok() - } - - pub fn to_vec(&self) -> Vec { - self.inner.clone() - } - - pub fn as_slice(&self) -> &[u8] { - self.inner.as_slice() - } -} - -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)] -pub enum TokenInstance { - Normal { - id: H256, - version: u16, - name: Vec, - ticker: Vec, - supply: u128, - // We can add another fields like: - // pub number_format: NumberFormat, - // pub image: UUID, - // pub transaction: XXX, - }, - Nft { - id: H256, - version: u16, - data: NftDataRaw, - data_url: Vec, - creator_pubkey: NftOwnerRaw, - }, -} - -impl Default for TokenInstance { - fn default() -> Self { - Self::Normal { - id: H256::zero(), - version: 0, - name: Vec::new(), - ticker: Vec::new(), - supply: 0, - } - } -} - -impl TokenInstance { - pub fn new_normal(id: H256, name: Vec, ticker: Vec, supply: u128) -> Self { - Self::Normal { - id, - version: 0, - name, - ticker, - supply, - } - } - pub fn new_nft(id: H256, data: Vec, data_url: Vec, creator_pubkey: Vec) -> Self { - Self::Nft { - id, - version: 0, - data: NftDataRaw::new(data), - data_url, - creator_pubkey: NftOwnerRaw::new(creator_pubkey), - } - } - - pub fn id(&self) -> &H256 { - match self { - Self::Normal { id, .. } => id, - Self::Nft { id, .. } => id, - } - } - - pub fn version(&self) -> u16 { - *match self { - Self::Normal { version, .. } => version, - Self::Nft { version, .. 
} => version, - } - } -} - -pub type TokenListData = Vec; diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 6ab2f14..5e7a413 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -15,9 +15,6 @@ git = 'https://github.com/paritytech/substrate.git' version = '5.0.0-dev' branch = "master" -[dependencies] -pallet-utxo-tokens = { path = "../pallets/utxo/tokens" } - [dependencies.codec] default-features = false features = ['derive'] From 216a5eea839b101662f8141e8292c2356f6e0ba4 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Sun, 24 Oct 2021 23:34:52 +0300 Subject: [PATCH 12/53] Added draft of TransactionVerifier. Value in the PointerToIssueToken now is an OptionQuery. Signed-off-by: sinitcin --- pallets/utxo/src/base58_nostd.rs | 229 ++++++++++ pallets/utxo/src/lib.rs | 743 +++++++++++++++---------------- pallets/utxo/src/tokens.rs | 107 ++++- pallets/utxo/src/verifier.rs | 143 ++++++ 4 files changed, 823 insertions(+), 399 deletions(-) create mode 100644 pallets/utxo/src/base58_nostd.rs create mode 100644 pallets/utxo/src/verifier.rs diff --git a/pallets/utxo/src/base58_nostd.rs b/pallets/utxo/src/base58_nostd.rs new file mode 100644 index 0000000..fd2a58a --- /dev/null +++ b/pallets/utxo/src/base58_nostd.rs @@ -0,0 +1,229 @@ +//! Base58-to-text encoding +//! +//! Based on https://github.com/trezor/trezor-crypto/blob/master/base58.c +//! commit hash: c6e7d37 +//! works only up to 128 bytes + +const ALPHABET: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; + +const B58_DIGITS_MAP: &'static [i8] = &[ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, -1, -1, -1, -1, -1, -1, -1, 9, 10, 11, 12, 13, 14, 15, 16, -1, + 17, 18, 19, 20, 21, -1, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, -1, -1, -1, -1, -1, -1, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, -1, -1, -1, -1, -1, +]; + +/// Errors that can occur when decoding base58 encoded string. +#[derive(Debug, PartialEq)] +pub enum FromBase58Error { + /// The input contained a character which is not a part of the base58 format. + InvalidBase58Character(char, usize), + /// The input had invalid length. + InvalidBase58Length, +} + +/// A trait for converting a value to base58 encoded string. +pub trait ToBase58 { + /// Converts a value of `self` to a base58 value, returning the owned string. + fn to_base58(&self) -> Vec; +} + +/// A trait for converting base58 encoded values. +pub trait FromBase58 { + /// Convert a value of `self`, interpreted as base58 encoded data, into an owned vector of bytes, returning a vector. 
+ fn from_base58(&self) -> Result, FromBase58Error>; +} + +impl ToBase58 for [u8] { + fn to_base58(&self) -> Vec { + let zcount = self.iter().take_while(|x| **x == 0).count(); + let size = (self.len() - zcount) * 138 / 100 + 1; + let mut buffer = vec![0u8; size]; + + let mut i = zcount; + let mut high = size - 1; + + while i < self.len() { + let mut carry = self[i] as u32; + let mut j = size - 1; + + while j > high || carry != 0 { + carry += 256 * buffer[j] as u32; + buffer[j] = (carry % 58) as u8; + carry /= 58; + + // in original trezor implementation it was underflowing + if j > 0 { + j -= 1; + } + } + + i += 1; + high = j; + } + + let mut j = buffer.iter().take_while(|x| **x == 0).count(); + + let mut result = Vec::new(); + for _ in 0..zcount { + result.push(b'1'); + } + + while j < size { + result.push(ALPHABET[buffer[j] as usize]); + j += 1; + } + + result + } +} + +impl FromBase58 for str { + fn from_base58(&self) -> Result, FromBase58Error> { + let mut bin = [0u8; 132]; + let mut out = [0u32; (132 + 3) / 4]; + let bytesleft = (bin.len() % 4) as u8; + let zeromask = match bytesleft { + 0 => 0u32, + _ => 0xffffffff << (bytesleft * 8), + }; + + let zcount = self.chars().take_while(|x| *x == '1').count(); + let mut i = zcount; + let b58: Vec = self.bytes().collect(); + + while i < self.len() { + if (b58[i] & 0x80) != 0 { + // High-bit set on invalid digit + return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); + } + + if B58_DIGITS_MAP[b58[i] as usize] == -1 { + // // Invalid base58 digit + return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); + } + + let mut c = B58_DIGITS_MAP[b58[i] as usize] as u64; + let mut j = out.len(); + while j != 0 { + j -= 1; + let t = out[j] as u64 * 58 + c; + c = (t & 0x3f00000000) >> 32; + out[j] = (t & 0xffffffff) as u32; + } + + if c != 0 { + // Output number too big (carry to the next int32) + return Err(FromBase58Error::InvalidBase58Length); + } + + if (out[0] & zeromask) != 0 { + // Output number too big (last int32 filled too far) + return Err(FromBase58Error::InvalidBase58Length); + } + + i += 1; + } + + let mut i = 1; + let mut j = 0; + + bin[0] = match bytesleft { + 3 => ((out[0] & 0xff0000) >> 16) as u8, + 2 => ((out[0] & 0xff00) >> 8) as u8, + 1 => { + j = 1; + (out[0] & 0xff) as u8 + } + _ => { + i = 0; + bin[0] + } + }; + + while j < out.len() { + bin[i] = ((out[j] >> 0x18) & 0xff) as u8; + bin[i + 1] = ((out[j] >> 0x10) & 0xff) as u8; + bin[i + 2] = ((out[j] >> 8) & 0xff) as u8; + bin[i + 3] = ((out[j] >> 0) & 0xff) as u8; + i += 4; + j += 1; + } + + let leading_zeros = bin.iter().take_while(|x| **x == 0).count(); + Ok(bin[leading_zeros - zcount..].to_vec()) + } +} + +#[cfg(test)] +mod tests { + use super::{FromBase58, ToBase58}; + + #[test] + fn test_from_base58_basic() { + assert_eq!("".from_base58().unwrap(), b""); + assert_eq!("Z".from_base58().unwrap(), &[32]); + assert_eq!("n".from_base58().unwrap(), &[45]); + assert_eq!("q".from_base58().unwrap(), &[48]); + assert_eq!("r".from_base58().unwrap(), &[49]); + assert_eq!("z".from_base58().unwrap(), &[57]); + assert_eq!("4SU".from_base58().unwrap(), &[45, 49]); + assert_eq!("4k8".from_base58().unwrap(), &[49, 49]); + assert_eq!("ZiCa".from_base58().unwrap(), &[97, 98, 99]); + assert_eq!("3mJr7AoUXx2Wqd".from_base58().unwrap(), b"1234598760"); + assert_eq!( + "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".from_base58().unwrap(), + b"abcdefghijklmnopqrstuvwxyz" + ); + } + + #[test] + fn test_from_base58_invalid_char() { + assert!("0".from_base58().is_err()); + 
assert!("O".from_base58().is_err()); + assert!("I".from_base58().is_err()); + assert!("l".from_base58().is_err()); + assert!("3mJr0".from_base58().is_err()); + assert!("O3yxU".from_base58().is_err()); + assert!("3sNI".from_base58().is_err()); + assert!("4kl8".from_base58().is_err()); + assert!("s!5<".from_base58().is_err()); + assert!("t$@mX<*".from_base58().is_err()); + } + + #[test] + fn test_from_base58_initial_zeros() { + assert_eq!("1ZiCa".from_base58().unwrap(), b"\0abc"); + assert_eq!("11ZiCa".from_base58().unwrap(), b"\0\0abc"); + assert_eq!("111ZiCa".from_base58().unwrap(), b"\0\0\0abc"); + assert_eq!("1111ZiCa".from_base58().unwrap(), b"\0\0\0\0abc"); + } + + #[test] + fn test_to_base58_basic() { + assert_eq!(b"".to_base58(), ""); + assert_eq!(&[32].to_base58(), "Z"); + assert_eq!(&[45].to_base58(), "n"); + assert_eq!(&[48].to_base58(), "q"); + assert_eq!(&[49].to_base58(), "r"); + assert_eq!(&[57].to_base58(), "z"); + assert_eq!(&[45, 49].to_base58(), "4SU"); + assert_eq!(&[49, 49].to_base58(), "4k8"); + assert_eq!(b"abc".to_base58(), "ZiCa"); + assert_eq!(b"1234598760".to_base58(), "3mJr7AoUXx2Wqd"); + assert_eq!( + b"abcdefghijklmnopqrstuvwxyz".to_base58(), + "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f" + ); + } + + #[test] + fn test_to_base58_initial_zeros() { + assert_eq!(b"\0abc".to_base58(), "1ZiCa"); + assert_eq!(b"\0\0abc".to_base58(), "11ZiCa"); + assert_eq!(b"\0\0\0abc".to_base58(), "111ZiCa"); + assert_eq!(b"\0\0\0\0abc".to_base58(), "1111ZiCa"); + } +} diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index a97bfe3..4cb5c6a 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -19,6 +19,7 @@ pub use pallet::*; +mod base58_nostd; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; #[cfg(test)] @@ -28,17 +29,19 @@ mod sign; #[cfg(test)] mod tests; pub mod tokens; +pub mod verifier; pub mod weights; #[frame_support::pallet] pub mod pallet { - use crate::sign::{self, Scheme}; - use crate::tokens::{Mlt, TokenId, TxData, Value}; + // use crate::sign::{self, Scheme}; + use crate::tokens::{/*Mlt,*/ OutputData, TokenId, Value}; + use crate::verifier::TransactionVerifier; use bech32; use chainscript::Script; use codec::{Decode, Encode}; use core::marker::PhantomData; - use frame_support::weights::PostDispatchInfo; + // use frame_support::weights::PostDispatchInfo; use frame_support::{ dispatch::{DispatchResultWithPostInfo, Vec}, pallet_prelude::*, @@ -52,16 +55,15 @@ pub mod pallet { #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{ - sp_std::collections::btree_map::BTreeMap, + // sp_std::collections::btree_map::BTreeMap, sp_std::{convert::TryInto, str, vec}, - sr25519::Public as SR25Pub, + sr25519, testing::SR25519, - H256, H512, + H256, + H512, }; - use sp_runtime::traits::{ - AtLeast32Bit, Zero, /*, StaticLookup , AtLeast32BitUnsigned, Member, One */ - }; - use sp_runtime::DispatchErrorWithPostInfo; + use sp_runtime::traits::AtLeast32Bit; + // use sp_runtime::DispatchErrorWithPostInfo; #[pallet::error] pub enum Error { @@ -189,7 +191,7 @@ pub mod pallet { #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] pub enum Destination { /// Plain pay-to-pubkey - Pubkey(SR25Pub), + Pubkey(sr25519::Public), /// Pay to fund a new programmable pool. Takes code and data. CreatePP(Vec, Vec), /// Pay to an existing contract. Takes a destination account and input data. 
@@ -221,7 +223,7 @@ pub mod pallet { pub struct TransactionOutput { pub(crate) value: Value, pub(crate) destination: Destination, - pub(crate) data: Option, + pub(crate) data: Option, } impl TransactionOutput { @@ -256,38 +258,6 @@ pub mod pallet { } } - pub fn new_token( - token_id: TokenId, - token_ticker: Vec, - amount_to_issue: Value, - number_of_decimals: u8, - metadata_URI: Vec, - pubkey: H256, - ) -> Self { - let pubkey = sp_core::sr25519::Public::from_h256(pubkey); - Self { - value: 0, - destination: Destination::Pubkey(pubkey), - data: Some(TxData::TokenIssuanceV1 { - token_id, - token_ticker, - amount_to_issue, - // Should be not more than 18 numbers - number_of_decimals, - metadata_URI, - }), - } - } - - pub fn new_nft(id: TokenId, data: Vec, data_url: Vec, creator: H256) -> Self { - let pubkey = sp_core::sr25519::Public::from_h256(creator); - Self { - value: 0, - destination: Destination::Pubkey(pubkey), - data: None, - } - } - /// Create a new output to given script hash. pub fn new_script_hash(value: Value, hash: H256) -> Self { Self { @@ -316,7 +286,7 @@ pub mod pallet { mut self, utxos: &[TransactionOutput], index: usize, - pk: &SR25Pub, + pk: &sr25519::Public, ) -> Option { let msg = crate::sign::TransactionSigMsg::construct( Default::default(), @@ -351,7 +321,7 @@ pub mod pallet { #[pallet::storage] #[pallet::getter(fn pointer_to_issue_token)] pub(super) type PointerToIssueToken = - StorageMap<_, Identity, TokenId, /* UTXO */ H256, ValueQuery>; + StorageMap<_, Identity, TokenId, /* UTXO */ H256, OptionQuery>; #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] @@ -369,8 +339,10 @@ pub mod pallet { } } - pub(crate) fn get_utxo_by_tid(token_id: TokenId) -> Option> { - let utxo_id = PointerToIssueToken::::get(token_id); + pub(crate) fn get_utxo_by_token_id( + token_id: TokenId, + ) -> Option> { + let utxo_id = PointerToIssueToken::::get(token_id)?; UtxoStore::::get(utxo_id) } @@ -439,243 +411,260 @@ pub mod pallet { pub fn validate_transaction( tx: &TransactionFor, ) -> Result { - //ensure rather than assert to avoid panic - //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries - ensure!(!tx.inputs.is_empty(), "no inputs"); - ensure!(!tx.outputs.is_empty(), "no outputs"); - ensure!(tx.inputs.len() < (u32::MAX as usize), "too many inputs"); - ensure!(tx.outputs.len() < (u32::MAX as usize), "too many outputs"); - - //ensure each input is used only a single time - //maps each input into btree - //if map.len() > num of inputs then fail - //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html - //WARNING workshop code has a bug here - //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs - //input_map.len() > transaction.inputs.len() //THIS IS WRONG - { - let input_map: BTreeMap<_, ()> = - tx.inputs.iter().map(|input| (input.outpoint, ())).collect(); - //we want map size and input size to be equal to ensure each is used only once - ensure!( - input_map.len() == tx.inputs.len(), - "each input should be used only once" - ); - } - //ensure each output is unique - //map each output to btree to count unique elements - //WARNING example code has a bug here - //out_map.len() != transaction.outputs.len() //THIS IS WRONG - { - let out_map: BTreeMap<_, ()> = tx.outputs.iter().map(|output| (output, ())).collect(); - //check each output is defined only once - ensure!( - out_map.len() == tx.outputs.len(), - "each output should be used once" - ); - } - let simple_tx = 
get_simple_transaction(tx); - let mut reward = 0; - // Resolve the transaction inputs by looking up UTXOs being spent by them. - // - // This will cointain one of the following: - // * Ok(utxos): a vector of UTXOs each input spends. - // * Err(missing): a vector of outputs missing from the store - let input_utxos = { - let mut missing = Vec::new(); - let mut resolved: Vec> = Vec::new(); - - for input in &tx.inputs { - if let Some(input_utxo) = >::get(&input.outpoint) { - let lock_commitment = input_utxo.destination.lock_commitment(); - ensure!( - input.lock_hash() == *lock_commitment, - "Lock hash does not match" - ); - resolved.push(input_utxo); - } else { - missing.push(input.outpoint.clone().as_fixed_bytes().to_vec()); - } - } + TransactionVerifier::<'_, T>::new(tx) + .checking_inputs()? + .checking_outputs()? + .checking_signatures()? + .checking_utxos_exists()? + .checking_tokens_transferring()? + .checking_tokens_issued()? + .checking_nft_mint()? + .checking_assets_burn()? + .calculating_reward()? + .collect_result() + + /* + //ensure rather than assert to avoid panic + //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries + ensure!(!tx.inputs.is_empty(), "no inputs"); + ensure!(!tx.outputs.is_empty(), "no outputs"); + ensure!(tx.inputs.len() < (u32::MAX as usize), "too many inputs"); + ensure!(tx.outputs.len() < (u32::MAX as usize), "too many outputs"); + + //ensure each input is used only a single time + //maps each input into btree + //if map.len() > num of inputs then fail + //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html + //WARNING workshop code has a bug here + //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs + //input_map.len() > transaction.inputs.len() //THIS IS WRONG + { + let input_map: BTreeMap<_, ()> = + tx.inputs.iter().map(|input| (input.outpoint, ())).collect(); + //we want map size and input size to be equal to ensure each is used only once + ensure!( + input_map.len() == tx.inputs.len(), + "each input should be used only once" + ); + } + //ensure each output is unique + //map each output to btree to count unique elements + //WARNING example code has a bug here + //out_map.len() != transaction.outputs.len() //THIS IS WRONG + { + let out_map: BTreeMap<_, ()> = tx.outputs.iter().map(|output| (output, ())).collect(); + //check each output is defined only once + ensure!( + out_map.len() == tx.outputs.len(), + "each output should be used once" + ); + } + let simple_tx = get_simple_transaction(tx); + let mut reward = 0; + // Resolve the transaction inputs by looking up UTXOs being spent by them. + // + // This will cointain one of the following: + // * Ok(utxos): a vector of UTXOs each input spends. 
+ // * Err(missing): a vector of outputs missing from the store + let input_utxos = { + let mut missing = Vec::new(); + let mut resolved: Vec> = Vec::new(); + + for input in &tx.inputs { + if let Some(input_utxo) = >::get(&input.outpoint) { + let lock_commitment = input_utxo.destination.lock_commitment(); + ensure!( + input.lock_hash() == *lock_commitment, + "Lock hash does not match" + ); + resolved.push(input_utxo); + } else { + missing.push(input.outpoint.clone().as_fixed_bytes().to_vec()); + } + } + + missing.is_empty().then(|| resolved).ok_or(missing) + }; - missing.is_empty().then(|| resolved).ok_or(missing) - }; - - let full_inputs: Vec<(TokenId, TransactionOutputFor)> = tx - .inputs - .iter() - .filter_map(|input| >::get(&input.outpoint)) - .filter_map(|output| match output.data { - Some(ref data) => match data { - TxData::TokenTransferV1 { token_id, amount } => Some((*token_id, output)), - TxData::TokenIssuanceV1 { - token_id, - token_ticker, - amount_to_issue, - number_of_decimals, - metadata_URI, - } => Some((*token_id, output)), - TxData::TokenBurnV1 { .. } => { - // frame_support::fail!("Token gone forever, we can't use it anymore").ok(); - None - } - TxData::NftMintV1 { - token_id, - data_hash, - metadata_URI, - } => Some((*token_id, output)), - }, - None => Some((H256::zero(), output)), - }) - .collect(); - - let input_vec: Vec<(TokenId, Value)> = - full_inputs.iter().map(|output| (output.0, output.1.value)).collect(); - - let out_vec: Vec<(TokenId, Value)> = tx - .outputs - .iter() - .filter_map(|output| { - match output.data { - Some(TxData::TokenTransferV1 { token_id, amount }) => Some((token_id, amount)), - Some(TxData::TokenIssuanceV1 { - token_id, - amount_to_issue, - .. - }) => Some((token_id, amount_to_issue)), - Some(TxData::NftMintV1 { token_id, .. }) => Some((token_id, 1)), - // Token gone forever, we can't use it anymore - Some(TxData::TokenBurnV1 { .. }) => None, - None => Some((H256::zero(), output.value)), - } - }) - .collect(); - - // Check for token creation - for output in tx.outputs.iter() { - let tid = match output.data { - Some(TxData::TokenTransferV1 { token_id, .. }) => token_id, - Some(TxData::TokenIssuanceV1 { token_id, .. }) => token_id, - _ => continue, - }; - // If we have input and output for the same token it's not a problem - if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() { - continue; - } else { - // But when we don't have an input for token but token id exist in TokenList - ensure!( - !>::contains_key(tid), - "no inputs for the token id" - ); - } - } + let full_inputs: Vec<(TokenId, TransactionOutputFor)> = tx + .inputs + .iter() + .filter_map(|input| >::get(&input.outpoint)) + .filter_map(|output| match output.data { + Some(ref data) => match data { + OutputData::TokenTransferV1 { token_id, amount } => Some((*token_id, output)), + OutputData::TokenIssuanceV1 { + token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_URI, + } => Some((*token_id, output)), + OutputData::TokenBurnV1 { .. 
} => { + // frame_support::fail!("Token gone forever, we can't use it anymore").ok(); + None + } + OutputData::NftMintV1 { + token_id, + data_hash, + metadata_URI, + } => Some((*token_id, output)), + }, + None => Some((H256::zero(), output)), + }) + .collect(); - let mut new_utxos = Vec::new(); - // Check that outputs are valid - for (output_index, output) in tx.outputs.iter().enumerate() { - match output.destination { - Destination::Pubkey(_) | Destination::ScriptHash(_) => { - ensure!(output.value > 0, "output value must be nonzero"); - let hash = tx.outpoint(output_index as u64); - ensure!(!>::contains_key(hash), "output already exists"); - new_utxos.push(hash.as_fixed_bytes().to_vec()); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate OP_CALL"); - } - } - } + let input_vec: Vec<(TokenId, Value)> = + full_inputs.iter().map(|output| (output.0, output.1.value)).collect(); - // if all spent UTXOs are available, check the math and signatures - if let Ok(input_utxos) = &input_utxos { - // We have to check sum of input tokens is less or equal to output tokens. - let mut inputs_sum: BTreeMap = BTreeMap::new(); - let mut outputs_sum: BTreeMap = BTreeMap::new(); - - for x in input_vec { - let value = - x.1.checked_add(*inputs_sum.get(&x.0).unwrap_or(&0)) - .ok_or("input value overflow")?; - inputs_sum.insert(x.0, value); - } - for x in out_vec { - let value = - x.1.checked_add(*outputs_sum.get(&x.0).unwrap_or(&0)) - .ok_or("output value overflow")?; - outputs_sum.insert(x.0, value); - } + let out_vec: Vec<(TokenId, Value)> = tx + .outputs + .iter() + .filter_map(|output| { + match output.data { + Some(OutputData::TokenTransferV1 { token_id, amount }) => { + Some((token_id, amount)) + } + Some(OutputData::TokenIssuanceV1 { + token_id, + amount_to_issue, + .. + }) => Some((token_id, amount_to_issue)), + Some(OutputData::NftMintV1 { token_id, .. }) => Some((token_id, 1)), + // Token gone forever, we can't use it anymore + Some(OutputData::TokenBurnV1 { .. }) => None, + None => Some((H256::zero(), output.value)), + } + }) + .collect(); - let mut new_token_exist = false; - for output_token in &outputs_sum { - match inputs_sum.get(&output_token.0) { - Some(input_value) => ensure!( - input_value >= &output_token.1, - "output value must not exceed input value" - ), - None => { - // If the transaction has one an output with a new token ID - if new_token_exist { - frame_support::fail!("input for the token not found") - } else { - new_token_exist = true; - } - } - } - } + // Check for token creation + for output in tx.outputs.iter() { + let tid = match output.data { + Some(OutputData::TokenTransferV1 { token_id, .. }) => token_id, + Some(OutputData::TokenIssuanceV1 { token_id, .. }) => token_id, + _ => continue, + }; + // If we have input and output for the same token it's not a problem + if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() { + continue; + } else { + // But when we don't have an input for token but token id exist in TokenList + ensure!( + !>::contains_key(tid), + "no inputs for the token id" + ); + } + } - for (index, (input, input_utxo)) in tx.inputs.iter().zip(input_utxos).enumerate() { - match &input_utxo.destination { - Destination::Pubkey(pubkey) => { - let msg = sign::TransactionSigMsg::construct( - sign::SigHash::default(), - &tx, - &input_utxos, - index as u64, - u32::MAX, - ); - let ok = pubkey - .parse_sig(&input.witness[..]) - .ok_or("bad signature format")? 
- .verify(&msg); - ensure!(ok, "signature must be valid"); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate spending of OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate spending of OP_CALL"); - } - Destination::ScriptHash(_hash) => { - let witness = input.witness.clone(); - let lock = input.lock.clone(); - crate::script::verify(&tx, &input_utxos, index as u64, witness, lock) - .map_err(|_| "script verification failed")?; - } - } - } + let mut new_utxos = Vec::new(); + // Check that outputs are valid + for (output_index, output) in tx.outputs.iter().enumerate() { + match output.destination { + Destination::Pubkey(_) | Destination::ScriptHash(_) => { + ensure!(output.value > 0, "output value must be nonzero"); + let hash = tx.outpoint(output_index as u64); + ensure!(!>::contains_key(hash), "output already exists"); + new_utxos.push(hash.as_fixed_bytes().to_vec()); + } + Destination::CreatePP(_, _) => { + log::info!("TODO validate OP_CREATE"); + } + Destination::CallPP(_, _) => { + log::info!("TODO validate OP_CALL"); + } + } + } - // Reward at the moment only in MLT - reward = if inputs_sum.contains_key(&(H256::zero() as TokenId)) - && outputs_sum.contains_key(&(H256::zero() as TokenId)) - { - inputs_sum[&(H256::default() as TokenId)] - .checked_sub(outputs_sum[&(H256::zero() as TokenId)]) - .ok_or("reward underflow")? - } else { - *inputs_sum.get(&(H256::zero() as TokenId)).ok_or("fee doesn't exist")? - }; - } + // if all spent UTXOs are available, check the math and signatures + if let Ok(input_utxos) = &input_utxos { + // We have to check sum of input tokens is less or equal to output tokens. + let mut inputs_sum: BTreeMap = BTreeMap::new(); + let mut outputs_sum: BTreeMap = BTreeMap::new(); + + for x in input_vec { + let value = + x.1.checked_add(*inputs_sum.get(&x.0).unwrap_or(&0)) + .ok_or("input value overflow")?; + inputs_sum.insert(x.0, value); + } + for x in out_vec { + let value = + x.1.checked_add(*outputs_sum.get(&x.0).unwrap_or(&0)) + .ok_or("output value overflow")?; + outputs_sum.insert(x.0, value); + } + + let mut new_token_exist = false; + for output_token in &outputs_sum { + match inputs_sum.get(&output_token.0) { + Some(input_value) => ensure!( + input_value >= &output_token.1, + "output value must not exceed input value" + ), + None => { + // If the transaction has one an output with a new token ID + if new_token_exist { + frame_support::fail!("input for the token not found") + } else { + new_token_exist = true; + } + } + } + } + + for (index, (input, input_utxo)) in tx.inputs.iter().zip(input_utxos).enumerate() { + match &input_utxo.destination { + Destination::Pubkey(pubkey) => { + let msg = sign::TransactionSigMsg::construct( + sign::SigHash::default(), + &tx, + &input_utxos, + index as u64, + u32::MAX, + ); + let ok = pubkey + .parse_sig(&input.witness[..]) + .ok_or("bad signature format")? 
+ .verify(&msg); + ensure!(ok, "signature must be valid"); + } + Destination::CreatePP(_, _) => { + log::info!("TODO validate spending of OP_CREATE"); + } + Destination::CallPP(_, _) => { + log::info!("TODO validate spending of OP_CALL"); + } + Destination::ScriptHash(_hash) => { + let witness = input.witness.clone(); + let lock = input.lock.clone(); + crate::script::verify(&tx, &input_utxos, index as u64, witness, lock) + .map_err(|_| "script verification failed")?; + } + } + } + + // Reward at the moment only in MLT + reward = if inputs_sum.contains_key(&(H256::zero() as TokenId)) + && outputs_sum.contains_key(&(H256::zero() as TokenId)) + { + inputs_sum[&(H256::default() as TokenId)] + .checked_sub(outputs_sum[&(H256::zero() as TokenId)]) + .ok_or("reward underflow")? + } else { + *inputs_sum.get(&(H256::zero() as TokenId)).ok_or("fee doesn't exist")? + }; + } + + Ok(ValidTransaction { + priority: reward as u64, + requires: input_utxos.map_or_else(|x| x, |_| Vec::new()), + provides: new_utxos, + longevity: TransactionLongevity::MAX, + propagate: true, + }) - Ok(ValidTransaction { - priority: reward as u64, - requires: input_utxos.map_or_else(|x| x, |_| Vec::new()), - provides: new_utxos, - longevity: TransactionLongevity::MAX, - propagate: true, - }) + */ } /// Update storage to reflect changes made by transaction @@ -726,80 +715,80 @@ pub mod pallet { Ok(().into()) } - pub fn token_create( - caller: &T::AccountId, - public: H256, - input_for_fee: TransactionInput, - token_name: Vec, - token_ticker: Vec, - supply: Value, - ) -> Result> { - // ensure!(token_name.len() <= 25, Error::::Unapproved); - // ensure!(token_ticker.len() <= 5, Error::::Unapproved); - // ensure!(!supply.is_zero(), Error::::MinBalanceZero); - // - // // Input with MLT FEE - // let fee = UtxoStore::::get(input_for_fee.outpoint).ok_or(Error::::Unapproved)?.value; - // ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); - // - // // Save in UTXO - // let instance = crate::TokenInstance::new_normal( - // BlakeTwo256::hash_of(&(&token_name, &token_ticker)), - // token_name, - // token_ticker, - // supply, - // ); - // let token_id = *instance.id(); - // - // ensure!( - // !>::contains_key(instance.id()), - // Error::::InUse - // ); - // - // let mut tx = Transaction { - // inputs: crate::vec![ - // // Fee an input equal 100 MLT - // input_for_fee, - // ], - // outputs: crate::vec![ - // // Output a new tokens - // TransactionOutput::new_token(*instance.id(), supply, public), - // ], - // }; - // - // // We shall make an output to return odd funds - // if fee > Mlt(100).to_munit() { - // tx.outputs.push(TransactionOutput::new_pubkey( - // fee - Mlt(100).to_munit(), - // public, - // )); - // } - // - // let sig = crypto::sr25519_sign( - // SR25519, - // &sp_core::sr25519::Public::from_h256(public), - // &tx.encode(), - // ) - // .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; - // for i in 0..tx.inputs.len() { - // tx.inputs[i].witness = sig.0.to_vec(); - // } - // // Success - // spend::(caller, &tx)?; - // - // // Save in Store - // >::insert(token_id, Some(instance)); - // Ok(token_id) - unimplemented!(); - } - + // pub fn token_create( + // caller: &T::AccountId, + // public: H256, + // input_for_fee: TransactionInput, + // token_name: Vec, + // token_ticker: Vec, + // supply: Value, + // ) -> Result> { + // ensure!(token_name.len() <= 25, Error::::Unapproved); + // ensure!(token_ticker.len() <= 5, Error::::Unapproved); + // ensure!(!supply.is_zero(), Error::::MinBalanceZero); + // + // // 
Input with MLT FEE + // let fee = UtxoStore::::get(input_for_fee.outpoint).ok_or(Error::::Unapproved)?.value; + // ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); + // + // // Save in UTXO + // let instance = crate::TokenInstance::new_normal( + // BlakeTwo256::hash_of(&(&token_name, &token_ticker)), + // token_name, + // token_ticker, + // supply, + // ); + // let token_id = *instance.id(); + // + // ensure!( + // !>::contains_key(instance.id()), + // Error::::InUse + // ); + // + // let mut tx = Transaction { + // inputs: crate::vec![ + // // Fee an input equal 100 MLT + // input_for_fee, + // ], + // outputs: crate::vec![ + // // Output a new tokens + // TransactionOutput::new_token(*instance.id(), supply, public), + // ], + // }; + // + // // We shall make an output to return odd funds + // if fee > Mlt(100).to_munit() { + // tx.outputs.push(TransactionOutput::new_pubkey( + // fee - Mlt(100).to_munit(), + // public, + // )); + // } + // + // let sig = crypto::sr25519_sign( + // SR25519, + // &sp_core::sr25519::Public::from_h256(public), + // &tx.encode(), + // ) + // .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; + // for i in 0..tx.inputs.len() { + // tx.inputs[i].witness = sig.0.to_vec(); + // } + // // Success + // spend::(caller, &tx)?; + // + // // Save in Store + // >::insert(token_id, Some(instance)); + // Ok(token_id) + // } + + /* fn mint( caller: &T::AccountId, creator_pubkey: sp_core::sr25519::Public, data_url: Vec, data: Vec, ) -> Result> { - /* let (fee, inputs_hashes) = pick_utxo::(caller, Mlt(100).to_munit()); + let (fee, inputs_hashes) = pick_utxo::(caller, Mlt(100).to_munit()); ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); ensure!(data_url.len() <= 50, Error::::Unapproved); @@ -845,10 +834,8 @@ pub mod pallet { // Save in Store TokenList::::insert(instance.id(), Some(instance.clone())); Ok(*instance.id()) - - */ - unimplemented!() } + */ /// Pick the UTXOs of `caller` from UtxoStore that satisfy request `value` /// @@ -900,44 +887,6 @@ pub mod pallet { Ok(().into()) } - #[pallet::weight(T::WeightInfo::token_create(768_usize.saturating_add(token_name.len()) as u32))] - pub fn token_create( - origin: OriginFor, - public: H256, - input_for_fee: TransactionInput, - token_name: Vec, - token_ticker: Vec, - supply: Value, - ) -> DispatchResultWithPostInfo { - let caller = &ensure_signed(origin)?; - let token_id = token_create::( - caller, - public, - input_for_fee, - token_name, - token_ticker, - supply, - )?; - Self::deposit_event(Event::::TokenCreated(token_id, caller.clone())); - Ok(().into()) - } - - /// Create a new NFT from the provided NFT info and identify the specified - /// account as its owner. The ID of the new NFT will be equal to the hash of the info - /// that defines it, as calculated by the runtime system's hashing algorithm. 
- #[pallet::weight(10_000)] - pub fn mint( - origin: OriginFor, - creator_pubkey: sp_core::sr25519::Public, - data_url: Vec, - data: Vec, - ) -> DispatchResultWithPostInfo { - let caller = &ensure_signed(origin)?; - let nft_id = mint::(caller, creator_pubkey, data_url.clone(), data)?; - Self::deposit_event(Event::::Minted(nft_id, caller.clone(), data_url)); - Ok(().into()) - } - #[pallet::weight(T::WeightInfo::send_to_address(16_u32.saturating_add(address.len() as u32)))] pub fn send_to_address( origin: OriginFor, @@ -997,7 +946,7 @@ pub mod pallet { for i in 0..tx.inputs.len() { tx = tx - .sign(&utxos, i, &SR25Pub(pubkey_raw)) + .sign(&utxos, i, &sr25519::Public(pubkey_raw)) .ok_or(DispatchError::Other("Failed to sign the transaction"))?; } @@ -1045,13 +994,19 @@ impl crate::Pallet { 1337 } - pub fn nft_read(nft_id: H256) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { - match crate::pallet::get_utxo_by_tid::(nft_id)?.data { - Some(crate::tokens::TxData::NftMintV1 { - token_id, + pub fn nft_read( + nft_id: &core::primitive::str, + ) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { + match crate::pallet::get_utxo_by_token_id::( + crate::tokens::TokenId::from_string(&nft_id).ok()?, + )? + .data + { + Some(crate::tokens::OutputData::NftMintV1 { data_hash, - metadata_URI, - }) => Some((metadata_URI, data_hash.encode())), + metadata_uri, + .. + }) => Some((metadata_uri, data_hash.encode())), _ => None, } } diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index b53300b..3331b67 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -1,10 +1,19 @@ #![cfg_attr(not(feature = "std"), no_std)] +// use crate::ss58_nostd::*; +// use crate::TransactionOutputFor; +use crate::base58_nostd::{FromBase58, FromBase58Error, ToBase58}; use codec::{Decode, Encode}; +// use frame_support::sp_runtime::traits::{BlakeTwo256, Hash}; +use frame_support::ensure; use frame_support::{dispatch::Vec, RuntimeDebug}; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::{sr25519::Public, H256}; +#[cfg(feature = "std")] +use sp_core::crypto::Ss58Codec; +use sp_core::{H160, H256}; + +const LENGTH_BYTES_TO_REPRESENT_ID: usize = 20; pub type Value = u128; @@ -15,11 +24,99 @@ impl Mlt { } } -pub type TokenId = H256; +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +enum TokenIdInner { + // todo: Need to check this + MLT, + Asset(H160), +} + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +pub struct TokenId { + inner: TokenIdInner, +} + +impl TokenId { + pub fn mlt() -> TokenId { + TokenId { + inner: TokenIdInner::MLT, + } + } + + pub fn new_asset(first_input_hash: H256) -> TokenId { + TokenId { + // We are loosing the first bytes of H256 over here + inner: TokenIdInner::Asset(H160::from(first_input_hash)), + } + } + + pub fn to_string(&self) -> Vec { + match self.inner { + TokenIdInner::MLT => vec![], + TokenIdInner::Asset(hash) => hash.as_bytes().to_base58().to_vec(), + } + } + + fn hash160_from_bytes(bytes: &[u8]) -> Result { + ensure!( + bytes.len() == LENGTH_BYTES_TO_REPRESENT_ID, + "Unexpected length of the asset ID" + ); + let mut buffer = [0u8; 20]; + buffer.copy_from_slice(bytes); + Ok(H160::from(buffer)) + } + + pub fn from_string(data: &str) -> Result { + let data = data.from_base58().map_err(|x| match x { + FromBase58Error::InvalidBase58Character { .. 
} => "Invalid Base58 character", + FromBase58Error::InvalidBase58Length => "Invalid Base58 length", + })?; + + let hash = TokenId::hash160_from_bytes(data.as_slice())?; + + Ok(TokenId { + inner: TokenIdInner::Asset(hash), + }) + } +} + +// We should implement it for Ss58Codec +impl AsMut<[u8]> for TokenId { + fn as_mut(&mut self) -> &mut [u8] { + match self.inner { + TokenIdInner::MLT => &mut [], + TokenIdInner::Asset(ref mut hash) => hash.as_bytes_mut(), + } + } +} + +// We should implement it for Ss58Codec +impl AsRef<[u8]> for TokenId { + fn as_ref(&self) -> &[u8] { + match self.inner { + TokenIdInner::MLT => &[], + TokenIdInner::Asset(ref hash) => hash.as_ref(), + } + } +} + +// We should implement it for Ss58Codec +impl Default for TokenId { + fn default() -> Self { + TokenId::mlt() + } +} + +#[cfg(feature = "std")] +// Unfortunately, the default codec can't be used with std +impl Ss58Codec for TokenId {} #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] -pub enum TxData { +pub enum OutputData { // TokenTransfer data to another user. If it is a token, then the token data must also be transferred to the recipient. #[codec(index = 1)] TokenTransferV1 { token_id: TokenId, amount: u128 }, @@ -31,7 +128,7 @@ pub enum TxData { amount_to_issue: u128, // Should be not more than 18 numbers number_of_decimals: u8, - metadata_URI: Vec, + metadata_uri: Vec, }, // Burning a token or NFT #[codec(index = 3)] @@ -44,7 +141,7 @@ pub enum TxData { NftMintV1 { token_id: TokenId, data_hash: NftDataHash, - metadata_URI: Vec, + metadata_uri: Vec, }, } diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs new file mode 100644 index 0000000..2aee0ed --- /dev/null +++ b/pallets/utxo/src/verifier.rs @@ -0,0 +1,143 @@ +use crate::tokens::{OutputData, TokenId}; +use crate::{/*Transaction,*/ TransactionFor, TransactionOutputFor}; +use frame_support::ensure; +use frame_support::pallet_prelude::ValidTransaction; +use sp_core::sp_std::collections::btree_map::BTreeMap; +use sp_core::H256; + +pub struct TransactionVerifier<'a, T: frame_system::Config> { + tx: &'a TransactionFor, + input_map: Option>>, + output_map: Option>>, +} + +impl TransactionVerifier<'_, T> { + pub fn new(tx: &TransactionFor) -> TransactionVerifier { + TransactionVerifier { + tx, + input_map: None, + output_map: None, + } + } + + fn get_token_id_from_input(_outpoint: H256) -> TokenId { + unimplemented!() + } + + fn get_token_id_from_output(output: &TransactionOutputFor) -> TokenId { + match output.data { + Some(OutputData::TokenTransferV1 { ref token_id, .. }) + | Some(OutputData::TokenIssuanceV1 { ref token_id, .. }) + | Some(OutputData::NftMintV1 { ref token_id, .. }) => token_id.clone(), + Some(OutputData::TokenBurnV1 { .. 
}) => unreachable!(),
+            _ => TokenId::mlt(),
+        }
+    }
+
+    fn get_output_by_outpoint(_outpoint: H256) -> TransactionOutputFor<T> {
+        unimplemented!()
+    }
+
+    pub fn checking_inputs(&mut self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        //ensure rather than assert to avoid panic
+        //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries
+        ensure!(!self.tx.inputs.is_empty(), "no inputs");
+        ensure!(
+            self.tx.inputs.len() < (u32::MAX as usize),
+            "too many inputs"
+        );
+
+        //ensure each input is used only a single time
+        //maps each input into btree
+        //if map.len() > num of inputs then fail
+        //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html
+        //WARNING workshop code has a bug here
+        //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs
+        //input_map.len() > transaction.inputs.len() //THIS IS WRONG
+
+        let input_map: BTreeMap<TokenId, TransactionOutputFor<T>> = self
+            .tx
+            .inputs
+            .iter()
+            .map(|input| {
+                (
+                    TransactionVerifier::<'_, T>::get_token_id_from_input(input.outpoint),
+                    TransactionVerifier::<'_, T>::get_output_by_outpoint(input.outpoint),
+                )
+            })
+            .collect();
+        //we want map size and input size to be equal to ensure each is used only once
+        ensure!(
+            input_map.len() == self.tx.inputs.len(),
+            "each input should be used only once"
+        );
+        self.input_map = Some(input_map);
+        unimplemented!()
+    }
+
+    pub fn checking_outputs(&mut self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        //ensure rather than assert to avoid panic
+        //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries
+        ensure!(!self.tx.outputs.is_empty(), "no outputs");
+        ensure!(
+            self.tx.outputs.len() < (u32::MAX as usize),
+            "too many outputs"
+        );
+
+        //ensure each output is unique
+        //map each output to btree to count unique elements
+        //WARNING example code has a bug here
+        //out_map.len() != transaction.outputs.len() //THIS IS WRONG
+
+        let output_map: BTreeMap<TokenId, TransactionOutputFor<T>> = self
+            .tx
+            .outputs
+            .iter()
+            .map(|output| {
+                (
+                    TransactionVerifier::<'_, T>::get_token_id_from_output(&output),
+                    output.clone(),
+                )
+            })
+            .collect();
+        //check each output is defined only once
+        ensure!(
+            output_map.len() == self.tx.outputs.len(),
+            "each output should be used once"
+        );
+        self.output_map = Some(output_map);
+        unimplemented!()
+    }
+
+    pub fn checking_signatures(&self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        unimplemented!()
+    }
+
+    pub fn checking_utxos_exists(&self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        unimplemented!()
+    }
+
+    pub fn checking_tokens_transferring(&self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        unimplemented!()
+    }
+
+    pub fn checking_tokens_issued(&self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        unimplemented!()
+    }
+
+    pub fn checking_nft_mint(&self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        unimplemented!()
+    }
+
+    pub fn checking_assets_burn(&self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        unimplemented!()
+    }
+
+    pub fn calculating_reward(&self) -> Result<TransactionVerifier<'_, T>, &'static str> {
+        unimplemented!()
+    }
+
+    pub fn collect_result(&self) -> Result<ValidTransaction, &'static str> {
+        unimplemented!()
+    }
+}

From e0e22e4c6f88159f2597162d0edbda5ee36d5527 Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Mon, 25 Oct 2021 10:50:33 +0300
Subject: [PATCH 13/53] We need to agree on an approach to verification

Signed-off-by: sinitcin
---
 Lets talk about verification.md | 101 ++++++++++++++++++++++++++++++++
 1 file changed, 101 insertions(+)
 create mode 100644 Lets talk about verification.md

diff --git a/Lets talk about verification.md b/Lets talk about verification.md
new file mode 100644
index 0000000..5c17641
--- /dev/null
+++ b/Lets talk about verification.md
@@ -0,0 +1,101 @@
+**This description is still approximate and not precise; we need to settle on an approach and agree on the checks.**
+
+## Draft TransactionVerifier
+
+I suggest adding a structure that will contain:
+
+```rust
+pub struct TransactionVerifier<'a, T: frame_system::Config> {
+    // Pointer to the tx that we have to check
+    tx: &'a TransactionFor<T>,
+    // All inputs, to avoid repeated searches in the loop
+    all_inputs_map: BTreeMap<TokenId, TransactionOutputFor<T>>,
+    // All outputs, to avoid repeated searches in the loop
+    all_outputs_map: BTreeMap<TokenId, TransactionOutputFor<T>>,
+    // Using a TokenId, you can get the total amount of this token over all inputs
+    total_value_of_input_tokens: BTreeMap<TokenId, Value>,
+    // Using a TokenId, you can get the total amount of this token over all outputs
+    total_value_of_output_tokens: BTreeMap<TokenId, Value>,
+    // A set of transaction verification functions; this approach lets us remove unnecessary loops, which speeds up verification
+    set_of_checks: Vec<&'a mut FnMut(...)>,
+    // ...
+    // I may add a priority field to the set of checks. I'm still thinking here.
+}
+```
+
+We will use this struct in the utxo pallet like this (a compilable sketch of the chaining follows the list of checks below):
+
+```rust
+    pub fn validate_transaction<T: Config>(
+        tx: &TransactionFor<T>,
+    ) -> Result<ValidTransaction, &'static str> {
+        TransactionVerifier::<'_, T>::new(tx)
+            .checking_inputs()
+            .checking_outputs()
+            .checking_utxos_exists()
+            .checking_signatures()
+            .checking_tokens_transferring()
+            .checking_tokens_issued()
+            .checking_nft_mint()
+            .checking_assets_burn()
+            .calculating_reward()
+            .collect_result()
+    }
+```
+
+When creating a new instance of this structure, we must initialize the fields.
+
+Each subsequent check adds a new instance of the function to `set_of_checks`, and all of them will be called in `collect_result`.
+
+At the moment we can split the verification function into these parts:
+
+* `checking_inputs`
+    * Checks that inputs exist in the transaction
+    * Checks that the number of inputs does not exceed the maximum allowed; in the code it is currently `u32::MAX`
+    * Ensures each input is used only a single time
+
+* `checking_outputs`
+    * Checks that outputs exist in the transaction
+    * Checks that the number of outputs does not exceed the maximum allowed; in the code it is currently `u32::MAX`
+    * Ensures each output is unique
+    * An output value must be nonzero
+    * An output can't already exist in the UtxoStore
+
+* `checking_utxos_exists`
+    * Resolves the transaction inputs by looking up the UTXOs they spend.
+
+* `checking_signatures`
+    * If all spent UTXOs are available, checks the math and the signatures.
+
+* `checking_tokens_transferring`
+    * We have to check that the total sum of input tokens is less than or equal to the sum of output tokens. (Or should it be exactly equal?)
+    * All inputs carrying token data must be correctly mapped to outputs
+    * If an NFT is sent, we must not burn or lose its data
+
+* `checking_tokens_issued`
+    * We must check the correctness of the issued tokens
+    * We have to check the lengths of `metadata_uri` and `ticker`
+    * We must check the correctness of `value` and `decimal`
+
+* `checking_nft_mint`
+    * We have to check the uniqueness of the digital data; only one NFT can refer to one object
+    * We have to check the length of `metadata_uri`
+
+* `checking_assets_burn`
+    * Does the burn exceed the amount that is available?
+    * Does the `token_id` being burned actually exist?
+
+* `calculating_reward`
+    * Just collects the MLT for the transaction reward.
+
+* `collect_result`
+    * Calls all of these functions in one loop.
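+
+## Sketch of the chaining
+
+Below is a minimal, self-contained sketch of the chaining idea; it is only my illustration, not the final pallet code. `SimpleTx` and `Verifier` are stand-ins for the real pallet types so that the snippet compiles on its own. Each check consumes the verifier and returns `Result<Self, &'static str>`, so `?` stops the pipeline at the first failed check:
+
+```rust
+struct SimpleTx {
+    inputs: Vec<u64>,
+    outputs: Vec<u64>,
+}
+
+struct Verifier<'a> {
+    tx: &'a SimpleTx,
+}
+
+impl<'a> Verifier<'a> {
+    fn new(tx: &'a SimpleTx) -> Self {
+        Verifier { tx }
+    }
+
+    // Each check returns the verifier on success, so the checks chain with `?`.
+    fn checking_inputs(self) -> Result<Self, &'static str> {
+        if self.tx.inputs.is_empty() {
+            return Err("no inputs");
+        }
+        Ok(self)
+    }
+
+    fn checking_outputs(self) -> Result<Self, &'static str> {
+        if self.tx.outputs.is_empty() {
+            return Err("no outputs");
+        }
+        Ok(self)
+    }
+
+    // In the real verifier this would assemble the ValidTransaction.
+    fn collect_result(self) -> Result<(), &'static str> {
+        Ok(())
+    }
+}
+
+fn validate(tx: &SimpleTx) -> Result<(), &'static str> {
+    Verifier::new(tx).checking_inputs()?.checking_outputs()?.collect_result()
+}
+
+fn main() {
+    let good = SimpleTx { inputs: vec![1], outputs: vec![2] };
+    assert!(validate(&good).is_ok());
+
+    let bad = SimpleTx { inputs: vec![], outputs: vec![2] };
+    assert_eq!(validate(&bad), Err("no inputs"));
+}
+```
+
+Whether the checks end up as plain methods, as in this sketch, or as closures collected into `set_of_checks` is exactly the design decision we need to agree on.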
+
+## Questions
+* Do we need other checks?
+* What do we need for checking Bitcoin Script?
+* What do we need for checking contracts?
+* If we can check an output address here, and add a way to look entries up in the UtxoStore by any address format, then we can remove `fn pick_utxo` and `fn send_to_address`, can't we?
+
+I'm glad to see any suggestions or criticism.
\ No newline at end of file

From eb2d6d322533e228da30bd4456b1df308473963d Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Mon, 25 Oct 2021 20:03:21 +0300
Subject: [PATCH 14/53] Added the first test for tokens - checking_tokens_issuance

Signed-off-by: sinitcin
---
 pallets/utxo/src/base58_nostd.rs |  33 ++-
 pallets/utxo/src/lib.rs          |  36 +--
 pallets/utxo/src/tests.rs        | 220 ++++++++++++---
 pallets/utxo/src/tokens.rs       |  13 +-
 pallets/utxo/src/verifier.rs     | 468 ++++++++++++++++++++++---------
 5 files changed, 555 insertions(+), 215 deletions(-)

diff --git a/pallets/utxo/src/base58_nostd.rs b/pallets/utxo/src/base58_nostd.rs
index fd2a58a..ae86080 100644
--- a/pallets/utxo/src/base58_nostd.rs
+++ b/pallets/utxo/src/base58_nostd.rs
@@ -4,6 +4,9 @@
 //! commit hash: c6e7d37
 //! works only up to 128 bytes
 
+use sp_std::vec;
+use sp_std::vec::Vec;
+
 const ALPHABET: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";
 
 const B58_DIGITS_MAP: &'static [i8] = &[
@@ -203,27 +206,27 @@ mod tests {
 
     #[test]
     fn test_to_base58_basic() {
-        assert_eq!(b"".to_base58(), "");
-        assert_eq!(&[32].to_base58(), "Z");
-        assert_eq!(&[45].to_base58(), "n");
-        assert_eq!(&[48].to_base58(), "q");
-        assert_eq!(&[49].to_base58(), "r");
-        assert_eq!(&[57].to_base58(), "z");
-        assert_eq!(&[45, 49].to_base58(), "4SU");
-        assert_eq!(&[49, 49].to_base58(), "4k8");
-        assert_eq!(b"abc".to_base58(), "ZiCa");
-        assert_eq!(b"1234598760".to_base58(), "3mJr7AoUXx2Wqd");
+        assert_eq!(b"".to_base58(), "".as_bytes());
+        assert_eq!(&[32].to_base58(), "Z".as_bytes());
+        assert_eq!(&[45].to_base58(), "n".as_bytes());
+        assert_eq!(&[48].to_base58(), "q".as_bytes());
+        assert_eq!(&[49].to_base58(), "r".as_bytes());
+        assert_eq!(&[57].to_base58(), "z".as_bytes());
+        assert_eq!(&[45, 49].to_base58(), "4SU".as_bytes());
+        assert_eq!(&[49, 49].to_base58(), "4k8".as_bytes());
+        assert_eq!(b"abc".to_base58(), "ZiCa".as_bytes());
+        assert_eq!(b"1234598760".to_base58(), "3mJr7AoUXx2Wqd".as_bytes());
         assert_eq!(
             b"abcdefghijklmnopqrstuvwxyz".to_base58(),
-            "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f"
+            "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".as_bytes()
         );
     }
 
     #[test]
     fn test_to_base58_initial_zeros() {
-        assert_eq!(b"\0abc".to_base58(), "1ZiCa");
-        assert_eq!(b"\0\0abc".to_base58(), "11ZiCa");
-        assert_eq!(b"\0\0\0abc".to_base58(), "111ZiCa");
-        assert_eq!(b"\0\0\0\0abc".to_base58(), "1111ZiCa");
+        assert_eq!(b"\0abc".to_base58(), "1ZiCa".as_bytes());
+        assert_eq!(b"\0\0abc".to_base58(), "11ZiCa".as_bytes());
+        assert_eq!(b"\0\0\0abc".to_base58(), "111ZiCa".as_bytes());
+        assert_eq!(b"\0\0\0\0abc".to_base58(), "1111ZiCa".as_bytes());
     }
 }
diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs
index 4cb5c6a..66a1b87 100644
--- a/pallets/utxo/src/lib.rs
+++ b/pallets/utxo/src/lib.rs
@@ -29,6 +29,7 @@ mod sign;
 #[cfg(test)]
 mod tests;
 pub mod tokens;
+#[macro_use]
 pub mod verifier;
 pub mod weights;
 
@@ -36,7 +37,8 @@ pub mod weights;
 pub mod pallet {
     // use crate::sign::{self, Scheme};
     use crate::tokens::{/*Mlt,*/ OutputData, TokenId, Value};
-    use crate::verifier::TransactionVerifier;
+    // use crate::verifier::TransactionVerifier;
+    use super::implement_transaction_verifier;
     use bech32;
     use chainscript::Script;
     use codec::{Decode, Encode};
@@ -55,16 +57,17 @@ pub mod pallet {
     #[cfg(feature = "std")]
     use
serde::{Deserialize, Serialize}; use sp_core::{ - // sp_std::collections::btree_map::BTreeMap, + sp_std::collections::btree_map::BTreeMap, sp_std::{convert::TryInto, str, vec}, sr25519, testing::SR25519, - H256, - H512, + H256, H512, }; use sp_runtime::traits::AtLeast32Bit; // use sp_runtime::DispatchErrorWithPostInfo; + implement_transaction_verifier!(); + #[pallet::error] pub enum Error { /// Account balance must be greater than or equal to the transfer amount. @@ -91,7 +94,7 @@ pub mod pallet { Unapproved, /// The source account would not survive the transfer and it needs to stay alive. WouldDie, - // Thrown when there is an attempt to mint a duplicate collection. + /// Thrown when there is an attempt to mint a duplicate collection. NftCollectionExists, } @@ -315,7 +318,7 @@ pub mod pallet { #[pallet::storage] #[pallet::getter(fn utxo_store)] - pub(super) type UtxoStore = + pub type UtxoStore = StorageMap<_, Identity, H256, Option>, ValueQuery>; #[pallet::storage] @@ -411,17 +414,16 @@ pub mod pallet { pub fn validate_transaction( tx: &TransactionFor, ) -> Result { - TransactionVerifier::<'_, T>::new(tx) - .checking_inputs()? - .checking_outputs()? - .checking_signatures()? - .checking_utxos_exists()? - .checking_tokens_transferring()? - .checking_tokens_issued()? - .checking_nft_mint()? - .checking_assets_burn()? - .calculating_reward()? - .collect_result() + let mut tv = TransactionVerifier::<'_, T>::new(tx)?; + tv.checking_outputs()?; + tv.checking_signatures()?; + tv.checking_utxos_exists()?; + tv.checking_tokens_transferring()?; + tv.checking_tokens_issued()?; + tv.checking_nft_mint()?; + tv.checking_assets_burn()?; + tv.calculating_reward()?; + tv.collect_result() /* //ensure rather than assert to avoid panic diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index c9a72bf..11ee7ae 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -27,6 +27,7 @@ use frame_support::{ sp_runtime::traits::{BlakeTwo256, Hash}, }; +use crate::tokens::OutputData; use sp_core::{sp_std::vec, sr25519::Public, testing::SR25519, H256, H512}; fn tx_input_gen_no_signature() -> (TransactionOutput, TransactionInput) { @@ -532,48 +533,181 @@ fn test_send_to_address() { }) } +// #[test] +// fn nft_test() { +// execute_with_alice(|alice_pub_key| { +// // Let's create a new test nft +// let nft_id = BlakeTwo256::hash_of(&b"TEST"); +// let instance = TokenInstance::new_nft( +// nft_id, +// (*b"01010101010101010101010101010101").to_vec(), +// b"http://facebook.com".to_vec(), +// alice_pub_key.to_vec(), +// ); +// +// if let TokenInstance::Nft { +// id, +// data, +// data_url, +// creator_pubkey, +// .. +// } = instance +// { +// let mut tx = Transaction { +// inputs: vec![ +// // 100 MLT +// tx_input_gen_no_signature(), +// ], +// outputs: vec![TransactionOutput::new_nft( +// id, +// data.to_vec(), +// data_url, +// H256::from_slice(creator_pubkey.as_slice()), +// )], +// }; +// let alice_sig = crypto::sr25519_sign(SR25519, &alice_pub_key, &tx.encode()).unwrap(); +// tx.inputs[0].witness = alice_sig.0.to_vec(); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); +// } +// +// // it should allow to write and read ? 
+// // let rsp = await dataToken.readData(firstTokenId);
+// // assert.equal(rsp, empty);
+// // await dataToken.writeData(firstTokenId, data);
+// // rsp = await dataToken.readData(firstTokenId);
+// // assert.equal(rsp, data);
+// });
+
+// * Testing token creation:
+use crate::tokens::TokenId;
 #[test]
+// Simple creation of tokens
+fn checking_tokens_issuance() {
+    execute_with_alice(|alice_pub_key| {
+        // Alice issues 1_000_000_000 units of a new token to herself;
+        // the issuance output itself carries no MLT value.
+        let (utxo0, input0) = tx_input_gen_no_signature();
+        let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref());
+
+        let output = TransactionOutput {
+            value: 0,
+            destination: Destination::Pubkey(alice_pub_key),
+            // TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)),
+            data: Some(OutputData::TokenIssuanceV1 {
+                token_id: TokenId::new_asset(first_input_hash),
+                token_ticker: "Ben's token".as_bytes().to_vec(),
+                amount_to_issue: 1_000_000_000,
+                // Should be not more than 18 digits
+                number_of_decimals: 2,
+                metadata_uri: "facebook.com".as_bytes().to_vec(),
+            }),
+        };
+        let tx = Transaction {
+            inputs: vec![input0],
+            outputs: vec![output],
+        }
+        .sign_unchecked(&[utxo0], 0, &alice_pub_key);
+
+        let new_utxo_hash = tx.outpoint(0);
+
+        let (_, init_utxo) = genesis_utxo();
+        assert!(UtxoStore::<Test>::contains_key(H256::from(init_utxo)));
+        assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx));
+        assert!(!UtxoStore::<Test>::contains_key(H256::from(init_utxo)));
+        assert!(UtxoStore::<Test>::contains_key(new_utxo_hash));
+        // The issuance output stores no MLT, so its value is 0.
+        assert_eq!(0, UtxoStore::<Test>::get(new_utxo_hash).unwrap().value);
+    })
+}
+
+#[test]
+// Simple creation of an NFT
+fn checking_nft_mint() {}
+
+#[test]
+// An NFT must be unique; we can't create several NFTs for one item
+fn checking_nft_unique() {}
+
+#[test]
+// Creating a token with a pre-existing ID, i.e. re-creating an already created token.
+fn checking_tokens_twice_creation() {} + +// ** Creating a token with corrupted data +//////////////////////////////////////////////////////////////////////////////////////////////// + +#[test] +//Data field of zero length +fn checking_tokens_with_empty_data() {} + +#[test] +// The data field of the maximum allowed length filled with random garbage +fn checking_tokens_with_junk_data() {} + +#[test] +// Creation of a token with 0 issue amount +fn checking_tokens_creation_with_zero_amount() {} + +#[test] +// Generating a token with a long URI string +fn checking_tokens_creation_with_long_uri() {} + +// ** Creation of a token without input with MLT to pay commission +//////////////////////////////////////////////////////////////////////////////////////////////// + +#[test] +// Test tx where Input with token and without MLT, output has token (without MLT) +fn checking_tokens_creation_without_mlt() {} + +#[test] +// Test tx where Input with token and without MLT, output has MLT (without token) +fn checking_tokens_creation_with_random_inputs_and_outputs() {} + +#[test] +// Test tx where Input without token but with MLT, output has MLT and token +fn checking_tokens_creation_without_inputs() { + // Test tx where no inputs for token } + +#[test] +// Test where less MLT at the input than you need to pay the commission +fn checking_tokens_creation_with_insufficient_fee() {} + +#[test] +// Test tx where Input and output have a token but with zero value +fn checking_tokens_creation_with_zero_value() {} + +// * Testing token transfer +//////////////////////////////////////////////////////////////////////////////////////////////// + +// #[test] +// // +// fn checking_tokens_() {} +// #[test] +// // +// fn checking_tokens_() {} +// #[test] +// // +// fn checking_tokens_() {} +// #[test] +// // +// fn checking_tokens_() {} +// #[test] +// // +// fn checking_tokens_() {} +// #[test] +// // +// fn checking_tokens_() {} +// #[test] +// // +// fn checking_tokens_() {} +// #[test] +// // +// fn checking_tokens_() {} +// #[test] +// // +// fn checking_tokens_() {} + +// Testing token transfer + +// Testing the compatibility of the old version with the new one + +// Testing burning tokens diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index 3331b67..f84a7c5 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -54,7 +54,7 @@ impl TokenId { pub fn to_string(&self) -> Vec { match self.inner { - TokenIdInner::MLT => vec![], + TokenIdInner::MLT => sp_std::vec![], TokenIdInner::Asset(hash) => hash.as_bytes().to_base58().to_vec(), } } @@ -154,3 +154,14 @@ pub enum NftDataHash { Raw(Vec), // Or any type that you want to implement } + +impl OutputData { + pub(crate) fn id(&self) -> Option { + match self { + OutputData::TokenTransferV1 { ref token_id, .. } + | OutputData::TokenIssuanceV1 { ref token_id, .. } + | OutputData::NftMintV1 { ref token_id, .. 
} => Some(token_id.clone()), + _ => None, + } + } +} diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs index 2aee0ed..6767597 100644 --- a/pallets/utxo/src/verifier.rs +++ b/pallets/utxo/src/verifier.rs @@ -1,143 +1,333 @@ -use crate::tokens::{OutputData, TokenId}; -use crate::{/*Transaction,*/ TransactionFor, TransactionOutputFor}; -use frame_support::ensure; -use frame_support::pallet_prelude::ValidTransaction; -use sp_core::sp_std::collections::btree_map::BTreeMap; -use sp_core::H256; - -pub struct TransactionVerifier<'a, T: frame_system::Config> { - tx: &'a TransactionFor, - input_map: Option>>, - output_map: Option>>, -} +// DRY (Don't Repeat Yourself) +#[macro_export] +macro_rules! implement_transaction_verifier { + () => { + use crate::sign::TransactionSigMsg; + use chainscript::sighash::SigHash; -impl TransactionVerifier<'_, T> { - pub fn new(tx: &TransactionFor) -> TransactionVerifier { - TransactionVerifier { - tx, - input_map: None, - output_map: None, + pub struct TransactionVerifier<'a, T: Config> { + tx: &'a TransactionFor, + all_inputs_map: BTreeMap)>, + all_outputs_map: BTreeMap>, + total_value_of_input_tokens: BTreeMap, + total_value_of_output_tokens: BTreeMap, } - } - - fn get_token_id_from_input(_outpoint: H256) -> TokenId { - unimplemented!() - } - - fn get_token_id_from_output(output: &TransactionOutputFor) -> TokenId { - match output.data { - Some(OutputData::TokenTransferV1 { ref token_id, .. }) - | Some(OutputData::TokenIssuanceV1 { ref token_id, .. }) - | Some(OutputData::NftMintV1 { ref token_id, .. }) => token_id.clone(), - Some(OutputData::TokenBurnV1 { .. }) => unreachable!(), - _ => TokenId::mlt(), + + impl TransactionVerifier<'_, T> { + // Turn Vector into BTreeMap + fn init_inputs( + tx: &TransactionFor, + ) -> BTreeMap)> { + let input_map: BTreeMap)> = tx + .inputs + .iter() + .filter_map(|input| { + let token_id = + TransactionVerifier::<'_, T>::get_token_id_from_input(input.outpoint) + .ok()?; + let output = + TransactionVerifier::<'_, T>::get_output_by_outpoint(input.outpoint)?; + Some((token_id, (input.clone(), output))) + }) + .collect(); + input_map + } + // Turn Vector into BTreeMap + fn init_outputs(tx: &TransactionFor) -> BTreeMap> { + let output_map: BTreeMap> = tx + .outputs + .iter() + .map(|output| { + ( + TransactionVerifier::<'_, T>::get_token_id_from_output(&output), + output.clone(), + ) + }) + .collect(); + output_map + } + + fn init_total_value_of_input_tokens( + all_inputs_map: &BTreeMap)>, + ) -> Result, &'static str> { + let mut total_value_of_input_tokens: BTreeMap = BTreeMap::new(); + let mut mlt_amount: Value = 0; + for (_, (_, (_, input_utxo))) in all_inputs_map.iter().enumerate() { + match &input_utxo.data { + Some(OutputData::TokenIssuanceV1 { + ref token_id, + amount_to_issue, + .. + }) => { + // If token has just created we can't meet another amount here. + total_value_of_input_tokens.insert(token_id.clone(), *amount_to_issue); + } + Some(OutputData::TokenTransferV1 { + ref token_id, + amount, + .. + }) => { + total_value_of_input_tokens.insert( + token_id.clone(), + total_value_of_input_tokens + .get(token_id) + .unwrap_or(&0) + .checked_add(*amount) + .ok_or("input value overflow")?, + ); + } + Some(OutputData::TokenBurnV1 { .. }) => { + // Nothing to do here because tokens no longer exist. + } + Some(OutputData::NftMintV1 { ref token_id, .. }) => { + // If NFT has just created we can't meet another NFT part here. 
+ total_value_of_input_tokens.insert(token_id.clone(), 1); + } + None => { + mlt_amount = mlt_amount + .checked_add(input_utxo.value) + .ok_or("input value overflow")?; + } + } + } + total_value_of_input_tokens.insert(TokenId::mlt(), mlt_amount); + Ok(total_value_of_input_tokens) + } + + fn init_total_value_of_output_tokens( + all_outputs_map: &BTreeMap>, + ) -> Result, &'static str> { + let mut total_value_of_output_tokens: BTreeMap = BTreeMap::new(); + let mut mlt_amount: Value = 0; + for x in all_outputs_map { + match &x.1.data { + Some(OutputData::TokenIssuanceV1 { + ref token_id, + amount_to_issue, + .. + }) => { + // If token has just created we can't meet another amount here. + total_value_of_output_tokens.insert(token_id.clone(), *amount_to_issue); + } + Some(OutputData::TokenTransferV1 { + ref token_id, + amount, + .. + }) => { + total_value_of_output_tokens.insert( + token_id.clone(), + total_value_of_output_tokens + .get(token_id) + .unwrap_or(&0) + .checked_add(*amount) + .ok_or("input value overflow")?, + ); + } + Some(OutputData::TokenBurnV1 { .. }) => { + // Nothing to do here because tokens no longer exist. + } + Some(OutputData::NftMintV1 { ref token_id, .. }) => { + // If NFT has just created we can't meet another NFT part here. + total_value_of_output_tokens.insert(token_id.clone(), 1); + } + None => { + mlt_amount = + mlt_amount.checked_add(x.1.value).ok_or("input value overflow")?; + } + } + } + total_value_of_output_tokens.insert(TokenId::mlt(), mlt_amount); + Ok(total_value_of_output_tokens) + } + + pub fn new(tx: &TransactionFor) -> Result, &'static str> { + let all_inputs_map = Self::init_inputs(&tx); + let all_outputs_map = Self::init_outputs(&tx); + let total_value_of_input_tokens = + Self::init_total_value_of_input_tokens(&all_inputs_map)?; + let total_value_of_output_tokens = + Self::init_total_value_of_output_tokens(&all_outputs_map)?; + Ok(TransactionVerifier { + tx, + all_inputs_map, + all_outputs_map, + total_value_of_input_tokens, + total_value_of_output_tokens, + }) + } + + fn get_token_id_from_input(outpoint: H256) -> Result { + //if let Some(input_utxo) = crate::UtxoStore::::get(&outpoint) { + if let Some(input_utxo) = >::get(outpoint) { + match input_utxo.data { + Some(data) => data.id().ok_or("Token had burned or input incorrect"), + None => Ok(TokenId::mlt()), + } + } else { + Ok(TokenId::mlt()) + } + } + + fn get_token_id_from_output(output: &TransactionOutputFor) -> TokenId { + match output.data { + Some(OutputData::TokenTransferV1 { ref token_id, .. }) + | Some(OutputData::TokenIssuanceV1 { ref token_id, .. }) + | Some(OutputData::NftMintV1 { ref token_id, .. }) => token_id.clone(), + Some(OutputData::TokenBurnV1 { .. 
}) => unreachable!(), + _ => TokenId::mlt(), + } + } + + fn get_output_by_outpoint(outpoint: H256) -> Option> { + >::get(outpoint) + } + + pub fn checking_inputs(&mut self) -> Result<(), &'static str> { + //ensure rather than assert to avoid panic + //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries + ensure!(!self.tx.inputs.is_empty(), "no inputs"); + ensure!( + self.tx.inputs.len() < (u32::MAX as usize), + "too many inputs" + ); + + //ensure each input is used only a single time + //maps each input into btree + //if map.len() > num of inputs then fail + //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html + //WARNING workshop code has a bug here + //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs + //input_map.len() > transaction.inputs.len() //THIS IS WRONG + + //we want map size and input size to be equal to ensure each is used only once + ensure!( + self.all_inputs_map.len() == self.tx.inputs.len(), + "each input should be used only once" + ); + Ok(()) + } + + pub fn checking_outputs(&mut self) -> Result<(), &'static str> { + //ensure rather than assert to avoid panic + //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries + ensure!(!self.tx.outputs.is_empty(), "no outputs"); + ensure!( + self.tx.outputs.len() < (u32::MAX as usize), + "too many outputs" + ); + + //ensure each output is unique + //map each output to btree to count unique elements + //WARNING example code has a bug here + //out_map.len() != transaction.outputs.len() //THIS IS WRONG + + //check each output is defined only once + ensure!( + self.all_outputs_map.len() == self.tx.outputs.len(), + "each output should be used once" + ); + Ok(()) + } + + pub fn checking_signatures(&self) -> Result<(), &'static str> { + /* + // if all spent UTXOs are available, check the math and signatures + if let Ok(input_utxos) = &input_utxos { + // We have to check sum of input tokens is less or equal to output tokens. + + let mut new_token_exist = false; + for output_token in &outputs_sum { + match inputs_sum.get(&output_token.0) { + Some(input_value) => ensure!( + input_value >= &output_token.1, + "output value must not exceed input value" + ), + None => { + // If the transaction has one an output with a new token ID + if new_token_exist { + frame_support::fail!("input for the token not found") + } else { + new_token_exist = true; + } + } + } + } + */ + + for (index, (_, (input, input_utxo))) in self.all_inputs_map.iter().enumerate() { + let spending: Vec> = self + .all_inputs_map + .iter() + .map(|(_, (_, ref input_utxo))| input_utxo.clone()) + .collect(); + match &input_utxo.destination { + Destination::Pubkey(pubkey) => { + let msg = TransactionSigMsg::construct( + SigHash::default(), + &self.tx, + // todo: Check with Lukas is it correct or no + &spending[..], + index as u64, + u32::MAX, + ); + let ok = crate::sign::Public::Schnorr(*pubkey) + .parse_sig(&input.witness[..]) + .ok_or("bad signature format")? 
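+                                // (parse_sig decodes the witness bytes into a signature;
+                                // verify() then checks it against the sighash message `msg`.)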
+ .verify(&msg); + ensure!(ok, "signature must be valid"); + } + Destination::CreatePP(_, _) => { + log::info!("TODO validate spending of OP_CREATE"); + } + Destination::CallPP(_, _) => { + log::info!("TODO validate spending of OP_CALL"); + } + Destination::ScriptHash(_hash) => { + let witness = input.witness.clone(); + let lock = input.lock.clone(); + crate::script::verify( + &self.tx, + // todo: Check with Lukas is it correct or no + &spending[..], + index as u64, + witness, + lock, + ) + .map_err(|_| "script verification failed")?; + } + } + } + + Ok(()) + } + + pub fn checking_utxos_exists(&self) -> Result<(), &'static str> { + unimplemented!() + } + + pub fn checking_tokens_transferring(&self) -> Result<(), &'static str> { + unimplemented!() + } + + pub fn checking_tokens_issued(&self) -> Result<(), &'static str> { + unimplemented!() + } + + pub fn checking_nft_mint(&self) -> Result<(), &'static str> { + unimplemented!() + } + + pub fn checking_assets_burn(&self) -> Result<(), &'static str> { + unimplemented!() + } + + pub fn calculating_reward(&self) -> Result<(), &'static str> { + unimplemented!() + } + + pub fn collect_result(&self) -> Result { + unimplemented!() + } } - } - - fn get_output_by_outpoint(_outpoint: H256) -> TransactionOutputFor { - unimplemented!() - } - - pub fn checking_inputs(&mut self) -> Result, &'static str> { - //ensure rather than assert to avoid panic - //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries - ensure!(!self.tx.inputs.is_empty(), "no inputs"); - ensure!( - self.tx.inputs.len() < (u32::MAX as usize), - "too many inputs" - ); - - //ensure each input is used only a single time - //maps each input into btree - //if map.len() > num of inputs then fail - //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html - //WARNING workshop code has a bug here - //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs - //input_map.len() > transaction.inputs.len() //THIS IS WRONG - - let input_map: BTreeMap> = self - .tx - .inputs - .iter() - .map(|input| { - ( - TransactionVerifier::<'_, T>::get_token_id_from_input(input.outpoint), - TransactionVerifier::<'_, T>::get_output_by_outpoint(input.outpoint), - ) - }) - .collect(); - //we want map size and input size to be equal to ensure each is used only once - ensure!( - input_map.len() == self.tx.inputs.len(), - "each input should be used only once" - ); - self.input_map = Some(input_map); - unimplemented!() - } - - pub fn checking_outputs(&mut self) -> Result, &'static str> { - //ensure rather than assert to avoid panic - //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries - ensure!(!self.tx.outputs.is_empty(), "no outputs"); - ensure!( - self.tx.outputs.len() < (u32::MAX as usize), - "too many outputs" - ); - - //ensure each output is unique - //map each output to btree to count unique elements - //WARNING example code has a bug here - //out_map.len() != transaction.outputs.len() //THIS IS WRONG - - let output_map: BTreeMap> = self - .tx - .outputs - .iter() - .map(|output| { - ( - TransactionVerifier::<'_, T>::get_token_id_from_output(&output), - output.clone(), - ) - }) - .collect(); - //check each output is defined only once - ensure!( - output_map.len() == self.tx.outputs.len(), - "each output should be used once" - ); - self.output_map = Some(output_map); - unimplemented!() - } - - pub fn checking_signatures(&self) -> Result, &'static str> { - unimplemented!() - } - - pub fn 
checking_utxos_exists(&self) -> Result, &'static str> { - unimplemented!() - } - - pub fn checking_tokens_transferring(&self) -> Result, &'static str> { - unimplemented!() - } - - pub fn checking_tokens_issued(&self) -> Result, &'static str> { - unimplemented!() - } - - pub fn checking_nft_mint(&self) -> Result, &'static str> { - unimplemented!() - } - - pub fn checking_assets_burn(&self) -> Result, &'static str> { - unimplemented!() - } - - pub fn calculating_reward(&self) -> Result, &'static str> { - unimplemented!() - } - - pub fn collect_result(&self) -> Result { - unimplemented!() - } + }; } From dfecda348bde50ca2a03b18e5daabf88d428bca9 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 25 Oct 2021 23:29:05 +0300 Subject: [PATCH 15/53] Added checking for signature and for tokens amount. Signed-off-by: sinitcin --- .gitignore | 2 + pallets/utxo/src/lib.rs | 2 + pallets/utxo/src/verifier.rs | 75 +++++++++++++++++++++++------------- 3 files changed, 53 insertions(+), 26 deletions(-) diff --git a/.gitignore b/.gitignore index 84be25f..be4522a 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,5 @@ Cargo.lock .direnv +/test/config.ini +/customSpec.json diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 66a1b87..c2bc576 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -415,9 +415,11 @@ pub mod pallet { tx: &TransactionFor, ) -> Result { let mut tv = TransactionVerifier::<'_, T>::new(tx)?; + tv.checking_inputs()?; tv.checking_outputs()?; tv.checking_signatures()?; tv.checking_utxos_exists()?; + tv.checking_amounts()?; tv.checking_tokens_transferring()?; tv.checking_tokens_issued()?; tv.checking_nft_mint()?; diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs index 6767597..d00141e 100644 --- a/pallets/utxo/src/verifier.rs +++ b/pallets/utxo/src/verifier.rs @@ -11,6 +11,7 @@ macro_rules! implement_transaction_verifier { all_outputs_map: BTreeMap>, total_value_of_input_tokens: BTreeMap, total_value_of_output_tokens: BTreeMap, + new_utxos: Vec>, } impl TransactionVerifier<'_, T> { @@ -143,6 +144,7 @@ macro_rules! implement_transaction_verifier { pub fn new(tx: &TransactionFor) -> Result, &'static str> { let all_inputs_map = Self::init_inputs(&tx); let all_outputs_map = Self::init_outputs(&tx); + let new_utxos = Vec::new(); let total_value_of_input_tokens = Self::init_total_value_of_input_tokens(&all_inputs_map)?; let total_value_of_output_tokens = @@ -153,6 +155,7 @@ macro_rules! implement_transaction_verifier { all_outputs_map, total_value_of_input_tokens, total_value_of_output_tokens, + new_utxos, }) } @@ -230,30 +233,6 @@ macro_rules! implement_transaction_verifier { } pub fn checking_signatures(&self) -> Result<(), &'static str> { - /* - // if all spent UTXOs are available, check the math and signatures - if let Ok(input_utxos) = &input_utxos { - // We have to check sum of input tokens is less or equal to output tokens. - - let mut new_token_exist = false; - for output_token in &outputs_sum { - match inputs_sum.get(&output_token.0) { - Some(input_value) => ensure!( - input_value >= &output_token.1, - "output value must not exceed input value" - ), - None => { - // If the transaction has one an output with a new token ID - if new_token_exist { - frame_support::fail!("input for the token not found") - } else { - new_token_exist = true; - } - } - } - } - */ - for (index, (_, (input, input_utxo))) in self.all_inputs_map.iter().enumerate() { let spending: Vec> = self .all_inputs_map @@ -301,8 +280,52 @@ macro_rules! 
implement_transaction_verifier { Ok(()) } - pub fn checking_utxos_exists(&self) -> Result<(), &'static str> { - unimplemented!() + pub fn checking_amounts(&self) -> Result<(), &'static str> { + // if all spent UTXOs are available, check the math and signatures + let mut new_token_exist = false; + for (_, (token_id, input_value)) in + self.total_value_of_input_tokens.iter().enumerate() + { + match self.total_value_of_output_tokens.get(token_id) { + Some(output_value) => ensure!( + input_value >= &output_value, + "output value must not exceed input value" + ), + None => { + // If the transaction has one an output with a new token ID + if new_token_exist { + frame_support::fail!("input for the token not found") + } else { + new_token_exist = true; + } + } + } + } + Ok(()) + } + + pub fn checking_utxos_exists(&mut self) -> Result<(), &'static str> { + // Check that outputs are valid + + for (output_index, (token_id, output)) in self.all_outputs_map.iter().enumerate() { + match output.destination { + Destination::Pubkey(_) | Destination::ScriptHash(_) => { + if token_id == &TokenId::mlt() { + ensure!(output.value > 0, "output value must be nonzero"); + } + let hash = self.tx.outpoint(output_index as u64); + ensure!(!>::contains_key(hash), "output already exists"); + self.new_utxos.push(hash.as_fixed_bytes().to_vec()); + } + Destination::CreatePP(_, _) => { + log::info!("TODO validate OP_CREATE"); + } + Destination::CallPP(_, _) => { + log::info!("TODO validate OP_CALL"); + } + } + } + Ok(()) } pub fn checking_tokens_transferring(&self) -> Result<(), &'static str> { From 7a6d68baacc07064bcf7aab4fe95b8c89177ec84 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 26 Oct 2021 00:23:22 +0300 Subject: [PATCH 16/53] Added a couple a token creation tests. Added calculating_reward amd collect_result functions Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 8 ++--- pallets/utxo/src/tests.rs | 60 +++++++++++++++++++++++++++---- pallets/utxo/src/verifier.rs | 68 ++++++++++++++++++++++++++++++++---- 3 files changed, 120 insertions(+), 16 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index c2bc576..43e0a9b 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -420,10 +420,10 @@ pub mod pallet { tv.checking_signatures()?; tv.checking_utxos_exists()?; tv.checking_amounts()?; - tv.checking_tokens_transferring()?; - tv.checking_tokens_issued()?; - tv.checking_nft_mint()?; - tv.checking_assets_burn()?; + //tv.checking_tokens_transferring()?; + //tv.checking_tokens_issued()?; + //tv.checking_nft_mint()?; + //tv.checking_assets_burn()?; tv.calculating_reward()?; tv.collect_result() diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 11ee7ae..6d4e953 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -579,7 +579,7 @@ fn test_send_to_address() { // }); // *Testing token creation: -use crate::tokens::TokenId; +use crate::tokens::{NftDataHash, TokenId}; #[test] // Simple creation of tokens fn checking_tokens_issuance() { @@ -591,7 +591,6 @@ fn checking_tokens_issuance() { let output = TransactionOutput { value: 0, destination: Destination::Pubkey(alice_pub_key), - // TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), data: Some(OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(first_input_hash), @@ -607,21 +606,70 @@ fn checking_tokens_issuance() { outputs: vec![output], } .sign_unchecked(&[utxo0], 0, &alice_pub_key); - let new_utxo_hash = tx.outpoint(0); - let (_, init_utxo) = genesis_utxo(); 
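         // Sanity check: the genesis UTXO must still be in the store before we spend it.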
assert!(UtxoStore::::contains_key(H256::from(init_utxo))); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); assert!(UtxoStore::::contains_key(new_utxo_hash)); - assert_eq!(50, UtxoStore::::get(new_utxo_hash).unwrap().value); + assert_eq!( + 1_000_000_000, + UtxoStore::::get(new_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenIssuanceV1 { + amount_to_issue, .. + } => amount_to_issue, + _ => 0, + }) + .unwrap_or(0) + ); }) } #[test] // Simple creation of NFT -fn checking_nft_mint() {} +fn checking_nft_mint() { + execute_with_alice(|alice_pub_key| { + // Alice wants to send herself a new utxo of value 50. + let (utxo0, input0) = tx_input_gen_no_signature(); + let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); + let data_hash = NftDataHash::Raw(vec![1, 2, 3, 4, 5]); + let output = TransactionOutput { + value: 0, + destination: Destination::Pubkey(alice_pub_key), + // TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), + data: Some(OutputData::NftMintV1 { + token_id: TokenId::new_asset(first_input_hash), + data_hash: data_hash.clone(), + metadata_uri: "facebook.com".as_bytes().to_vec(), + }), + }; + let tx = Transaction { + inputs: vec![input0], + outputs: vec![output], + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + let new_utxo_hash = tx.outpoint(0); + let (_, init_utxo) = genesis_utxo(); + assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); + assert!(UtxoStore::::contains_key(new_utxo_hash)); + assert_eq!( + data_hash, + UtxoStore::::get(new_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::NftMintV1 { data_hash, .. } => data_hash, + _ => NftDataHash::Raw(Vec::new()), + }) + .unwrap_or(NftDataHash::Raw(Vec::new())) + ); + }) +} #[test] // NFT might be only unique, we can't create a few nft for one item diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs index d00141e..c5d1091 100644 --- a/pallets/utxo/src/verifier.rs +++ b/pallets/utxo/src/verifier.rs @@ -12,6 +12,11 @@ macro_rules! implement_transaction_verifier { total_value_of_input_tokens: BTreeMap, total_value_of_output_tokens: BTreeMap, new_utxos: Vec>, + spended_utxos: Result< + Vec::AccountId>>, + Vec>, + >, + reward: u64, } impl TransactionVerifier<'_, T> { @@ -144,7 +149,6 @@ macro_rules! implement_transaction_verifier { pub fn new(tx: &TransactionFor) -> Result, &'static str> { let all_inputs_map = Self::init_inputs(&tx); let all_outputs_map = Self::init_outputs(&tx); - let new_utxos = Vec::new(); let total_value_of_input_tokens = Self::init_total_value_of_input_tokens(&all_inputs_map)?; let total_value_of_output_tokens = @@ -155,7 +159,9 @@ macro_rules! implement_transaction_verifier { all_outputs_map, total_value_of_input_tokens, total_value_of_output_tokens, - new_utxos, + new_utxos: Vec::new(), + spended_utxos: Ok(Vec::new()), + reward: 0, }) } @@ -305,8 +311,32 @@ macro_rules! implement_transaction_verifier { } pub fn checking_utxos_exists(&mut self) -> Result<(), &'static str> { - // Check that outputs are valid + // Resolve the transaction inputs by looking up UTXOs being spent by them. + // + // This will contain one of the following: + // * Ok(utxos): a vector of UTXOs each input spends. 
+ // * Err(missing): a vector of outputs missing from the store + + self.spended_utxos = { + let mut missing = Vec::new(); + let mut resolved: Vec> = Vec::new(); + for input in &self.tx.inputs { + if let Some(input_utxo) = >::get(&input.outpoint) { + let lock_commitment = input_utxo.destination.lock_commitment(); + ensure!( + input.lock_hash() == *lock_commitment, + "Lock hash does not match" + ); + resolved.push(input_utxo); + } else { + missing.push(input.outpoint.clone().as_fixed_bytes().to_vec()); + } + } + missing.is_empty().then(|| resolved).ok_or(missing) + }; + + // Check that outputs are valid for (output_index, (token_id, output)) in self.all_outputs_map.iter().enumerate() { match output.destination { Destination::Pubkey(_) | Destination::ScriptHash(_) => { @@ -344,12 +374,38 @@ macro_rules! implement_transaction_verifier { unimplemented!() } - pub fn calculating_reward(&self) -> Result<(), &'static str> { - unimplemented!() + pub fn calculating_reward(&mut self) -> Result<(), &'static str> { + use std::convert::TryFrom; + // Reward at the moment only in MLT + self.reward = if self.total_value_of_input_tokens.contains_key(&TokenId::mlt()) + && self.total_value_of_output_tokens.contains_key(&(TokenId::mlt())) + { + u64::try_from( + self.total_value_of_input_tokens[&TokenId::mlt()] + .checked_sub(self.total_value_of_output_tokens[&TokenId::mlt()]) + .ok_or("reward underflow")?, + ) + .map_err(|_e| "too big amount of fee")? + } else { + u64::try_from( + *self + .total_value_of_input_tokens + .get(&TokenId::mlt()) + .ok_or("fee doesn't exist")?, + ) + .map_err(|_e| "too big amount of fee")? + }; + Ok(()) } pub fn collect_result(&self) -> Result { - unimplemented!() + Ok(ValidTransaction { + priority: self.reward, + requires: self.spended_utxos.clone().map_or_else(|x| x, |_| Vec::new()), + provides: self.new_utxos.clone(), + longevity: TransactionLongevity::MAX, + propagate: true, + }) } } }; From 8fb97b789bdf7081e708ac513a94c5fe6c153417 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 28 Oct 2021 10:47:30 +0300 Subject: [PATCH 17/53] Added a few new tests for token creation, transferring, and minting nft Signed-off-by: sinitcin --- Cargo.lock | 1 + pallets/utxo/Cargo.toml | 3 + pallets/utxo/src/lib.rs | 400 ++----------------- pallets/utxo/src/tests.rs | 723 +++++++++++++++++++++++++++-------- pallets/utxo/src/tokens.rs | 2 +- pallets/utxo/src/verifier.rs | 521 +++++++++++++++++-------- 6 files changed, 942 insertions(+), 708 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cd5272b..97ea749 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4138,6 +4138,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "pp-api", + "rand 0.4.6", "serde", "sp-consensus-aura", "sp-core", diff --git a/pallets/utxo/Cargo.toml b/pallets/utxo/Cargo.toml index 490a2c0..f4bce51 100644 --- a/pallets/utxo/Cargo.toml +++ b/pallets/utxo/Cargo.toml @@ -23,6 +23,9 @@ log = "0.4.8" serde = '1.0.119' variant_count = '1.1' +[dev-dependencies] +rand = "0.4" + [dependencies.bech32] default-features = false path = '../../libs/bech32' diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 43e0a9b..7a4427c 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -36,7 +36,7 @@ pub mod weights; #[frame_support::pallet] pub mod pallet { // use crate::sign::{self, Scheme}; - use crate::tokens::{/*Mlt,*/ OutputData, TokenId, Value}; + use crate::tokens::{NftDataHash, OutputData, TokenId, Value}; // use crate::verifier::TransactionVerifier; use super::implement_transaction_verifier; 
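     // (`TransactionVerifier` is no longer a plain import: it is generated in place
     // by the `implement_transaction_verifier!()` macro imported above.)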
use bech32; @@ -269,6 +269,16 @@ pub mod pallet { data: None, } } + + // Create a new output with a some data + pub fn new_with_data(value: Value, pubkey: H256, data: OutputData) -> Self { + let pubkey = sp_core::sr25519::Public::from_h256(pubkey); + Self { + value, + destination: Destination::Pubkey(pubkey.into()), + data: Some(data), + } + } } #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] @@ -326,6 +336,11 @@ pub mod pallet { pub(super) type PointerToIssueToken = StorageMap<_, Identity, TokenId, /* UTXO */ H256, OptionQuery>; + #[pallet::storage] + #[pallet::getter(fn nft_unique_data_hash)] + pub(super) type NftUniqueDataHash = + StorageMap<_, Identity, NftDataHash, /* UTXO */ H256, OptionQuery>; + #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId")] @@ -420,255 +435,8 @@ pub mod pallet { tv.checking_signatures()?; tv.checking_utxos_exists()?; tv.checking_amounts()?; - //tv.checking_tokens_transferring()?; - //tv.checking_tokens_issued()?; - //tv.checking_nft_mint()?; - //tv.checking_assets_burn()?; tv.calculating_reward()?; tv.collect_result() - - /* - //ensure rather than assert to avoid panic - //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries - ensure!(!tx.inputs.is_empty(), "no inputs"); - ensure!(!tx.outputs.is_empty(), "no outputs"); - ensure!(tx.inputs.len() < (u32::MAX as usize), "too many inputs"); - ensure!(tx.outputs.len() < (u32::MAX as usize), "too many outputs"); - - //ensure each input is used only a single time - //maps each input into btree - //if map.len() > num of inputs then fail - //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html - //WARNING workshop code has a bug here - //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs - //input_map.len() > transaction.inputs.len() //THIS IS WRONG - { - let input_map: BTreeMap<_, ()> = - tx.inputs.iter().map(|input| (input.outpoint, ())).collect(); - //we want map size and input size to be equal to ensure each is used only once - ensure!( - input_map.len() == tx.inputs.len(), - "each input should be used only once" - ); - } - //ensure each output is unique - //map each output to btree to count unique elements - //WARNING example code has a bug here - //out_map.len() != transaction.outputs.len() //THIS IS WRONG - { - let out_map: BTreeMap<_, ()> = tx.outputs.iter().map(|output| (output, ())).collect(); - //check each output is defined only once - ensure!( - out_map.len() == tx.outputs.len(), - "each output should be used once" - ); - } - let simple_tx = get_simple_transaction(tx); - let mut reward = 0; - // Resolve the transaction inputs by looking up UTXOs being spent by them. - // - // This will cointain one of the following: - // * Ok(utxos): a vector of UTXOs each input spends. 
- // * Err(missing): a vector of outputs missing from the store - let input_utxos = { - let mut missing = Vec::new(); - let mut resolved: Vec> = Vec::new(); - - for input in &tx.inputs { - if let Some(input_utxo) = >::get(&input.outpoint) { - let lock_commitment = input_utxo.destination.lock_commitment(); - ensure!( - input.lock_hash() == *lock_commitment, - "Lock hash does not match" - ); - resolved.push(input_utxo); - } else { - missing.push(input.outpoint.clone().as_fixed_bytes().to_vec()); - } - } - - missing.is_empty().then(|| resolved).ok_or(missing) - }; - - let full_inputs: Vec<(TokenId, TransactionOutputFor)> = tx - .inputs - .iter() - .filter_map(|input| >::get(&input.outpoint)) - .filter_map(|output| match output.data { - Some(ref data) => match data { - OutputData::TokenTransferV1 { token_id, amount } => Some((*token_id, output)), - OutputData::TokenIssuanceV1 { - token_id, - token_ticker, - amount_to_issue, - number_of_decimals, - metadata_URI, - } => Some((*token_id, output)), - OutputData::TokenBurnV1 { .. } => { - // frame_support::fail!("Token gone forever, we can't use it anymore").ok(); - None - } - OutputData::NftMintV1 { - token_id, - data_hash, - metadata_URI, - } => Some((*token_id, output)), - }, - None => Some((H256::zero(), output)), - }) - .collect(); - - let input_vec: Vec<(TokenId, Value)> = - full_inputs.iter().map(|output| (output.0, output.1.value)).collect(); - - let out_vec: Vec<(TokenId, Value)> = tx - .outputs - .iter() - .filter_map(|output| { - match output.data { - Some(OutputData::TokenTransferV1 { token_id, amount }) => { - Some((token_id, amount)) - } - Some(OutputData::TokenIssuanceV1 { - token_id, - amount_to_issue, - .. - }) => Some((token_id, amount_to_issue)), - Some(OutputData::NftMintV1 { token_id, .. }) => Some((token_id, 1)), - // Token gone forever, we can't use it anymore - Some(OutputData::TokenBurnV1 { .. }) => None, - None => Some((H256::zero(), output.value)), - } - }) - .collect(); - - // Check for token creation - for output in tx.outputs.iter() { - let tid = match output.data { - Some(OutputData::TokenTransferV1 { token_id, .. }) => token_id, - Some(OutputData::TokenIssuanceV1 { token_id, .. }) => token_id, - _ => continue, - }; - // If we have input and output for the same token it's not a problem - if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() { - continue; - } else { - // But when we don't have an input for token but token id exist in TokenList - ensure!( - !>::contains_key(tid), - "no inputs for the token id" - ); - } - } - - let mut new_utxos = Vec::new(); - // Check that outputs are valid - for (output_index, output) in tx.outputs.iter().enumerate() { - match output.destination { - Destination::Pubkey(_) | Destination::ScriptHash(_) => { - ensure!(output.value > 0, "output value must be nonzero"); - let hash = tx.outpoint(output_index as u64); - ensure!(!>::contains_key(hash), "output already exists"); - new_utxos.push(hash.as_fixed_bytes().to_vec()); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate OP_CALL"); - } - } - } - - // if all spent UTXOs are available, check the math and signatures - if let Ok(input_utxos) = &input_utxos { - // We have to check sum of input tokens is less or equal to output tokens. 
- let mut inputs_sum: BTreeMap = BTreeMap::new(); - let mut outputs_sum: BTreeMap = BTreeMap::new(); - - for x in input_vec { - let value = - x.1.checked_add(*inputs_sum.get(&x.0).unwrap_or(&0)) - .ok_or("input value overflow")?; - inputs_sum.insert(x.0, value); - } - for x in out_vec { - let value = - x.1.checked_add(*outputs_sum.get(&x.0).unwrap_or(&0)) - .ok_or("output value overflow")?; - outputs_sum.insert(x.0, value); - } - - let mut new_token_exist = false; - for output_token in &outputs_sum { - match inputs_sum.get(&output_token.0) { - Some(input_value) => ensure!( - input_value >= &output_token.1, - "output value must not exceed input value" - ), - None => { - // If the transaction has one an output with a new token ID - if new_token_exist { - frame_support::fail!("input for the token not found") - } else { - new_token_exist = true; - } - } - } - } - - for (index, (input, input_utxo)) in tx.inputs.iter().zip(input_utxos).enumerate() { - match &input_utxo.destination { - Destination::Pubkey(pubkey) => { - let msg = sign::TransactionSigMsg::construct( - sign::SigHash::default(), - &tx, - &input_utxos, - index as u64, - u32::MAX, - ); - let ok = pubkey - .parse_sig(&input.witness[..]) - .ok_or("bad signature format")? - .verify(&msg); - ensure!(ok, "signature must be valid"); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate spending of OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate spending of OP_CALL"); - } - Destination::ScriptHash(_hash) => { - let witness = input.witness.clone(); - let lock = input.lock.clone(); - crate::script::verify(&tx, &input_utxos, index as u64, witness, lock) - .map_err(|_| "script verification failed")?; - } - } - } - - // Reward at the moment only in MLT - reward = if inputs_sum.contains_key(&(H256::zero() as TokenId)) - && outputs_sum.contains_key(&(H256::zero() as TokenId)) - { - inputs_sum[&(H256::default() as TokenId)] - .checked_sub(outputs_sum[&(H256::zero() as TokenId)]) - .ok_or("reward underflow")? - } else { - *inputs_sum.get(&(H256::zero() as TokenId)).ok_or("fee doesn't exist")? - }; - } - - Ok(ValidTransaction { - priority: reward as u64, - requires: input_utxos.map_or_else(|x| x, |_| Vec::new()), - provides: new_utxos, - longevity: TransactionLongevity::MAX, - propagate: true, - }) - - */ } /// Update storage to reflect changes made by transaction @@ -696,6 +464,20 @@ pub mod pallet { let hash = tx.outpoint(index as u64); log::debug!("inserting to UtxoStore {:?} as key {:?}", output, hash); >::insert(hash, Some(output)); + match &output.data { + Some(OutputData::NftMintV1 { + token_id, + data_hash, + .. + }) => { + >::insert(data_hash, hash); + >::insert(token_id, hash); + } + Some(OutputData::TokenIssuanceV1 { token_id, .. 
}) => { + >::insert(token_id, hash); + } + _ => continue, + } } Destination::CreatePP(script, data) => { create::(caller, script, &data); @@ -719,128 +501,6 @@ pub mod pallet { Ok(().into()) } - // pub fn token_create( - // caller: &T::AccountId, - // public: H256, - // input_for_fee: TransactionInput, - // token_name: Vec, - // token_ticker: Vec, - // supply: Value, - // ) -> Result> { - // ensure!(token_name.len() <= 25, Error::::Unapproved); - // ensure!(token_ticker.len() <= 5, Error::::Unapproved); - // ensure!(!supply.is_zero(), Error::::MinBalanceZero); - // - // // Input with MLT FEE - // let fee = UtxoStore::::get(input_for_fee.outpoint).ok_or(Error::::Unapproved)?.value; - // ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); - // - // // Save in UTXO - // let instance = crate::TokenInstance::new_normal( - // BlakeTwo256::hash_of(&(&token_name, &token_ticker)), - // token_name, - // token_ticker, - // supply, - // ); - // let token_id = *instance.id(); - // - // ensure!( - // !>::contains_key(instance.id()), - // Error::::InUse - // ); - // - // let mut tx = Transaction { - // inputs: crate::vec![ - // // Fee an input equal 100 MLT - // input_for_fee, - // ], - // outputs: crate::vec![ - // // Output a new tokens - // TransactionOutput::new_token(*instance.id(), supply, public), - // ], - // }; - // - // // We shall make an output to return odd funds - // if fee > Mlt(100).to_munit() { - // tx.outputs.push(TransactionOutput::new_pubkey( - // fee - Mlt(100).to_munit(), - // public, - // )); - // } - // - // let sig = crypto::sr25519_sign( - // SR25519, - // &sp_core::sr25519::Public::from_h256(public), - // &tx.encode(), - // ) - // .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; - // for i in 0..tx.inputs.len() { - // tx.inputs[i].witness = sig.0.to_vec(); - // } - // // Success - // spend::(caller, &tx)?; - // - // // Save in Store - // >::insert(token_id, Some(instance)); - // Ok(token_id) - // } - - /* - fn mint( - caller: &T::AccountId, - creator_pubkey: sp_core::sr25519::Public, - data_url: Vec, - data: Vec, - ) -> Result> { - let (fee, inputs_hashes) = pick_utxo::(caller, Mlt(100).to_munit()); - ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); - ensure!(data_url.len() <= 50, Error::::Unapproved); - - let instance = TokenInstance::new_nft( - BlakeTwo256::hash_of(&data), - data.clone(), - data_url.clone(), - creator_pubkey.to_vec(), - ); - - let inputs_for_fee = inputs_hashes - .iter() - .filter_map(|x| >::get(&x)) - .map(|output| TransactionInput::new_empty(BlakeTwo256::hash_of(&(&output, 0 as u64)))) - .collect(); - - ensure!( - !TokenList::::contains_key(instance.id()), - Error::::NftCollectionExists - ); - - let mut tx = Transaction { - inputs: inputs_for_fee, - outputs: crate::vec![ - // Output a new tokens - TransactionOutput::new_nft( - *instance.id(), - data, - data_url, - H256::from(creator_pubkey) - ), - ], - }; - - let sig = crypto::sr25519_sign(SR25519, &creator_pubkey, &tx.encode()) - .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; - for i in 0..tx.inputs.len() { - tx.inputs[i].witness = sig.0.to_vec(); - } - // Success - spend::(caller, &tx)?; - - // Save in Store - TokenList::::insert(instance.id(), Some(instance.clone())); - Ok(*instance.id()) - } - */ - /// Pick the UTXOs of `caller` from UtxoStore that satisfy request `value` /// /// Return a list of UTXOs that satisfy the request diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 6d4e953..3887cda 100644 --- 
a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -102,7 +102,7 @@ fn test_unchecked_2nd_output() { let tx1 = Transaction { inputs: vec![input0], outputs: vec![ - TransactionOutput::new_create_pp(0, vec![], vec![]), + TransactionOutput::new_create_pp(1, vec![], vec![]), TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), ], } @@ -165,6 +165,8 @@ fn attack_with_sending_to_own_account() { #[test] fn attack_with_empty_transactions() { new_test_ext().execute_with(|| { + // We should use the real input because. Otherwise, appears another error + let (_, input) = tx_input_gen_no_signature(); assert_err!( Utxo::spend(Origin::signed(H256::zero()), Transaction::default()), // empty tx "no inputs" @@ -174,7 +176,7 @@ fn attack_with_empty_transactions() { Utxo::spend( Origin::signed(H256::zero()), Transaction { - inputs: vec![TransactionInput::default()], // an empty tx + inputs: vec![input], // an empty tx outputs: vec![] } ), @@ -349,60 +351,6 @@ fn test_script() { }) } -#[test] -fn test_tokens() { - // let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); - // test_ext.execute_with(|| { - // // Let's create a new test token - // let token_id = BlakeTwo256::hash_of(&b"TEST"); - // let supply = 1000; - // // Let's make a tx for a new token: - // // * We need at least one input for the fee and one output for a new token. - // // * TokenID for a new token has to be unique. - // let instance = TokenInstance::new_normal( - // token_id, - // b"New token test".to_vec(), - // b"NTT".to_vec(), - // supply, - // ); - // let mut first_tx = Transaction { - // inputs: vec![ - // // 100 MLT - // tx_input_gen_no_signature(), - // ], - // outputs: vec![ - // // 100 a new tokens - // TransactionOutput::new_token(token_id, supply, H256::from(alice_pub_key)), - // // 20 MLT to be paid as a fee, 80 MLT returning - // TransactionOutput::new_pubkey(80, H256::from(alice_pub_key)), - // ], - // } - // .sign_unchecked(&[utxo0], 0, &alice_pub_key); - // assert_ok!(Utxo::spend(Origin::signed(H256::zero()), first_tx.clone())); - // - // // Store a new TokenInstance to the Storage - // >::insert(token_id, Some(instance.clone())); - // dbg!(&>::get(token_id)); - // - // // alice sends 1000 tokens to karl and the rest back to herself 10 tokens - // let utxo_hash_mlt = first_tx.outpoint(1); - // let utxo_hash_token = first_tx.outpoint(0); - // let prev_utxos = [first_tx.outputs[1].clone(), first_tx.outputs[0].clone()]; - // - // let tx = Transaction { - // inputs: vec![ - // TransactionInput::new_empty(utxo_hash_mlt), - // TransactionInput::new_empty(utxo_hash_token), - // ], - // outputs: vec![TransactionOutput::new_token(token_id, 10, H256::from(karl_pub_key))], - // } - // .sign_unchecked(&prev_utxos, 0, &alice_pub_key) - // .sign_unchecked(&prev_utxos, 1, &alice_pub_key); - // - // assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - // }); -} - #[test] fn attack_double_spend_by_tweaking_input() { execute_with_alice(|alice_pub_key| { @@ -533,53 +481,19 @@ fn test_send_to_address() { }) } -// #[test] -// fn nft_test() { -// execute_with_alice(|alice_pub_key| { -// // Let's create a new test nft -// let nft_id = BlakeTwo256::hash_of(&b"TEST"); -// let instance = TokenInstance::new_nft( -// nft_id, -// (*b"01010101010101010101010101010101").to_vec(), -// b"http://facebook.com".to_vec(), -// alice_pub_key.to_vec(), -// ); -// -// if let TokenInstance::Nft { -// id, -// data, -// data_url, -// creator_pubkey, -// .. 
-// } = instance -// { -// let mut tx = Transaction { -// inputs: vec![ -// // 100 MLT -// tx_input_gen_no_signature(), -// ], -// outputs: vec![TransactionOutput::new_nft( -// id, -// data.to_vec(), -// data_url, -// H256::from_slice(creator_pubkey.as_slice()), -// )], -// }; -// let alice_sig = crypto::sr25519_sign(SR25519, &alice_pub_key, &tx.encode()).unwrap(); -// tx.inputs[0].witness = alice_sig.0.to_vec(); -// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); -// } -// -// // it should allow to write and read ? -// // let rsp = await dataToken.readData(firstTokenId); -// // assert.equal(rsp, empty); -// // await dataToken.writeData(firstTokenId, data); -// // rsp = await dataToken.readData(firstTokenId); -// // assert.equal(rsp, data); -// }); - // *Testing token creation: use crate::tokens::{NftDataHash, TokenId}; +use rand::Rng; + +fn build_random_vec(len: usize) -> Vec { + let mut rng = rand::thread_rng(); + let mut vec = Vec::with_capacity(len); + for _ in 1..len { + vec.push(rng.gen::()); + } + vec +} + #[test] // Simple creation of tokens fn checking_tokens_issuance() { @@ -588,13 +502,13 @@ fn checking_tokens_issuance() { let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); - let output = TransactionOutput { + let output_new = TransactionOutput { value: 0, destination: Destination::Pubkey(alice_pub_key), // TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), data: Some(OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(first_input_hash), - token_ticker: "Ben's token".as_bytes().to_vec(), + token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, // Should be not more than 18 numbers number_of_decimals: 2, @@ -603,14 +517,16 @@ fn checking_tokens_issuance() { }; let tx = Transaction { inputs: vec![input0], - outputs: vec![output], + outputs: vec![output_new], } .sign_unchecked(&[utxo0], 0, &alice_pub_key); let new_utxo_hash = tx.outpoint(0); let (_, init_utxo) = genesis_utxo(); + // Spend assert!(UtxoStore::::contains_key(H256::from(init_utxo))); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); + // Checking a new UTXO assert!(UtxoStore::::contains_key(new_utxo_hash)); assert_eq!( 1_000_000_000, @@ -625,7 +541,7 @@ fn checking_tokens_issuance() { }) .unwrap_or(0) ); - }) + }); } #[test] @@ -673,89 +589,558 @@ fn checking_nft_mint() { #[test] // NFT might be only unique, we can't create a few nft for one item -fn checking_nft_unique() {} +fn checking_nft_unique() { + execute_with_alice(|alice_pub_key| { + // Alice wants to send herself a new utxo of value 50. 
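+        // (More precisely: the first output below mints the NFT with value 0,
+        // and the second output returns 50 MLT to Alice.)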
+ let (utxo0, input0) = tx_input_gen_no_signature(); + let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); + + let mut nft_data = OutputData::NftMintV1 { + token_id: TokenId::new_asset(first_input_hash), + data_hash: NftDataHash::Hash32([255; 32]), + metadata_uri: "facebook.com".as_bytes().to_vec(), + }; + let tx = Transaction { + inputs: vec![input0.clone()], + outputs: vec![ + TransactionOutput { + value: 0, + destination: Destination::Pubkey(alice_pub_key), + data: Some(nft_data.clone()), + }, + TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), + ], + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let new_utxo_hash = tx.outpoint(1); + let (_, init_utxo) = genesis_utxo(); + // Spend + assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); + // Checking a new UTXO + assert!(UtxoStore::::contains_key(new_utxo_hash)); + let new_utxo = tx.outputs[1].clone(); + + // Alice wants to send herself a new utxo of value 50 again + if let OutputData::NftMintV1 { + ref mut token_id, .. + } = nft_data + { + *token_id = TokenId::new_asset(H256::random()); + } + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(new_utxo_hash.clone())], + outputs: vec![TransactionOutput { + value: 0, + destination: Destination::Pubkey(alice_pub_key), + data: Some(nft_data.clone()), + }], + } + .sign_unchecked(&[new_utxo], 0, &alice_pub_key); + // Spend + assert!(UtxoStore::::contains_key(H256::from(new_utxo_hash))); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "digital data has already been minted" + ); + }); +} #[test] // Creation a token with a pre-existing ID or re-creation of an already created token. -fn checking_tokens_twice_creation() {} +fn checking_tokens_double_creation() { + execute_with_alice(|alice_pub_key| { + // Alice wants to send herself a new utxo of value 50. + let (utxo0, input0) = tx_input_gen_no_signature(); + let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); -// ** Creating a token with corrupted data -//////////////////////////////////////////////////////////////////////////////////////////////// + let issuance_data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(first_input_hash), + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 2, + metadata_uri: "facebook.com".as_bytes().to_vec(), + }; -#[test] -//Data field of zero length -fn checking_tokens_with_empty_data() {} + let tx = Transaction { + inputs: vec![input0.clone()], + outputs: vec![TransactionOutput { + value: 0, + destination: Destination::Pubkey(alice_pub_key), + data: Some(issuance_data.clone()), + }], + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let new_utxo_hash = tx.outpoint(0); + let (_, init_utxo) = genesis_utxo(); + // Spend + assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); + // Checking a new UTXO + assert!(UtxoStore::::contains_key(new_utxo_hash)); + assert_eq!( + 1_000_000_000, + UtxoStore::::get(new_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenIssuanceV1 { + amount_to_issue, .. 
+ } => amount_to_issue, + _ => 0, + }) + .unwrap_or(0) + ); + let new_utxo_hash = tx.outpoint(0); + let new_utxo = tx.outputs[0].clone(); -#[test] -// The data field of the maximum allowed length filled with random garbage -fn checking_tokens_with_junk_data() {} + // Alice wants to send herself a new utxo of value 50 again + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(new_utxo_hash.clone())], + outputs: vec![TransactionOutput { + value: 0, + destination: Destination::Pubkey(alice_pub_key), + data: Some(issuance_data.clone()), + }], + } + .sign_unchecked(&[new_utxo], 0, &alice_pub_key); + // Spend + assert!(UtxoStore::::contains_key(H256::from(new_utxo_hash))); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "token has already been issued" + ); + }); +} #[test] -// Creation of a token with 0 issue amount -fn checking_tokens_creation_with_zero_amount() {} +fn checking_tokens_with_invalid_data() { + macro_rules! test_tx { + ($data: ident, $checking: tt, $err: expr) => { + execute_with_alice(|alice_pub_key| { + // Alice wants to send herself a new utxo of value 50. + let (utxo0, input0) = tx_input_gen_no_signature(); + let output_new = TransactionOutput { + value: 1, + destination: Destination::Pubkey(alice_pub_key), + data: Some($data.clone()), + }; + let tx = Transaction { + inputs: vec![input0], + outputs: vec![output_new], + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + let new_utxo_hash = tx.outpoint(0); + let (_, init_utxo) = genesis_utxo(); + // Spend + assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + if stringify!($checking) == "Err" { + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + $err + ); + assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + assert!(!UtxoStore::::contains_key(new_utxo_hash)); + } else if stringify!($checking) == "Ok" { + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); + assert!(UtxoStore::::contains_key(new_utxo_hash)); + } + }); + }; + } + + // TokenID = MLT + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::mlt(), + token_ticker: vec![], + amount_to_issue: 0, + // Should be not more than 18 numbers + number_of_decimals: 0, + metadata_uri: vec![], + }; + test_tx!(data, Err, "unable to use mlt as a token id"); + + // Ticker empty + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: vec![], + amount_to_issue: 0, + number_of_decimals: 0, + metadata_uri: vec![], + }; + test_tx!(data, Err, "token ticker can't be empty"); + + // Ticker too long + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: Vec::from([0u8; 10_000]), + amount_to_issue: 0, + number_of_decimals: 0, + metadata_uri: vec![], + }; + test_tx!(data, Err, "token ticker is too long"); + + // Amount to issue is zero + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: b"BensT".to_vec(), + amount_to_issue: 0, + number_of_decimals: 0, + metadata_uri: vec![], + }; + test_tx!(data, Err, "output value must be nonzero"); + + // Number of decimals more than 18 numbers + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: b"BensT".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 19, + metadata_uri: vec![], + }; + test_tx!(data, Err, "too long decimals"); + + // 
metadata_uri empty + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: b"BensT".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 18, + metadata_uri: vec![], + }; + test_tx!(data, Ok, ""); + + // metadata_uri too long + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: b"BensT".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 18, + metadata_uri: Vec::from([0u8; 10_000]), + }; + test_tx!(data, Err, "token metadata uri is too long"); + + // The data field of the maximum allowed length filled with random garbage + let mut rng = rand::thread_rng(); + let garbage = build_random_vec(100); + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: vec![0, 255, 254, 2, 1], + amount_to_issue: rng.gen::() as u128, + number_of_decimals: 18, + metadata_uri: garbage.clone(), + }; + test_tx!(data, Err, "token ticker has none ascii characters"); + // garbage uri + let data = OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: b"BensT".to_vec(), + amount_to_issue: rng.gen::() as u128, + number_of_decimals: 18, + metadata_uri: garbage, + }; + test_tx!(data, Err, "metadata uri has none ascii characters"); +} #[test] -// Generating a token with a long URI string -fn checking_tokens_creation_with_long_uri() {} +fn checking_tokens_transferring() { + let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); + test_ext.execute_with(|| { + // Round 1 + let token_id = TokenId::new_asset(H256::random()); + // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_with_data( + 10, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_id: token_id.clone(), + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 2, + metadata_uri: "facebook.com".as_bytes().to_vec(), + }, + ), + ], + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); -// ** Creation of a token without input with MLT to pay commission -//////////////////////////////////////////////////////////////////////////////////////////////// + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let new_utxo_hash = tx.outpoint(0); + let new_utxo = tx.outputs[0].clone(); + let token_utxo_hash = tx.outpoint(1); + let token_utxo = tx.outputs[1].clone(); -#[test] -// Test tx where Input with token and without MLT, output has token (without MLT) -fn checking_tokens_creation_without_mlt() {} + // Round 2 + // then send rest of the tokens to karl (proving that the first tx was successful) + let _tx = Transaction { + inputs: vec![TransactionInput::new_empty(new_utxo_hash)], + outputs: vec![TransactionOutput::new_pubkey(90, H256::from(karl_pub_key))], + } + .sign_unchecked(&[new_utxo.clone()], 0, &alice_pub_key); -#[test] -// Test tx where Input with token and without MLT, output has MLT (without token) -fn checking_tokens_creation_with_random_inputs_and_outputs() {} + // Round 3 - Let's fail on wrong token id + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_with_data( + 0, + H256::from(alice_pub_key), + 
OutputData::TokenTransferV1 { + token_id: TokenId::new_asset(H256::random()), + amount: 1_00_000_000, + }, + )], + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "input for the token not found" + ); -#[test] -// Test tx where Input without token but with MLT, output has MLT and token -fn checking_tokens_creation_without_inputs() { - // Test tx where no inputs for token + // Round 3 - Let's fail on exceed token amount + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 1_000_000_001, + }, + )], + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "output value must not exceed input value" + ); + + // Round 3 - Let's send a big amount of MLT with the correct tokens + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_with_data( + 1_000_000_000, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 1_000_000_000, + }, + )], + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "output value must not exceed input value" + ); + + // Round 3 - should be success + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 1_000_000_000, + }, + )], + } + .sign_unchecked(&[token_utxo], 0, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + }); } #[test] -// Test where less MLT at the input than you need to pay the commission -fn checking_tokens_creation_with_insufficient_fee() {} +fn checking_nft_transferring() { + let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); + test_ext.execute_with(|| { + // Round 1 + let token_id = TokenId::new_asset(H256::random()); + // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_with_data( + 10, + H256::from(karl_pub_key), + OutputData::NftMintV1 { + token_id: token_id.clone(), + data_hash: NftDataHash::Hash32([7; 32]), + metadata_uri: "facebook.com".as_bytes().to_vec(), + }, + ), + ], + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let new_utxo_hash = tx.outpoint(0); + let new_utxo = tx.outputs[0].clone(); + let token_utxo_hash = tx.outpoint(1); + let token_utxo = tx.outputs[1].clone(); + + // Round 2 + // then send rest of the tokens to karl (proving that the first tx was successful) + let _tx = Transaction { + inputs: vec![TransactionInput::new_empty(new_utxo_hash)], + outputs: vec![TransactionOutput::new_pubkey(90, H256::from(karl_pub_key))], + } + .sign_unchecked(&[new_utxo.clone()], 0, &alice_pub_key); + + // Round 3 - Let's fail on wrong token id + let tx = Transaction { + inputs: 
vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: TokenId::new_asset(H256::random()), + amount: 1_00_000_000, + }, + )], + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "input for the token not found" + ); + + // Round 3 - Let's fail on exceed token amount + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 1_000_000_001, + }, + )], + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "output value must not exceed input value" + ); + + // Round 3 - Let's send a big amount of MLT with the correct tokens + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_with_data( + 1_000_000_000, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 1_000_000_000, + }, + )], + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "output value must not exceed input value" + ); + + // Round 3 - should be success + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 1, + }, + )], + } + .sign_unchecked(&[token_utxo], 0, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + }); +} #[test] -// Test tx where Input and output have a token but with zero value -fn checking_tokens_creation_with_zero_value() {} - -// * Testing token transfer -//////////////////////////////////////////////////////////////////////////////////////////////// - -// #[test] -// // -// fn checking_tokens_() {} -// #[test] -// // -// fn checking_tokens_() {} -// #[test] -// // -// fn checking_tokens_() {} -// #[test] -// // -// fn checking_tokens_() {} -// #[test] -// // -// fn checking_tokens_() {} -// #[test] -// // -// fn checking_tokens_() {} -// #[test] -// // -// fn checking_tokens_() {} -// #[test] -// // -// fn checking_tokens_() {} -// #[test] -// // -// fn checking_tokens_() {} - -// Testing token transfer +// Test tx where Input with token and without MLT, output has token (without MLT) +fn checking_tokens_creation_with_insufficient_fee() { + let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); + test_ext.execute_with(|| { + // Round 1 + let token_id = TokenId::new_asset(H256::random()); + // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_id: token_id.clone(), + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 2, + metadata_uri: 
"facebook.com".as_bytes().to_vec(), + }, + ), + ], + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let token_utxo_hash = tx.outpoint(1); + let token_utxo = tx.outputs[1].clone(); + let tx = Transaction { + inputs: vec![ + // Use here token issuance for examaple + TransactionInput::new_empty(token_utxo_hash), + ], + outputs: vec![TransactionOutput::new_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_id: TokenId::new_asset(H256::random()), + token_ticker: b"Enric".to_vec(), + amount_to_issue: 1_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 2, + metadata_uri: "facebook.com".as_bytes().to_vec(), + }, + )], + } + .sign_unchecked(&[token_utxo], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "insufficient fee" + ); + }); +} // Testing the compatibility of the old version with the new one +#[test] +fn checking_immutable_tx_format() { + // todo: Testing the compatibility of the old version with the new one - not done yet +} -// Testing burning tokens +// Testing burn tokens +#[test] +fn checking_burn_tokens() { + // todo: Burn tokens has not tested yet +} +// Testing the compatibility of the old version with the new one +#[test] +fn checking_token_id() { + // todo: Testing token id - not done yet + // Token ID should be +} diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index f84a7c5..1424cec 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -17,7 +17,7 @@ const LENGTH_BYTES_TO_REPRESENT_ID: usize = 20; pub type Value = u128; -pub struct Mlt(Value); +pub struct Mlt(pub Value); impl Mlt { pub fn to_munit(&self) -> Value { self.0 * 1_000 * 100_000_000 diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs index c5d1091..7a52bb9 100644 --- a/pallets/utxo/src/verifier.rs +++ b/pallets/utxo/src/verifier.rs @@ -7,8 +7,8 @@ macro_rules! implement_transaction_verifier { pub struct TransactionVerifier<'a, T: Config> { tx: &'a TransactionFor, - all_inputs_map: BTreeMap)>, - all_outputs_map: BTreeMap>, + all_inputs_map: BTreeMap)>>, + all_outputs_map: BTreeMap>>, total_value_of_input_tokens: BTreeMap, total_value_of_output_tokens: BTreeMap, new_utxos: Vec>, @@ -23,76 +23,157 @@ macro_rules! 
implement_transaction_verifier { // Turn Vector into BTreeMap fn init_inputs( tx: &TransactionFor, - ) -> BTreeMap)> { - let input_map: BTreeMap)> = tx - .inputs - .iter() - .filter_map(|input| { - let token_id = - TransactionVerifier::<'_, T>::get_token_id_from_input(input.outpoint) - .ok()?; - let output = - TransactionVerifier::<'_, T>::get_output_by_outpoint(input.outpoint)?; - Some((token_id, (input.clone(), output))) - }) - .collect(); - input_map + ) -> Result< + BTreeMap)>>, + &'static str, + > { + let mut input_map: BTreeMap< + TokenId, + Vec<(TransactionInput, TransactionOutputFor)>, + > = BTreeMap::new(); + + for input in &tx.inputs { + let token_id = + TransactionVerifier::<'_, T>::get_token_id_from_input(input.outpoint)?; + let output = + TransactionVerifier::<'_, T>::get_output_by_outpoint(input.outpoint) + .ok_or("missing inputs")?; + + if let Some(inputs) = input_map.get_mut(&token_id) { + inputs.push((input.clone(), output)); + } else { + input_map.insert(token_id, vec![(input.clone(), output)]); + } + } + Ok(input_map) } // Turn Vector into BTreeMap - fn init_outputs(tx: &TransactionFor) -> BTreeMap> { - let output_map: BTreeMap> = tx - .outputs - .iter() - .map(|output| { - ( - TransactionVerifier::<'_, T>::get_token_id_from_output(&output), - output.clone(), - ) - }) - .collect(); - output_map + fn init_outputs( + tx: &TransactionFor, + ) -> Result>>, &'static str> { + let mut count = 0; + let mut output_map: BTreeMap>> = + BTreeMap::new(); + + for output in &tx.outputs { + let token_id = TransactionVerifier::<'_, T>::get_token_id_from_output(&output); + if let Some(outputs) = output_map.get_mut(&token_id) { + count += 1; + outputs.push(output.clone()); + } else { + count += 1; + output_map.insert(token_id, vec![output.clone()]); + } + } + ensure!(count == tx.outputs.len(), "can't load all outputs"); + Ok(output_map) } fn init_total_value_of_input_tokens( - all_inputs_map: &BTreeMap)>, + all_inputs_map: &BTreeMap< + TokenId, + Vec<(TransactionInput, TransactionOutputFor)>, + >, ) -> Result, &'static str> { let mut total_value_of_input_tokens: BTreeMap = BTreeMap::new(); let mut mlt_amount: Value = 0; - for (_, (_, (_, input_utxo))) in all_inputs_map.iter().enumerate() { - match &input_utxo.data { - Some(OutputData::TokenIssuanceV1 { - ref token_id, - amount_to_issue, - .. - }) => { - // If token has just created we can't meet another amount here. - total_value_of_input_tokens.insert(token_id.clone(), *amount_to_issue); - } - Some(OutputData::TokenTransferV1 { - ref token_id, - amount, - .. - }) => { - total_value_of_input_tokens.insert( - token_id.clone(), + for (_, (_, input_vec)) in all_inputs_map.iter().enumerate() { + for (_, input_utxo) in input_vec { + match &input_utxo.data { + Some(OutputData::TokenIssuanceV1 { + ref token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + }) => { + // We have to check is this token already issued? 
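+ // Note the asymmetry: on the input side the issuance must already be
+ // registered in PointerToIssueToken (that entry is written when the
+ // issuance UTXO lands in storage), while init_total_value_of_output_tokens
+ // below demands the opposite, so an existing token_id can never be issued twice.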
+ ensure!( + PointerToIssueToken::::contains_key(token_id), + "token has never been issued" + ); + ensure!( + token_id != &TokenId::mlt(), + "unable to use mlt as a token id" + ); + ensure!( + token_ticker.is_ascii(), + "token ticker has none ascii characters" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + ensure!(token_ticker.len() <= 5, "token ticker is too long"); + ensure!(!token_ticker.is_empty(), "token ticker can't be empty"); + ensure!( + metadata_uri.len() <= 100, + "token metadata uri is too long" + ); + ensure!(amount_to_issue > &0u128, "output value must be nonzero"); + ensure!(number_of_decimals <= &18, "too long decimals"); + // If token has just created we can't meet another amount here. total_value_of_input_tokens - .get(token_id) - .unwrap_or(&0) - .checked_add(*amount) - .ok_or("input value overflow")?, - ); - } - Some(OutputData::TokenBurnV1 { .. }) => { - // Nothing to do here because tokens no longer exist. - } - Some(OutputData::NftMintV1 { ref token_id, .. }) => { - // If NFT has just created we can't meet another NFT part here. - total_value_of_input_tokens.insert(token_id.clone(), 1); - } - None => { - mlt_amount = mlt_amount - .checked_add(input_utxo.value) - .ok_or("input value overflow")?; + .insert(token_id.clone(), *amount_to_issue); + // But probably in this input we have a fee + mlt_amount = mlt_amount + .checked_add(input_utxo.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenTransferV1 { + ref token_id, + amount, + .. + }) => { + total_value_of_input_tokens.insert( + token_id.clone(), + total_value_of_input_tokens + .get(token_id) + .unwrap_or(&0) + .checked_add(*amount) + .ok_or("input value overflow")?, + ); + // But probably in this input we have a fee + mlt_amount = mlt_amount + .checked_add(input_utxo.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenBurnV1 { .. }) => { + // Nothing to do here because tokens no longer exist. + } + Some(OutputData::NftMintV1 { + ref token_id, + data_hash, + metadata_uri, + }) => { + // We have to check is this token already issued? + ensure!( + PointerToIssueToken::::contains_key(token_id), + "unable to use an input where NFT has not minted yet" + ); + + // Check is this digital data unique? + ensure!( + NftUniqueDataHash::::contains_key(data_hash), + "unable to use an input where NFT digital data was changed" + ); + + ensure!( + token_id != &TokenId::mlt(), + "unable to use mlt as a token id" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + // If NFT has just created we can't meet another NFT part here. + total_value_of_input_tokens.insert(token_id.clone(), 1); + } + None => { + mlt_amount = mlt_amount + .checked_add(input_utxo.value) + .ok_or("input value overflow")?; + } } } } @@ -101,44 +182,109 @@ macro_rules! implement_transaction_verifier { } fn init_total_value_of_output_tokens( - all_outputs_map: &BTreeMap>, + all_outputs_map: &BTreeMap>>, ) -> Result, &'static str> { let mut total_value_of_output_tokens: BTreeMap = BTreeMap::new(); let mut mlt_amount: Value = 0; - for x in all_outputs_map { - match &x.1.data { - Some(OutputData::TokenIssuanceV1 { - ref token_id, - amount_to_issue, - .. - }) => { - // If token has just created we can't meet another amount here. - total_value_of_output_tokens.insert(token_id.clone(), *amount_to_issue); - } - Some(OutputData::TokenTransferV1 { - ref token_id, - amount, - .. 
- }) => { - total_value_of_output_tokens.insert( - token_id.clone(), + for (_, (_, outputs_vec)) in all_outputs_map.iter().enumerate() { + for utxo in outputs_vec { + // for x in all_outputs_map { + match &utxo.data { + Some(OutputData::TokenIssuanceV1 { + ref token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + }) => { + // We have to check is this token already issued? + ensure!( + !PointerToIssueToken::::contains_key(token_id), + "token has already been issued" + ); + ensure!( + token_id != &TokenId::mlt(), + "unable to use mlt as a token id" + ); + ensure!( + token_ticker.is_ascii(), + "token ticker has none ascii characters" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + ensure!(token_ticker.len() <= 5, "token ticker is too long"); + ensure!(!token_ticker.is_empty(), "token ticker can't be empty"); + ensure!( + metadata_uri.len() <= 100, + "token metadata uri is too long" + ); + ensure!(amount_to_issue > &0u128, "output value must be nonzero"); + ensure!(number_of_decimals <= &18, "too long decimals"); + + // If token has just created we can't meet another amount here. total_value_of_output_tokens - .get(token_id) - .unwrap_or(&0) - .checked_add(*amount) - .ok_or("input value overflow")?, - ); - } - Some(OutputData::TokenBurnV1 { .. }) => { - // Nothing to do here because tokens no longer exist. - } - Some(OutputData::NftMintV1 { ref token_id, .. }) => { - // If NFT has just created we can't meet another NFT part here. - total_value_of_output_tokens.insert(token_id.clone(), 1); - } - None => { - mlt_amount = - mlt_amount.checked_add(x.1.value).ok_or("input value overflow")?; + .insert(token_id.clone(), *amount_to_issue); + // But probably in this input we have a fee + mlt_amount = mlt_amount + .checked_add(utxo.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenTransferV1 { + ref token_id, + amount, + .. + }) => { + total_value_of_output_tokens.insert( + token_id.clone(), + total_value_of_output_tokens + .get(token_id) + .unwrap_or(&0) + .checked_add(*amount) + .ok_or("output value overflow")?, + ); + // But probably in this input we have a fee + mlt_amount = mlt_amount + .checked_add(utxo.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenBurnV1 { .. }) => { + // Nothing to do here because tokens no longer exist. + } + Some(OutputData::NftMintV1 { + ref token_id, + data_hash, + metadata_uri, + }) => { + // We have to check is this token already issued? + ensure!( + !PointerToIssueToken::::contains_key(token_id), + "token has already been issued" + ); + + // Check is this digital data unique? + ensure!( + !>::contains_key(data_hash), + "digital data has already been minted" + ); + + ensure!( + token_id != &TokenId::mlt(), + "unable to use mlt as a token id" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + // If NFT has just created we can't meet another NFT part here. + total_value_of_output_tokens.insert(token_id.clone(), 1); + } + None => { + mlt_amount = mlt_amount + .checked_add(utxo.value) + .ok_or("output value overflow")?; + } } } } @@ -147,8 +293,8 @@ macro_rules! 
implement_transaction_verifier { } pub fn new(tx: &TransactionFor) -> Result, &'static str> { - let all_inputs_map = Self::init_inputs(&tx); - let all_outputs_map = Self::init_outputs(&tx); + let all_inputs_map = Self::init_inputs(&tx)?; + let all_outputs_map = Self::init_outputs(&tx)?; let total_value_of_input_tokens = Self::init_total_value_of_input_tokens(&all_inputs_map)?; let total_value_of_output_tokens = @@ -207,10 +353,11 @@ macro_rules! implement_transaction_verifier { //WARNING workshop code has a bug here //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs //input_map.len() > transaction.inputs.len() //THIS IS WRONG - + let input_map: BTreeMap<_, ()> = + self.tx.inputs.iter().map(|input| (input.outpoint, ())).collect(); //we want map size and input size to be equal to ensure each is used only once ensure!( - self.all_inputs_map.len() == self.tx.inputs.len(), + input_map.len() == self.tx.inputs.len(), "each input should be used only once" ); Ok(()) @@ -229,56 +376,66 @@ macro_rules! implement_transaction_verifier { //map each output to btree to count unique elements //WARNING example code has a bug here //out_map.len() != transaction.outputs.len() //THIS IS WRONG + let out_map: BTreeMap<_, ()> = + self.tx.outputs.iter().map(|output| (output, ())).collect(); //check each output is defined only once ensure!( - self.all_outputs_map.len() == self.tx.outputs.len(), + out_map.len() == self.tx.outputs.len(), "each output should be used once" ); Ok(()) } pub fn checking_signatures(&self) -> Result<(), &'static str> { - for (index, (_, (input, input_utxo))) in self.all_inputs_map.iter().enumerate() { - let spending: Vec> = self - .all_inputs_map - .iter() - .map(|(_, (_, ref input_utxo))| input_utxo.clone()) - .collect(); - match &input_utxo.destination { - Destination::Pubkey(pubkey) => { - let msg = TransactionSigMsg::construct( - SigHash::default(), - &self.tx, - // todo: Check with Lukas is it correct or no - &spending[..], - index as u64, - u32::MAX, - ); - let ok = crate::sign::Public::Schnorr(*pubkey) - .parse_sig(&input.witness[..]) - .ok_or("bad signature format")? - .verify(&msg); - ensure!(ok, "signature must be valid"); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate spending of OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate spending of OP_CALL"); - } - Destination::ScriptHash(_hash) => { - let witness = input.witness.clone(); - let lock = input.lock.clone(); - crate::script::verify( - &self.tx, - // todo: Check with Lukas is it correct or no - &spending[..], - index as u64, - witness, - lock, - ) - .map_err(|_| "script verification failed")?; + for (index, (_, inputs_vec)) in self.all_inputs_map.iter().enumerate() { + for (sub_index, (input, input_utxo)) in inputs_vec.iter().enumerate() { + let spending_utxos: Vec> = self + .all_inputs_map + .iter() + .map(|(_, inputs_vec)| { + inputs_vec + .iter() + .map(|item| item.1.clone()) + .collect::>>() + }) + .flatten() + .collect(); + match &input_utxo.destination { + Destination::Pubkey(pubkey) => { + let msg = TransactionSigMsg::construct( + SigHash::default(), + &self.tx, + // todo: Check with Lukas is it correct or no + &spending_utxos[..], + (index + sub_index) as u64, + u32::MAX, + ); + let ok = crate::sign::Public::Schnorr(*pubkey) + .parse_sig(&input.witness[..]) + .ok_or("bad signature format")? 
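+ // the witness bytes parse into a Schnorr signature, which is then
+ // checked against the sighash message built over all spent UTXOs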
+ .verify(&msg); + ensure!(ok, "signature must be valid"); + } + Destination::CreatePP(_, _) => { + log::info!("TODO validate spending of OP_CREATE"); + } + Destination::CallPP(_, _) => { + log::info!("TODO validate spending of OP_CALL"); + } + Destination::ScriptHash(_hash) => { + let witness = input.witness.clone(); + let lock = input.lock.clone(); + crate::script::verify( + &self.tx, + // todo: Check with Lukas is it correct or no + &spending_utxos[..], + (index + sub_index) as u64, + witness, + lock, + ) + .map_err(|_| "script verification failed")?; + } } } } @@ -287,26 +444,55 @@ macro_rules! implement_transaction_verifier { } pub fn checking_amounts(&self) -> Result<(), &'static str> { - // if all spent UTXOs are available, check the math and signatures - let mut new_token_exist = false; - for (_, (token_id, input_value)) in - self.total_value_of_input_tokens.iter().enumerate() + let mut num_creations = 0; + for (_, (token_id, output_value)) in + self.total_value_of_output_tokens.iter().enumerate() { - match self.total_value_of_output_tokens.get(token_id) { - Some(output_value) => ensure!( + match self.total_value_of_input_tokens.get(token_id) { + Some(input_value) => ensure!( input_value >= &output_value, "output value must not exceed input value" ), None => { - // If the transaction has one an output with a new token ID - if new_token_exist { - frame_support::fail!("input for the token not found") - } else { - new_token_exist = true; + match self.all_outputs_map.get(token_id) { + Some(outputs_vec) => { + // We have not any input for this token, perhaps it's token creation + ensure!( + outputs_vec.len() == 1, + "attempting double creation token failed" + ); + match outputs_vec[0].data { + None + | Some(OutputData::TokenTransferV1 { .. }) + | Some(OutputData::TokenBurnV1 { .. }) => { + frame_support::fail!("input for the token not found") + } + Some(OutputData::NftMintV1 { .. }) + | Some(OutputData::TokenIssuanceV1 { .. }) => { + num_creations += 1; + continue; + } + } + } + None => unreachable!(), } } } } + // Check that enough fee + + let mlt = self + .total_value_of_input_tokens + .get(&TokenId::mlt()) + .ok_or("not found MLT fees")?; + if cfg!(test) { + ensure!(mlt >= &(num_creations * 10), "insufficient fee"); + } else { + ensure!( + mlt >= &(num_creations * crate::tokens::Mlt(100).to_munit()), + "insufficient fee" + ) + } Ok(()) } @@ -337,22 +523,16 @@ macro_rules! 
implement_transaction_verifier { }; // Check that outputs are valid - for (output_index, (token_id, output)) in self.all_outputs_map.iter().enumerate() { - match output.destination { - Destination::Pubkey(_) | Destination::ScriptHash(_) => { - if token_id == &TokenId::mlt() { - ensure!(output.value > 0, "output value must be nonzero"); - } - let hash = self.tx.outpoint(output_index as u64); - ensure!(!>::contains_key(hash), "output already exists"); - self.new_utxos.push(hash.as_fixed_bytes().to_vec()); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate OP_CALL"); + for (output_index, (token_id, outputs_vec)) in + self.all_outputs_map.iter().enumerate() + { + for (sub_index, output) in outputs_vec.iter().enumerate() { + let hash = self.tx.outpoint((output_index + sub_index) as u64); + ensure!(!>::contains_key(hash), "output already exists"); + if token_id == &TokenId::mlt() { + ensure!(output.value > 0, "output value must be nonzero"); } + self.new_utxos.push(hash.as_fixed_bytes().to_vec()); } } Ok(()) @@ -363,6 +543,11 @@ macro_rules! implement_transaction_verifier { } pub fn checking_tokens_issued(&self) -> Result<(), &'static str> { + // + // for (output_index, (token_id, output)) in self.all_outputs_map.iter().enumerate() { + // match output.destination { + // + // } unimplemented!() } From 958573ad84bfd02ee1445345d023a3af6ab01616 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 28 Oct 2021 11:20:44 +0300 Subject: [PATCH 18/53] Changed std::convert::TryFrom to sp_std. Signed-off-by: sinitcin --- pallets/utxo/src/tests.rs | 1 + pallets/utxo/src/verifier.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 3887cda..8cfe200 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -1138,6 +1138,7 @@ fn checking_immutable_tx_format() { fn checking_burn_tokens() { // todo: Burn tokens has not tested yet } + // Testing the compatibility of the old version with the new one #[test] fn checking_token_id() { diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs index 7a52bb9..e3bc47c 100644 --- a/pallets/utxo/src/verifier.rs +++ b/pallets/utxo/src/verifier.rs @@ -560,7 +560,7 @@ macro_rules! 
implement_transaction_verifier { } pub fn calculating_reward(&mut self) -> Result<(), &'static str> { - use std::convert::TryFrom; + use sp_std::convert::TryFrom; // Reward at the moment only in MLT self.reward = if self.total_value_of_input_tokens.contains_key(&TokenId::mlt()) && self.total_value_of_output_tokens.contains_key(&(TokenId::mlt())) From 4abe164fd21a0bfe832ae10aba87a7560499426d Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 28 Oct 2021 13:37:49 +0300 Subject: [PATCH 19/53] Fixed conflicts after merge Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 24 +++--------------------- pallets/utxo/src/tests.rs | 23 ++++++++++++++++++++++- 2 files changed, 25 insertions(+), 22 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 3808a42..023e0dc 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -46,16 +46,13 @@ use utxo_api::UtxoApi; #[frame_support::pallet] pub mod pallet { + use super::implement_transaction_verifier; pub use crate::script::{BlockTime, RawBlockTime}; - use crate::sign::{self, Scheme}; use crate::tokens::{NftDataHash, OutputData, TokenId, Value}; - // use crate::verifier::TransactionVerifier; - use super::implement_transaction_verifier; use bech32; use chainscript::Script; use codec::{Decode, Encode}; use core::marker::PhantomData; - // use frame_support::weights::PostDispatchInfo; use frame_support::{ dispatch::{DispatchResultWithPostInfo, Vec}, pallet_prelude::*, @@ -76,8 +73,6 @@ pub mod pallet { H256, H512, }; use sp_runtime::traits::AtLeast32Bit; - // use sp_runtime::DispatchErrorWithPostInfo; - implement_transaction_verifier!(); #[pallet::error] @@ -274,8 +269,8 @@ pub mod pallet { ) -> Self { Self { value, - header: 0, destination: Destination::CallPP(dest_account, fund, input), + data: None, } } @@ -538,7 +533,6 @@ pub mod pallet { Destination::CallPP(acct_id, fund, data) => { call::(caller, acct_id, hash, output.value, *fund, data); } - _ => {} } } @@ -605,7 +599,7 @@ pub mod pallet { Ok(().into()) } - #[pallet::weight(T::WeightInfo::send_to_address(16_u32.saturating_add(address.len() as u32)))] + #[pallet::weight(::WeightInfo::send_to_address(16_u32.saturating_add(address.len() as u32)))] pub fn send_to_address( origin: OriginFor, value: Value, @@ -699,15 +693,6 @@ pub mod pallet { } } -use frame_support::inherent::Vec; -use frame_support::pallet_prelude::DispatchResultWithPostInfo; -use sp_core::{ - crypto::UncheckedFrom, - Encode, {H256, H512}, -}; -use sp_runtime::sp_std::vec; -use utxo_api::UtxoApi; - impl crate::Pallet { pub fn send() -> u32 { 1337 @@ -731,9 +716,6 @@ impl crate::Pallet { } } - } -} - fn coin_picker(outpoints: &Vec) -> Result, DispatchError> { let mut inputs: Vec = Vec::new(); diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index d6270d2..1e25f69 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -16,7 +16,7 @@ // Author(s): C. 
Yap use crate::{ - mock::*, tokens::Value, Destination, RewardTotal, Transaction, TransactionInput, + mock::*, tokens::Value, BlockTime, Destination, RewardTotal, Transaction, TransactionInput, TransactionOutput, UtxoStore, }; use chainscript::{opcodes::all as opc, Builder}; @@ -581,6 +581,7 @@ fn checking_tokens_issuance() { let tx = Transaction { inputs: vec![input0], outputs: vec![output_new], + time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); let new_utxo_hash = tx.outpoint(0); @@ -628,6 +629,7 @@ fn checking_nft_mint() { let tx = Transaction { inputs: vec![input0], outputs: vec![output], + time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); let new_utxo_hash = tx.outpoint(0); @@ -673,6 +675,7 @@ fn checking_nft_unique() { }, TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), ], + time_lock: Default::default(), } .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); let new_utxo_hash = tx.outpoint(1); @@ -699,6 +702,7 @@ fn checking_nft_unique() { destination: Destination::Pubkey(alice_pub_key), data: Some(nft_data.clone()), }], + time_lock: Default::default(), } .sign_unchecked(&[new_utxo], 0, &alice_pub_key); // Spend @@ -734,6 +738,7 @@ fn checking_tokens_double_creation() { destination: Destination::Pubkey(alice_pub_key), data: Some(issuance_data.clone()), }], + time_lock: Default::default(), } .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); let new_utxo_hash = tx.outpoint(0); @@ -768,6 +773,7 @@ fn checking_tokens_double_creation() { destination: Destination::Pubkey(alice_pub_key), data: Some(issuance_data.clone()), }], + time_lock: Default::default(), } .sign_unchecked(&[new_utxo], 0, &alice_pub_key); // Spend @@ -794,6 +800,7 @@ fn checking_tokens_with_invalid_data() { let tx = Transaction { inputs: vec![input0], outputs: vec![output_new], + time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); let new_utxo_hash = tx.outpoint(0); @@ -934,6 +941,7 @@ fn checking_tokens_transferring() { }, ), ], + time_lock: Default::default(), } .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); @@ -948,6 +956,7 @@ fn checking_tokens_transferring() { let _tx = Transaction { inputs: vec![TransactionInput::new_empty(new_utxo_hash)], outputs: vec![TransactionOutput::new_pubkey(90, H256::from(karl_pub_key))], + time_lock: Default::default(), } .sign_unchecked(&[new_utxo.clone()], 0, &alice_pub_key); @@ -962,6 +971,7 @@ fn checking_tokens_transferring() { amount: 1_00_000_000, }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( @@ -980,6 +990,7 @@ fn checking_tokens_transferring() { amount: 1_000_000_001, }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( @@ -998,6 +1009,7 @@ fn checking_tokens_transferring() { amount: 1_000_000_000, }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( @@ -1016,6 +1028,7 @@ fn checking_tokens_transferring() { amount: 1_000_000_000, }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo], 0, &karl_pub_key); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); @@ -1044,6 +1057,7 @@ fn checking_nft_transferring() { }, ), ], + time_lock: Default::default(), } .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); @@ -1058,6 +1072,7 @@ fn checking_nft_transferring() { let _tx = 
Transaction { inputs: vec![TransactionInput::new_empty(new_utxo_hash)], outputs: vec![TransactionOutput::new_pubkey(90, H256::from(karl_pub_key))], + time_lock: Default::default(), } .sign_unchecked(&[new_utxo.clone()], 0, &alice_pub_key); @@ -1072,6 +1087,7 @@ fn checking_nft_transferring() { amount: 1_00_000_000, }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( @@ -1090,6 +1106,7 @@ fn checking_nft_transferring() { amount: 1_000_000_001, }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( @@ -1108,6 +1125,7 @@ fn checking_nft_transferring() { amount: 1_000_000_000, }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( @@ -1126,6 +1144,7 @@ fn checking_nft_transferring() { amount: 1, }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo], 0, &karl_pub_key); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); @@ -1158,6 +1177,7 @@ fn checking_tokens_creation_with_insufficient_fee() { }, ), ], + time_lock: Default::default(), } .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); @@ -1181,6 +1201,7 @@ fn checking_tokens_creation_with_insufficient_fee() { metadata_uri: "facebook.com".as_bytes().to_vec(), }, )], + time_lock: Default::default(), } .sign_unchecked(&[token_utxo], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( From 96f73fb12b0a83067fadf1510fc58e23d146e81e Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 28 Oct 2021 15:45:08 +0300 Subject: [PATCH 20/53] Cleaned up comments Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 10 ++++- pallets/utxo/src/tests.rs | 42 +++++---------------- pallets/utxo/src/tokens.rs | 8 +--- pallets/utxo/src/verifier.rs | 72 ++++++++++++++++++++++-------------- 4 files changed, 64 insertions(+), 68 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index a440cb1..4886118 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -355,7 +355,6 @@ pub mod pallet { #[pallet::getter(fn utxo_store)] pub(super) type UtxoStore = StorageMap<_, Identity, H256, TransactionOutputFor>; - #[pallet::storage] #[pallet::getter(fn pointer_to_issue_token)] pub(super) type PointerToIssueToken = @@ -511,17 +510,24 @@ pub mod pallet { Destination::Pubkey(_) | Destination::ScriptHash(_) => { let hash = tx.outpoint(index as u64); log::debug!("inserting to UtxoStore {:?} as key {:?}", output, hash); - >::insert(hash, Some(output)); + >::insert(hash, output); match &output.data { Some(OutputData::NftMintV1 { token_id, data_hash, .. }) => { + // We have to control that digital data of NFT is unique. + // Otherwise, anybody else might make a new NFT with exactly the same hash. >::insert(data_hash, hash); + // Also, we should provide possibility of find an output that by token_id. + // This output is a place where token was created. It allow us to check that a token or + // a NFT have not created yet. >::insert(token_id, hash); } Some(OutputData::TokenIssuanceV1 { token_id, .. }) => { + // For MLS-01 we save a relation between token_id and the output where + // token was created. 
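+ // (This is the same token_id -> creation-outpoint index the NFT branch
+ // fills in above; the verifier's duplicate-issuance checks reduce to a
+ // single contains_key lookup against it.)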
>::insert(token_id, hash); } _ => continue, diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 506e3d1..1499661 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -544,7 +544,7 @@ fn test_send_to_address() { }) } -// *Testing token creation: +// Testing token creation: use crate::tokens::{NftDataHash, TokenId}; use rand::Rng; @@ -561,19 +561,16 @@ fn build_random_vec(len: usize) -> Vec { // Simple creation of tokens fn checking_tokens_issuance() { execute_with_alice(|alice_pub_key| { - // Alice wants to send herself a new utxo of value 50. let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); let output_new = TransactionOutput { value: 0, destination: Destination::Pubkey(alice_pub_key), - // TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), data: Some(OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(first_input_hash), token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, - // Should be not more than 18 numbers number_of_decimals: 2, metadata_uri: "facebook.com".as_bytes().to_vec(), }), @@ -612,14 +609,12 @@ fn checking_tokens_issuance() { // Simple creation of NFT fn checking_nft_mint() { execute_with_alice(|alice_pub_key| { - // Alice wants to send herself a new utxo of value 50. let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); let data_hash = NftDataHash::Raw(vec![1, 2, 3, 4, 5]); let output = TransactionOutput { value: 0, destination: Destination::Pubkey(alice_pub_key), - // TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), data: Some(OutputData::NftMintV1 { token_id: TokenId::new_asset(first_input_hash), data_hash: data_hash.clone(), @@ -656,7 +651,6 @@ fn checking_nft_mint() { // NFT might be only unique, we can't create a few nft for one item fn checking_nft_unique() { execute_with_alice(|alice_pub_key| { - // Alice wants to send herself a new utxo of value 50. let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); @@ -688,7 +682,6 @@ fn checking_nft_unique() { assert!(UtxoStore::::contains_key(new_utxo_hash)); let new_utxo = tx.outputs[1].clone(); - // Alice wants to send herself a new utxo of value 50 again if let OutputData::NftMintV1 { ref mut token_id, .. } = nft_data @@ -718,7 +711,6 @@ fn checking_nft_unique() { // Creation a token with a pre-existing ID or re-creation of an already created token. fn checking_tokens_double_creation() { execute_with_alice(|alice_pub_key| { - // Alice wants to send herself a new utxo of value 50. let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); @@ -726,7 +718,6 @@ fn checking_tokens_double_creation() { token_id: TokenId::new_asset(first_input_hash), token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, - // Should be not more than 18 numbers number_of_decimals: 2, metadata_uri: "facebook.com".as_bytes().to_vec(), }; @@ -765,7 +756,6 @@ fn checking_tokens_double_creation() { let new_utxo_hash = tx.outpoint(0); let new_utxo = tx.outputs[0].clone(); - // Alice wants to send herself a new utxo of value 50 again let tx = Transaction { inputs: vec![TransactionInput::new_empty(new_utxo_hash.clone())], outputs: vec![TransactionOutput { @@ -790,7 +780,6 @@ fn checking_tokens_with_invalid_data() { macro_rules! 
test_tx { ($data: ident, $checking: tt, $err: expr) => { execute_with_alice(|alice_pub_key| { - // Alice wants to send herself a new utxo of value 50. let (utxo0, input0) = tx_input_gen_no_signature(); let output_new = TransactionOutput { value: 1, @@ -828,7 +817,6 @@ fn checking_tokens_with_invalid_data() { token_id: TokenId::mlt(), token_ticker: vec![], amount_to_issue: 0, - // Should be not more than 18 numbers number_of_decimals: 0, metadata_uri: vec![], }; @@ -920,7 +908,6 @@ fn checking_tokens_with_invalid_data() { fn checking_tokens_transferring() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { - // Round 1 let token_id = TokenId::new_asset(H256::random()); // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself let (utxo0, input0) = tx_input_gen_no_signature(); @@ -951,7 +938,6 @@ fn checking_tokens_transferring() { let token_utxo_hash = tx.outpoint(1); let token_utxo = tx.outputs[1].clone(); - // Round 2 // then send rest of the tokens to karl (proving that the first tx was successful) let _tx = Transaction { inputs: vec![TransactionInput::new_empty(new_utxo_hash)], @@ -960,7 +946,7 @@ fn checking_tokens_transferring() { } .sign_unchecked(&[new_utxo.clone()], 0, &alice_pub_key); - // Round 3 - Let's fail on wrong token id + // Let's fail on wrong token id let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_with_data( @@ -979,7 +965,7 @@ fn checking_tokens_transferring() { "input for the token not found" ); - // Round 3 - Let's fail on exceed token amount + // Let's fail on exceed token amount let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_with_data( @@ -998,7 +984,7 @@ fn checking_tokens_transferring() { "output value must not exceed input value" ); - // Round 3 - Let's send a big amount of MLT with the correct tokens + // Let's send a big amount of MLT with the correct tokens let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_with_data( @@ -1017,7 +1003,7 @@ fn checking_tokens_transferring() { "output value must not exceed input value" ); - // Round 3 - should be success + // should be success let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_with_data( @@ -1039,7 +1025,6 @@ fn checking_tokens_transferring() { fn checking_nft_transferring() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { - // Round 1 let token_id = TokenId::new_asset(H256::random()); // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself let (utxo0, input0) = tx_input_gen_no_signature(); @@ -1067,7 +1052,6 @@ fn checking_nft_transferring() { let token_utxo_hash = tx.outpoint(1); let token_utxo = tx.outputs[1].clone(); - // Round 2 // then send rest of the tokens to karl (proving that the first tx was successful) let _tx = Transaction { inputs: vec![TransactionInput::new_empty(new_utxo_hash)], @@ -1076,7 +1060,7 @@ fn checking_nft_transferring() { } .sign_unchecked(&[new_utxo.clone()], 0, &alice_pub_key); - // Round 3 - Let's fail on wrong token id + // Let's fail on wrong token id let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_with_data( @@ -1095,7 +1079,7 @@ fn checking_nft_transferring() { "input for the 
token not found" ); - // Round 3 - Let's fail on exceed token amount + // Let's fail on exceed token amount let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_with_data( @@ -1114,7 +1098,7 @@ fn checking_nft_transferring() { "output value must not exceed input value" ); - // Round 3 - Let's send a big amount of MLT with the correct tokens + // Let's send a big amount of MLT with the correct tokens let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_with_data( @@ -1133,7 +1117,7 @@ fn checking_nft_transferring() { "output value must not exceed input value" ); - // Round 3 - should be success + // should be success let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_with_data( @@ -1156,7 +1140,6 @@ fn checking_nft_transferring() { fn checking_tokens_creation_with_insufficient_fee() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { - // Round 1 let token_id = TokenId::new_asset(H256::random()); // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself let (utxo0, input0) = tx_input_gen_no_signature(); @@ -1171,7 +1154,6 @@ fn checking_tokens_creation_with_insufficient_fee() { token_id: token_id.clone(), token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, - // Should be not more than 18 numbers number_of_decimals: 2, metadata_uri: "facebook.com".as_bytes().to_vec(), }, @@ -1186,7 +1168,7 @@ fn checking_tokens_creation_with_insufficient_fee() { let token_utxo = tx.outputs[1].clone(); let tx = Transaction { inputs: vec![ - // Use here token issuance for examaple + // Use here token issuance for example TransactionInput::new_empty(token_utxo_hash), ], outputs: vec![TransactionOutput::new_with_data( @@ -1211,21 +1193,17 @@ fn checking_tokens_creation_with_insufficient_fee() { }); } -// Testing the compatibility of the old version with the new one #[test] fn checking_immutable_tx_format() { // todo: Testing the compatibility of the old version with the new one - not done yet } -// Testing burn tokens #[test] fn checking_burn_tokens() { // todo: Burn tokens has not tested yet } -// Testing the compatibility of the old version with the new one #[test] fn checking_token_id() { // todo: Testing token id - not done yet - // Token ID should be } diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index 1424cec..16e639c 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -1,10 +1,7 @@ #![cfg_attr(not(feature = "std"), no_std)] -// use crate::ss58_nostd::*; -// use crate::TransactionOutputFor; use crate::base58_nostd::{FromBase58, FromBase58Error, ToBase58}; use codec::{Decode, Encode}; -// use frame_support::sp_runtime::traits::{BlakeTwo256, Hash}; use frame_support::ensure; use frame_support::{dispatch::Vec, RuntimeDebug}; #[cfg(feature = "std")] @@ -27,7 +24,6 @@ impl Mlt { #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] enum TokenIdInner { - // todo: Need to check this MLT, Asset(H160), } @@ -47,7 +43,7 @@ impl TokenId { pub fn new_asset(first_input_hash: H256) -> TokenId { TokenId { - // We are loosing the first bytes of H256 over here + // We are loosing the first bytes of H256 over here and save 20 the last bytes inner: TokenIdInner::Asset(H160::from(first_input_hash)), } 
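// (H160::from(H256) keeps the low 20 bytes, i.e. bytes 12..32 of the hash,
// the same truncation Ethereum applies when deriving addresses from hashes)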
} @@ -103,7 +99,6 @@ impl AsRef<[u8]> for TokenId { } } -// We should implement it for Ss58Codec impl Default for TokenId { fn default() -> Self { TokenId::mlt() @@ -111,7 +106,6 @@ impl Default for TokenId { } #[cfg(feature = "std")] -// Unfortunately, the default codec can't be used with std impl Ss58Codec for TokenId {} #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs index 996944c..df18a89 100644 --- a/pallets/utxo/src/verifier.rs +++ b/pallets/utxo/src/verifier.rs @@ -1,25 +1,67 @@ -// DRY (Don't Repeat Yourself) #[macro_export] +// The Substrate has a big macros ecosystem. That could be easily broken if T:Config will using in +// other mod instead of lib.rs. Due to we have not enough time for quality decomposition lib.rs to +// I decide to move this part of the code in the macro. +// +// At the moment, this piece of code is rough. After the test-net, we will return to https://github.com/mintlayer/core/issues/81 +// and decide how it make it better. The main problem is that there are a lot of cycles. We should split into +// stages and use all of these checks as an array of functions that we will call on a couple of main cycles. +// But, at the moment it works and it is suitable for the test-net. + macro_rules! implement_transaction_verifier { () => { use crate::sign::TransactionSigMsg; use chainscript::sighash::SigHash; + // The main object, where stored temporary data about a tx pub struct TransactionVerifier<'a, T: Config> { + // Pointer to the transaction tx: &'a TransactionFor, + // Vec of inputs for each Token ID all_inputs_map: BTreeMap)>>, + // Vec of outputs for each Token ID all_outputs_map: BTreeMap>>, + // The total summary value of the tokens in inputs for each TokenID total_value_of_input_tokens: BTreeMap, + // The total summary value of the tokens in outputs for each TokenID total_value_of_output_tokens: BTreeMap, + // Vec of outputs that should be written new_utxos: Vec>, + // For more information have a look at checking_utxos_exists spended_utxos: Result< Vec::AccountId>>, Vec>, >, + // The total reward for this tx reward: u64, } impl TransactionVerifier<'_, T> { + pub fn new(tx: &TransactionFor) -> Result, &'static str> { + // Verify absolute time lock + ensure!( + tx.check_time_lock::(), + "Time lock restrictions not satisfied" + ); + // Init + let all_inputs_map = Self::init_inputs(&tx)?; + let all_outputs_map = Self::init_outputs(&tx)?; + let total_value_of_input_tokens = + Self::init_total_value_of_input_tokens(&all_inputs_map)?; + let total_value_of_output_tokens = + Self::init_total_value_of_output_tokens(&all_outputs_map)?; + Ok(TransactionVerifier { + tx, + all_inputs_map, + all_outputs_map, + total_value_of_input_tokens, + total_value_of_output_tokens, + new_utxos: Vec::new(), + spended_utxos: Ok(Vec::new()), + reward: 0, + }) + } + // Turn Vector into BTreeMap fn init_inputs( tx: &TransactionFor, @@ -292,32 +334,7 @@ macro_rules! 
implement_transaction_verifier { Ok(total_value_of_output_tokens) } - pub fn new(tx: &TransactionFor) -> Result, &'static str> { - // Verify absolute time lock - ensure!( - tx.check_time_lock::(), - "Time lock restrictions not satisfied" - ); - let all_inputs_map = Self::init_inputs(&tx)?; - let all_outputs_map = Self::init_outputs(&tx)?; - let total_value_of_input_tokens = - Self::init_total_value_of_input_tokens(&all_inputs_map)?; - let total_value_of_output_tokens = - Self::init_total_value_of_output_tokens(&all_outputs_map)?; - Ok(TransactionVerifier { - tx, - all_inputs_map, - all_outputs_map, - total_value_of_input_tokens, - total_value_of_output_tokens, - new_utxos: Vec::new(), - spended_utxos: Ok(Vec::new()), - reward: 0, - }) - } - fn get_token_id_from_input(outpoint: H256) -> Result { - //if let Some(input_utxo) = crate::UtxoStore::::get(&outpoint) { if let Some(input_utxo) = >::get(outpoint) { match input_utxo.data { Some(data) => data.id().ok_or("Token had burned or input incorrect"), @@ -491,14 +508,15 @@ macro_rules! implement_transaction_verifier { } } // Check that enough fee - let mlt = self .total_value_of_input_tokens .get(&TokenId::mlt()) .ok_or("not found MLT fees")?; if cfg!(test) { + // For tests we will use a small amount of MLT ensure!(mlt >= &(num_creations * 10), "insufficient fee"); } else { + // If we are not in tests, we should use 100 MLT for each token creation ensure!( mlt >= &(num_creations * crate::tokens::Mlt(100).to_munit()), "insufficient fee" From 1340d71685081be2f1ff1769e586db4badcdc34e Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 28 Oct 2021 17:06:45 +0300 Subject: [PATCH 21/53] Removed unnecessary enumerate() functions and fixed misprints Signed-off-by: sinitcin --- pallets/utxo/src/verifier.rs | 38 +++++++++++++++++++++++------------- 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs index df18a89..d3fa3b4 100644 --- a/pallets/utxo/src/verifier.rs +++ b/pallets/utxo/src/verifier.rs @@ -1,3 +1,20 @@ +// Copyright (c) 2021 RBB S.r.l +// opensource@mintlayer.org +// SPDX-License-Identifier: MIT +// Licensed under the MIT License; +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://spdx.org/licenses/MIT +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Author(s): A.Sinitsyn + #[macro_export] // The Substrate has a big macros ecosystem. That could be easily broken if T:Config will using in // other mod instead of lib.rs. Due to we have not enough time for quality decomposition lib.rs to @@ -7,7 +24,6 @@ // and decide how it make it better. The main problem is that there are a lot of cycles. We should split into // stages and use all of these checks as an array of functions that we will call on a couple of main cycles. // But, at the moment it works and it is suitable for the test-net. - macro_rules! implement_transaction_verifier { () => { use crate::sign::TransactionSigMsg; @@ -28,7 +44,7 @@ macro_rules! 
implement_transaction_verifier { // Vec of outputs that should be written new_utxos: Vec>, // For more information have a look at checking_utxos_exists - spended_utxos: Result< + spent_utxo: Result< Vec::AccountId>>, Vec>, >, @@ -57,7 +73,7 @@ macro_rules! implement_transaction_verifier { total_value_of_input_tokens, total_value_of_output_tokens, new_utxos: Vec::new(), - spended_utxos: Ok(Vec::new()), + spent_utxo: Ok(Vec::new()), reward: 0, }) } @@ -93,21 +109,17 @@ macro_rules! implement_transaction_verifier { fn init_outputs( tx: &TransactionFor, ) -> Result>>, &'static str> { - let mut count = 0; let mut output_map: BTreeMap>> = BTreeMap::new(); for output in &tx.outputs { let token_id = TransactionVerifier::<'_, T>::get_token_id_from_output(&output); if let Some(outputs) = output_map.get_mut(&token_id) { - count += 1; outputs.push(output.clone()); } else { - count += 1; output_map.insert(token_id, vec![output.clone()]); } } - ensure!(count == tx.outputs.len(), "can't load all outputs"); Ok(output_map) } @@ -119,7 +131,7 @@ macro_rules! implement_transaction_verifier { ) -> Result, &'static str> { let mut total_value_of_input_tokens: BTreeMap = BTreeMap::new(); let mut mlt_amount: Value = 0; - for (_, (_, input_vec)) in all_inputs_map.iter().enumerate() { + for (_, input_vec) in all_inputs_map.iter() { for (_, input_utxo) in input_vec { match &input_utxo.data { Some(OutputData::TokenIssuanceV1 { @@ -228,7 +240,7 @@ macro_rules! implement_transaction_verifier { ) -> Result, &'static str> { let mut total_value_of_output_tokens: BTreeMap = BTreeMap::new(); let mut mlt_amount: Value = 0; - for (_, (_, outputs_vec)) in all_outputs_map.iter().enumerate() { + for (_, outputs_vec) in all_outputs_map.iter() { for utxo in outputs_vec { // for x in all_outputs_map { match &utxo.data { @@ -473,9 +485,7 @@ macro_rules! implement_transaction_verifier { pub fn checking_amounts(&self) -> Result<(), &'static str> { let mut num_creations = 0; - for (_, (token_id, output_value)) in - self.total_value_of_output_tokens.iter().enumerate() - { + for (token_id, output_value) in self.total_value_of_output_tokens.iter() { match self.total_value_of_input_tokens.get(token_id) { Some(input_value) => ensure!( input_value >= &output_value, @@ -532,7 +542,7 @@ macro_rules! implement_transaction_verifier { // * Ok(utxos): a vector of UTXOs each input spends. // * Err(missing): a vector of outputs missing from the store - self.spended_utxos = { + self.spent_utxo = { let mut missing = Vec::new(); let mut resolved: Vec> = Vec::new(); @@ -594,7 +604,7 @@ macro_rules! 
implement_transaction_verifier { pub fn collect_result(&self) -> Result { Ok(ValidTransaction { priority: self.reward, - requires: self.spended_utxos.clone().map_or_else(|x| x, |_| Vec::new()), + requires: self.spent_utxo.clone().map_or_else(|x| x, |_| Vec::new()), provides: self.new_utxos.clone(), longevity: TransactionLongevity::MAX, propagate: true, From b1cdc5e31b4ffbcef6f2a84ff3daab4d8f3f223b Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 28 Oct 2021 18:15:53 +0300 Subject: [PATCH 22/53] Deleted AssetId Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 5 +---- pallets/utxo/src/mock.rs | 1 - runtime/src/lib.rs | 1 - 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 4886118..8e15881 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -72,7 +72,6 @@ pub mod pallet { testing::SR25519, H256, H512, }; - use sp_runtime::traits::AtLeast32Bit; implement_transaction_verifier!(); #[pallet::error] @@ -114,8 +113,6 @@ pub mod pallet { pub trait Config: frame_system::Config + pallet_timestamp::Config { type Event: From> + IsType<::Event>; - type AssetId: Parameter + AtLeast32Bit + Default + Copy; - /// The overarching call type. type Call: Dispatchable + From> + IsSubType> + Clone; @@ -283,7 +280,7 @@ pub mod pallet { } } - // Create a new output with a some data + // Create a new output with some data pub fn new_with_data(value: Value, pubkey: H256, data: OutputData) -> Self { let pubkey = sp_core::sr25519::Public::from_h256(pubkey); Self { diff --git a/pallets/utxo/src/mock.rs b/pallets/utxo/src/mock.rs index 6db7401..c4c5f7c 100644 --- a/pallets/utxo/src/mock.rs +++ b/pallets/utxo/src/mock.rs @@ -154,7 +154,6 @@ impl pallet_utxo::Config for Test { type Call = Call; type WeightInfo = crate::weights::WeightInfo; type ProgrammablePool = MockPool; - type AssetId = u64; fn authorities() -> Vec { Aura::authorities() diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index b0b0377..0634544 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -302,7 +302,6 @@ impl pallet_utxo::Config for Runtime { type Call = Call; type WeightInfo = pallet_utxo::weights::WeightInfo; type ProgrammablePool = pallet_pp::Pallet; - type AssetId = u64; fn authorities() -> Vec { Aura::authorities() From ae9e26f3a9d8bb7e5d042cd60bc6191b72174598 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Fri, 29 Oct 2021 16:44:25 +0300 Subject: [PATCH 23/53] Fixed names, comments, and removed impl ss58Codec Signed-off-by: sinitcin --- pallets/utxo/src/base58_nostd.rs | 7 +++---- pallets/utxo/src/tests.rs | 28 +++++++++++++++------------- pallets/utxo/src/tokens.rs | 32 +------------------------------- 3 files changed, 19 insertions(+), 48 deletions(-) diff --git a/pallets/utxo/src/base58_nostd.rs b/pallets/utxo/src/base58_nostd.rs index ae86080..da5ab10 100644 --- a/pallets/utxo/src/base58_nostd.rs +++ b/pallets/utxo/src/base58_nostd.rs @@ -7,7 +7,8 @@ use sp_std::vec; use sp_std::vec::Vec; -const ALPHABET: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; +const BASE58_ALPHABET: &'static [u8] = + b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; const B58_DIGITS_MAP: &'static [i8] = &[ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, @@ -56,8 +57,6 @@ impl ToBase58 for [u8] { carry += 256 * buffer[j] as u32; buffer[j] = (carry % 58) as u8; carry /= 58; - - // in original trezor implementation it was underflowing if j > 0 { j -= 1; } @@ -75,7 +74,7 @@ 
impl ToBase58 for [u8] { } while j < size { - result.push(ALPHABET[buffer[j] as usize]); + result.push(BASE58_ALPHABET[buffer[j] as usize]); j += 1; } diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 1499661..7f1370e 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -551,7 +551,7 @@ use rand::Rng; fn build_random_vec(len: usize) -> Vec { let mut rng = rand::thread_rng(); let mut vec = Vec::with_capacity(len); - for _ in 1..len { + for _ in 0..len { vec.push(rng.gen::()); } vec @@ -559,7 +559,7 @@ fn build_random_vec(len: usize) -> Vec { #[test] // Simple creation of tokens -fn checking_tokens_issuance() { +fn test_token_issuance() { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); @@ -583,7 +583,9 @@ fn checking_tokens_issuance() { .sign_unchecked(&[utxo0], 0, &alice_pub_key); let new_utxo_hash = tx.outpoint(0); let (_, init_utxo) = genesis_utxo(); - // Spend + // submit tx - in the test it makes a new UTXO. Checks before that this UTXO has not created yet. + // After calling `Utxo::spend`, we should check that Storages successfully changed. + // If it successfully wrote a new UTXO in the Storage, tx goes through all verifications correctly. assert!(UtxoStore::::contains_key(H256::from(init_utxo))); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); @@ -607,7 +609,7 @@ fn checking_tokens_issuance() { #[test] // Simple creation of NFT -fn checking_nft_mint() { +fn test_nft_mint() { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); @@ -649,7 +651,7 @@ fn checking_nft_mint() { #[test] // NFT might be only unique, we can't create a few nft for one item -fn checking_nft_unique() { +fn test_nft_unique() { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); @@ -709,7 +711,7 @@ fn checking_nft_unique() { #[test] // Creation a token with a pre-existing ID or re-creation of an already created token. -fn checking_tokens_double_creation() { +fn test_token_double_creation() { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); @@ -776,7 +778,7 @@ fn checking_tokens_double_creation() { } #[test] -fn checking_tokens_with_invalid_data() { +fn test_tokens_with_invalid_data() { macro_rules! 
test_tx { ($data: ident, $checking: tt, $err: expr) => { execute_with_alice(|alice_pub_key| { @@ -905,7 +907,7 @@ fn checking_tokens_with_invalid_data() { } #[test] -fn checking_tokens_transferring() { +fn test_tokens_transferring() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { let token_id = TokenId::new_asset(H256::random()); @@ -1022,7 +1024,7 @@ fn checking_tokens_transferring() { } #[test] -fn checking_nft_transferring() { +fn test_nft_transferring() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { let token_id = TokenId::new_asset(H256::random()); @@ -1137,7 +1139,7 @@ fn checking_nft_transferring() { #[test] // Test tx where Input with token and without MLT, output has token (without MLT) -fn checking_tokens_creation_with_insufficient_fee() { +fn test_token_creation_with_insufficient_fee() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { let token_id = TokenId::new_asset(H256::random()); @@ -1194,16 +1196,16 @@ fn checking_tokens_creation_with_insufficient_fee() { } #[test] -fn checking_immutable_tx_format() { +fn test_immutable_tx_format() { // todo: Testing the compatibility of the old version with the new one - not done yet } #[test] -fn checking_burn_tokens() { +fn test_burn_tokens() { // todo: Burn tokens has not tested yet } #[test] -fn checking_token_id() { +fn test_token_id() { // todo: Testing token id - not done yet } diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index 16e639c..fc89710 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -7,7 +7,6 @@ use frame_support::{dispatch::Vec, RuntimeDebug}; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; #[cfg(feature = "std")] -use sp_core::crypto::Ss58Codec; use sp_core::{H160, H256}; const LENGTH_BYTES_TO_REPRESENT_ID: usize = 20; @@ -43,7 +42,7 @@ impl TokenId { pub fn new_asset(first_input_hash: H256) -> TokenId { TokenId { - // We are loosing the first bytes of H256 over here and save 20 the last bytes + // We are loosing the first bytes of H256 over here and using 20 the last bytes inner: TokenIdInner::Asset(H160::from(first_input_hash)), } } @@ -79,35 +78,6 @@ impl TokenId { } } -// We should implement it for Ss58Codec -impl AsMut<[u8]> for TokenId { - fn as_mut(&mut self) -> &mut [u8] { - match self.inner { - TokenIdInner::MLT => &mut [], - TokenIdInner::Asset(ref mut hash) => hash.as_bytes_mut(), - } - } -} - -// We should implement it for Ss58Codec -impl AsRef<[u8]> for TokenId { - fn as_ref(&self) -> &[u8] { - match self.inner { - TokenIdInner::MLT => &[], - TokenIdInner::Asset(ref hash) => hash.as_ref(), - } - } -} - -impl Default for TokenId { - fn default() -> Self { - TokenId::mlt() - } -} - -#[cfg(feature = "std")] -impl Ss58Codec for TokenId {} - #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] pub enum OutputData { From 89be57bfbd5ed67eee483f36cbcea30d664eaca2 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 1 Nov 2021 18:46:16 +0300 Subject: [PATCH 24/53] Added base58Check functions and then created the lib base58_nostd in the core/libs folder Signed-off-by: sinitcin --- Cargo.lock | 9 + libs/base58_nostd/Cargo.toml | 20 ++ libs/base58_nostd/src/lib.rs | 337 +++++++++++++++++++++++++++++++ libs/chainscript/Cargo.toml | 2 +- pallets/utxo/Cargo.toml | 5 + pallets/utxo/src/base58_nostd.rs | 231 
--------------------- pallets/utxo/src/lib.rs | 5 +- pallets/utxo/src/tests.rs | 24 +-- pallets/utxo/src/tokens.rs | 5 +- 9 files changed, 389 insertions(+), 249 deletions(-) create mode 100644 libs/base58_nostd/Cargo.toml create mode 100644 libs/base58_nostd/src/lib.rs delete mode 100644 pallets/utxo/src/base58_nostd.rs diff --git a/Cargo.lock b/Cargo.lock index 6681f24..3209a24 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -396,6 +396,14 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83" +[[package]] +name = "base58_nostd" +version = "0.1.0" +dependencies = [ + "frame-support", + "sp-std", +] + [[package]] name = "base64" version = "0.12.3" @@ -4128,6 +4136,7 @@ dependencies = [ name = "pallet-utxo" version = "0.1.0" dependencies = [ + "base58_nostd", "bech32", "chainscript", "frame-benchmarking", diff --git a/libs/base58_nostd/Cargo.toml b/libs/base58_nostd/Cargo.toml new file mode 100644 index 0000000..719ef12 --- /dev/null +++ b/libs/base58_nostd/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "base58_nostd" +version = "0.1.0" +edition = "2021" +authors = ["Anton Sinitsyn "] +description = "Encodes and decodes the Bech32 format" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies.frame-support] +default-features = false +git = 'https://github.com/paritytech/substrate.git' +version = '4.0.0-dev' +branch = "master" + +[dependencies.sp-std] +default-features = false +git = 'https://github.com/paritytech/substrate.git' +version = '4.0.0-dev' +branch = "master" diff --git a/libs/base58_nostd/src/lib.rs b/libs/base58_nostd/src/lib.rs new file mode 100644 index 0000000..4958fb3 --- /dev/null +++ b/libs/base58_nostd/src/lib.rs @@ -0,0 +1,337 @@ +// Copyright (c) 2021 RBB S.r.l +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
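+//
+// Note on the framing used below: token IDs are Base58Check-encoded with a
+// fixed prefix, i.e. the byte string that gets base58-encoded is
+//   b"MLS" || data || sha2_256(sha2_256(b"MLS" || data))[..4]
+// (see `to_mls_b58check`); decoding verifies and then strips both the prefix
+// and the 4-byte checksum again (see `from_mls_b58check`).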
+// +// Based on https://github.com/trezor/trezor-crypto/blob/master/base58.c +// commit hash: c6e7d37 +// works only up to 128 bytes + +use frame_support::sp_io::hashing::sha2_256; +use sp_std::vec; +use sp_std::vec::Vec; + +pub const TOKEN_ID_PREFIX: &'static str = "MLS"; + +const BASE58_ALPHABET: &'static [u8] = + b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; + +const B58_DIGITS_MAP: &'static [i8] = &[ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, -1, -1, -1, -1, -1, -1, -1, 9, 10, 11, 12, 13, 14, 15, 16, -1, + 17, 18, 19, 20, 21, -1, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, -1, -1, -1, -1, -1, -1, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, -1, -1, -1, -1, -1, +]; + +/// Errors that can occur when decoding base58 encoded string. +#[derive(Debug, PartialEq)] +pub enum FromBase58Error { + /// The input contained a character which is not a part of the base58 format. + InvalidBase58Character(char, usize), + /// The input had invalid length. + InvalidBase58Length, + /// Base58 string contains invalid checksum + InvalidChecksum, + /// The input has invalid prefix. + InvalidPrefix, +} + +/// A trait for converting a value to base58 encoded string. +pub trait ToBase58 { + /// Converts a value of `self` to a base58 value, returning the owned string. + fn to_base58(&self) -> Vec; + /// Converts a value of `self` to a base58 value with checksum applied, returning the owned string. + fn to_mls_b58check(&self) -> Vec; +} + +/// A trait for converting base58 encoded values. +pub trait FromBase58 { + /// Convert a value of `self`, interpreted as base58 encoded data, into an owned vector of bytes, returning a vector. + fn from_base58(&self) -> Result, FromBase58Error>; + /// Converts a value of `self`, interpreted as base58 encoded data with checksum applied, into an owned vector of bytes, + /// returning a vector. 
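+ /// On success, the `TOKEN_ID_PREFIX` bytes and the 4-byte checksum have been verified and stripped.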
+ fn from_mls_b58check(&self) -> Result<Vec<u8>, FromBase58Error>; +} + +fn checksum(payload: &[u8]) -> Vec<u8> { + let sha256 = sha2_256(payload); + let doubled_sha256 = sha2_256(&sha256); + // Return the first 4 bytes of sha256(sha256(payload)) + Vec::from(&doubled_sha256[..4]) +} + +fn encode_to_base58(payload: &[u8]) -> Vec<u8> { + let zcount = payload.iter().take_while(|x| **x == 0).count(); + let size = (payload.len() - zcount) * 138 / 100 + 1; + let mut buffer = vec![0u8; size]; + let mut i = zcount; + let mut high = size - 1; + while i < payload.len() { + let mut carry = payload[i] as u32; + let mut j = size - 1; + + while j > high || carry != 0 { + carry += 256 * buffer[j] as u32; + buffer[j] = (carry % 58) as u8; + carry /= 58; + if j > 0 { + j -= 1; + } + } + i += 1; + high = j; + } + let mut j = buffer.iter().take_while(|x| **x == 0).count(); + let mut result = Vec::new(); + for _ in 0..zcount { + result.push(b'1'); + } + while j < size { + result.push(BASE58_ALPHABET[buffer[j] as usize]); + j += 1; + } + result +} + +fn decode_from_base58(payload: &str) -> Result<Vec<u8>, FromBase58Error> { + let mut bin = [0u8; 132]; + let mut out = [0u32; (132 + 3) / 4]; + let bytesleft = (bin.len() % 4) as u8; + let zeromask = match bytesleft { + 0 => 0u32, + _ => 0xffffffff << (bytesleft * 8), + }; + + let zcount = payload.chars().take_while(|x| *x == '1').count(); + let mut i = zcount; + let b58: Vec<u8> = payload.bytes().collect(); + + while i < payload.len() { + if (b58[i] & 0x80) != 0 { + // High-bit set on invalid digit + return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); + } + + if B58_DIGITS_MAP[b58[i] as usize] == -1 { + // Invalid base58 digit + return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); + } + + let mut c = B58_DIGITS_MAP[b58[i] as usize] as u64; + let mut j = out.len(); + while j != 0 { + j -= 1; + let t = out[j] as u64 * 58 + c; + c = (t & 0x3f00000000) >> 32; + out[j] = (t & 0xffffffff) as u32; + } + + if c != 0 { + // Output number too big (carry to the next int32) + return Err(FromBase58Error::InvalidBase58Length); + } + + if (out[0] & zeromask) != 0 { + // Output number too big (last int32 filled too far) + return Err(FromBase58Error::InvalidBase58Length); + } + + i += 1; + } + + let mut i = 1; + let mut j = 0; + + bin[0] = match bytesleft { + 3 => ((out[0] & 0xff0000) >> 16) as u8, + 2 => ((out[0] & 0xff00) >> 8) as u8, + 1 => { + j = 1; + (out[0] & 0xff) as u8 + } + _ => { + i = 0; + bin[0] + } + }; + + while j < out.len() { + bin[i] = ((out[j] >> 0x18) & 0xff) as u8; + bin[i + 1] = ((out[j] >> 0x10) & 0xff) as u8; + bin[i + 2] = ((out[j] >> 8) & 0xff) as u8; + bin[i + 3] = ((out[j] >> 0) & 0xff) as u8; + i += 4; + j += 1; + } + + let leading_zeros = bin.iter().take_while(|x| **x == 0).count(); + Ok(bin[leading_zeros - zcount..].to_vec()) +} + +impl FromBase58 for str { + fn from_base58(&self) -> Result<Vec<u8>, FromBase58Error> { + decode_from_base58(self) + } + + fn from_mls_b58check(&self) -> Result<Vec<u8>, FromBase58Error> { + let mut payload: Vec<u8> = self.from_base58()?; + // Reject payloads too short to hold the prefix plus the 4-byte checksum, before any slicing. + if payload.len() < TOKEN_ID_PREFIX.len() + 4 { + return Err(FromBase58Error::InvalidChecksum); + } + // Check that the payload starts with the expected prefix + if &payload[0..TOKEN_ID_PREFIX.len()] != TOKEN_ID_PREFIX.as_bytes() { + Err(FromBase58Error::InvalidPrefix)?; + } + let checksum_index = payload.len() - 4; + let provided_checksum = payload.split_off(checksum_index); + let checksum = checksum(&payload)[..4].to_vec(); + if checksum != provided_checksum { + return Err(FromBase58Error::InvalidChecksum); + } + 
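// Prefix and checksum are both valid here: strip the prefix and return the raw token bytes. + 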
Ok(payload[TOKEN_ID_PREFIX.len()..].to_vec()) + } +} + +impl ToBase58 for [u8] { + fn to_base58(&self) -> Vec { + encode_to_base58(self) + } + + fn to_mls_b58check(&self) -> Vec { + let mut payload = TOKEN_ID_PREFIX.as_bytes().to_vec(); + payload.extend(self); + payload.extend(checksum(payload.as_slice())); + encode_to_base58(payload.as_slice()) + } +} + +#[cfg(test)] +mod tests { + use super::{FromBase58, FromBase58Error, ToBase58}; + + #[test] + fn test_from_base58_basic() { + assert_eq!("".from_base58().unwrap(), b""); + assert_eq!("Z".from_base58().unwrap(), &[32]); + assert_eq!("n".from_base58().unwrap(), &[45]); + assert_eq!("q".from_base58().unwrap(), &[48]); + assert_eq!("r".from_base58().unwrap(), &[49]); + assert_eq!("z".from_base58().unwrap(), &[57]); + assert_eq!("4SU".from_base58().unwrap(), &[45, 49]); + assert_eq!("4k8".from_base58().unwrap(), &[49, 49]); + assert_eq!("ZiCa".from_base58().unwrap(), &[97, 98, 99]); + assert_eq!("3mJr7AoUXx2Wqd".from_base58().unwrap(), b"1234598760"); + assert_eq!( + "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".from_base58().unwrap(), + b"abcdefghijklmnopqrstuvwxyz" + ); + } + + #[test] + fn test_from_base58_invalid_char() { + assert!("0".from_base58().is_err()); + assert!("O".from_base58().is_err()); + assert!("I".from_base58().is_err()); + assert!("l".from_base58().is_err()); + assert!("3mJr0".from_base58().is_err()); + assert!("O3yxU".from_base58().is_err()); + assert!("3sNI".from_base58().is_err()); + assert!("4kl8".from_base58().is_err()); + assert!("s!5<".from_base58().is_err()); + assert!("t$@mX<*".from_base58().is_err()); + } + + #[test] + fn test_from_base58_initial_zeros() { + assert_eq!("1ZiCa".from_base58().unwrap(), b"\0abc"); + assert_eq!("11ZiCa".from_base58().unwrap(), b"\0\0abc"); + assert_eq!("111ZiCa".from_base58().unwrap(), b"\0\0\0abc"); + assert_eq!("1111ZiCa".from_base58().unwrap(), b"\0\0\0\0abc"); + } + + #[test] + fn test_to_base58_basic() { + assert_eq!(b"".to_base58(), "".as_bytes()); + assert_eq!(&[32].to_base58(), "Z".as_bytes()); + assert_eq!(&[45].to_base58(), "n".as_bytes()); + assert_eq!(&[48].to_base58(), "q".as_bytes()); + assert_eq!(&[49].to_base58(), "r".as_bytes()); + assert_eq!(&[57].to_base58(), "z".as_bytes()); + assert_eq!(&[45, 49].to_base58(), "4SU".as_bytes()); + assert_eq!(&[49, 49].to_base58(), "4k8".as_bytes()); + assert_eq!(b"abc".to_base58(), "ZiCa".as_bytes()); + assert_eq!(b"1234598760".to_base58(), "3mJr7AoUXx2Wqd".as_bytes()); + assert_eq!( + b"abcdefghijklmnopqrstuvwxyz".to_base58(), + "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".as_bytes() + ); + } + + #[test] + fn test_to_base58_initial_zeros() { + assert_eq!(b"\0abc".to_base58(), "1ZiCa".as_bytes()); + assert_eq!(b"\0\0abc".to_base58(), "11ZiCa".as_bytes()); + assert_eq!(b"\0\0\0abc".to_base58(), "111ZiCa".as_bytes()); + assert_eq!(b"\0\0\0\0abc".to_base58(), "1111ZiCa".as_bytes()); + } + + #[test] + fn to_base58check() { + assert_eq!(b"".to_mls_b58check(), "1Wh4bh".as_bytes()); + assert_eq!(b"hello".to_mls_b58check(), "12L5B5yqsf7vwb".as_bytes()); + } + + #[test] + fn from_base58check() { + assert_eq!( + "25TfmUELb1jGfVSAbKsV4fAVTKAn".from_mls_b58check().unwrap(), + b"SOME_TOKEN_ID".to_vec() + ); + } + + #[test] + fn from_base58check_with_invalid_checksum() { + assert_eq!( + "j8YiVRUK8wrJ2wzLH7W6222".from_mls_b58check(), + Err(FromBase58Error::InvalidChecksum) + ); + } + + #[test] + #[should_panic] + fn from_base58check_with_invalid_length() { + "Wh4bh".from_mls_b58check().unwrap(); + } + + #[test] + fn base58check_loop() { + // Encode some bytes 
into b58_check + let enc = "SOME_TOKEN_ID".as_bytes().to_mls_b58check(); + let enc = std::str::from_utf8(enc.as_slice()).unwrap(); + dbg!(enc); + + // decode back + let dec = enc.from_mls_b58check().unwrap(); + assert_eq!( + std::str::from_utf8(dec.as_slice()).unwrap(), + "SOME_TOKEN_ID" + ); + } +} diff --git a/libs/chainscript/Cargo.toml b/libs/chainscript/Cargo.toml index c49c2e5..7dd8c78 100644 --- a/libs/chainscript/Cargo.toml +++ b/libs/chainscript/Cargo.toml @@ -1,5 +1,5 @@ [package] -authors = ['Lukas Kuklinek '] +authors = ['Lukas Kuklinek '] description = 'An interpreter for bitcoin script and its dialects' edition = '2018' name = 'chainscript' diff --git a/pallets/utxo/Cargo.toml b/pallets/utxo/Cargo.toml index c055676..078e552 100644 --- a/pallets/utxo/Cargo.toml +++ b/pallets/utxo/Cargo.toml @@ -32,6 +32,11 @@ default-features = false path = '../../libs/bech32' version = '0.8.1' +[dependencies.base58_nostd] +default-features = false +path = '../../libs/base58_nostd' +version = '0.1.0' + [dependencies.chainscript] default-features = false path = '../../libs/chainscript' diff --git a/pallets/utxo/src/base58_nostd.rs b/pallets/utxo/src/base58_nostd.rs deleted file mode 100644 index da5ab10..0000000 --- a/pallets/utxo/src/base58_nostd.rs +++ /dev/null @@ -1,231 +0,0 @@ -//! Base58-to-text encoding -//! -//! Based on https://github.com/trezor/trezor-crypto/blob/master/base58.c -//! commit hash: c6e7d37 -//! works only up to 128 bytes - -use sp_std::vec; -use sp_std::vec::Vec; - -const BASE58_ALPHABET: &'static [u8] = - b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; - -const B58_DIGITS_MAP: &'static [i8] = &[ - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, -1, -1, -1, -1, -1, -1, -1, 9, 10, 11, 12, 13, 14, 15, 16, -1, - 17, 18, 19, 20, 21, -1, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, -1, -1, -1, -1, -1, -1, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, -1, -1, -1, -1, -1, -]; - -/// Errors that can occur when decoding base58 encoded string. -#[derive(Debug, PartialEq)] -pub enum FromBase58Error { - /// The input contained a character which is not a part of the base58 format. - InvalidBase58Character(char, usize), - /// The input had invalid length. - InvalidBase58Length, -} - -/// A trait for converting a value to base58 encoded string. -pub trait ToBase58 { - /// Converts a value of `self` to a base58 value, returning the owned string. - fn to_base58(&self) -> Vec; -} - -/// A trait for converting base58 encoded values. -pub trait FromBase58 { - /// Convert a value of `self`, interpreted as base58 encoded data, into an owned vector of bytes, returning a vector. 
- fn from_base58(&self) -> Result, FromBase58Error>; -} - -impl ToBase58 for [u8] { - fn to_base58(&self) -> Vec { - let zcount = self.iter().take_while(|x| **x == 0).count(); - let size = (self.len() - zcount) * 138 / 100 + 1; - let mut buffer = vec![0u8; size]; - - let mut i = zcount; - let mut high = size - 1; - - while i < self.len() { - let mut carry = self[i] as u32; - let mut j = size - 1; - - while j > high || carry != 0 { - carry += 256 * buffer[j] as u32; - buffer[j] = (carry % 58) as u8; - carry /= 58; - if j > 0 { - j -= 1; - } - } - - i += 1; - high = j; - } - - let mut j = buffer.iter().take_while(|x| **x == 0).count(); - - let mut result = Vec::new(); - for _ in 0..zcount { - result.push(b'1'); - } - - while j < size { - result.push(BASE58_ALPHABET[buffer[j] as usize]); - j += 1; - } - - result - } -} - -impl FromBase58 for str { - fn from_base58(&self) -> Result, FromBase58Error> { - let mut bin = [0u8; 132]; - let mut out = [0u32; (132 + 3) / 4]; - let bytesleft = (bin.len() % 4) as u8; - let zeromask = match bytesleft { - 0 => 0u32, - _ => 0xffffffff << (bytesleft * 8), - }; - - let zcount = self.chars().take_while(|x| *x == '1').count(); - let mut i = zcount; - let b58: Vec = self.bytes().collect(); - - while i < self.len() { - if (b58[i] & 0x80) != 0 { - // High-bit set on invalid digit - return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); - } - - if B58_DIGITS_MAP[b58[i] as usize] == -1 { - // // Invalid base58 digit - return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); - } - - let mut c = B58_DIGITS_MAP[b58[i] as usize] as u64; - let mut j = out.len(); - while j != 0 { - j -= 1; - let t = out[j] as u64 * 58 + c; - c = (t & 0x3f00000000) >> 32; - out[j] = (t & 0xffffffff) as u32; - } - - if c != 0 { - // Output number too big (carry to the next int32) - return Err(FromBase58Error::InvalidBase58Length); - } - - if (out[0] & zeromask) != 0 { - // Output number too big (last int32 filled too far) - return Err(FromBase58Error::InvalidBase58Length); - } - - i += 1; - } - - let mut i = 1; - let mut j = 0; - - bin[0] = match bytesleft { - 3 => ((out[0] & 0xff0000) >> 16) as u8, - 2 => ((out[0] & 0xff00) >> 8) as u8, - 1 => { - j = 1; - (out[0] & 0xff) as u8 - } - _ => { - i = 0; - bin[0] - } - }; - - while j < out.len() { - bin[i] = ((out[j] >> 0x18) & 0xff) as u8; - bin[i + 1] = ((out[j] >> 0x10) & 0xff) as u8; - bin[i + 2] = ((out[j] >> 8) & 0xff) as u8; - bin[i + 3] = ((out[j] >> 0) & 0xff) as u8; - i += 4; - j += 1; - } - - let leading_zeros = bin.iter().take_while(|x| **x == 0).count(); - Ok(bin[leading_zeros - zcount..].to_vec()) - } -} - -#[cfg(test)] -mod tests { - use super::{FromBase58, ToBase58}; - - #[test] - fn test_from_base58_basic() { - assert_eq!("".from_base58().unwrap(), b""); - assert_eq!("Z".from_base58().unwrap(), &[32]); - assert_eq!("n".from_base58().unwrap(), &[45]); - assert_eq!("q".from_base58().unwrap(), &[48]); - assert_eq!("r".from_base58().unwrap(), &[49]); - assert_eq!("z".from_base58().unwrap(), &[57]); - assert_eq!("4SU".from_base58().unwrap(), &[45, 49]); - assert_eq!("4k8".from_base58().unwrap(), &[49, 49]); - assert_eq!("ZiCa".from_base58().unwrap(), &[97, 98, 99]); - assert_eq!("3mJr7AoUXx2Wqd".from_base58().unwrap(), b"1234598760"); - assert_eq!( - "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".from_base58().unwrap(), - b"abcdefghijklmnopqrstuvwxyz" - ); - } - - #[test] - fn test_from_base58_invalid_char() { - assert!("0".from_base58().is_err()); - assert!("O".from_base58().is_err()); - 
assert!("I".from_base58().is_err()); - assert!("l".from_base58().is_err()); - assert!("3mJr0".from_base58().is_err()); - assert!("O3yxU".from_base58().is_err()); - assert!("3sNI".from_base58().is_err()); - assert!("4kl8".from_base58().is_err()); - assert!("s!5<".from_base58().is_err()); - assert!("t$@mX<*".from_base58().is_err()); - } - - #[test] - fn test_from_base58_initial_zeros() { - assert_eq!("1ZiCa".from_base58().unwrap(), b"\0abc"); - assert_eq!("11ZiCa".from_base58().unwrap(), b"\0\0abc"); - assert_eq!("111ZiCa".from_base58().unwrap(), b"\0\0\0abc"); - assert_eq!("1111ZiCa".from_base58().unwrap(), b"\0\0\0\0abc"); - } - - #[test] - fn test_to_base58_basic() { - assert_eq!(b"".to_base58(), "".as_bytes()); - assert_eq!(&[32].to_base58(), "Z".as_bytes()); - assert_eq!(&[45].to_base58(), "n".as_bytes()); - assert_eq!(&[48].to_base58(), "q".as_bytes()); - assert_eq!(&[49].to_base58(), "r".as_bytes()); - assert_eq!(&[57].to_base58(), "z".as_bytes()); - assert_eq!(&[45, 49].to_base58(), "4SU".as_bytes()); - assert_eq!(&[49, 49].to_base58(), "4k8".as_bytes()); - assert_eq!(b"abc".to_base58(), "ZiCa".as_bytes()); - assert_eq!(b"1234598760".to_base58(), "3mJr7AoUXx2Wqd".as_bytes()); - assert_eq!( - b"abcdefghijklmnopqrstuvwxyz".to_base58(), - "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".as_bytes() - ); - } - - #[test] - fn test_to_base58_initial_zeros() { - assert_eq!(b"\0abc".to_base58(), "1ZiCa".as_bytes()); - assert_eq!(b"\0\0abc".to_base58(), "11ZiCa".as_bytes()); - assert_eq!(b"\0\0\0abc".to_base58(), "111ZiCa".as_bytes()); - assert_eq!(b"\0\0\0\0abc".to_base58(), "1111ZiCa".as_bytes()); - } -} diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 8e15881..4ebc902 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -19,7 +19,6 @@ pub use pallet::*; -mod base58_nostd; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; #[cfg(test)] @@ -280,8 +279,8 @@ pub mod pallet { } } - // Create a new output with some data - pub fn new_with_data(value: Value, pubkey: H256, data: OutputData) -> Self { + /// Create a new output with the data field. This is going to be paid to a public key. 
+ pub fn new_p2pk_with_data(value: Value, pubkey: H256, data: OutputData) -> Self { let pubkey = sp_core::sr25519::Public::from_h256(pubkey); Self { value, diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 7f1370e..3b4a44d 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -917,7 +917,7 @@ fn test_tokens_transferring() { inputs: vec![input0], outputs: vec![ TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), - TransactionOutput::new_with_data( + TransactionOutput::new_p2pk_with_data( 10, H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { @@ -951,7 +951,7 @@ fn test_tokens_transferring() { // Let's fail on wrong token id let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 0, H256::from(alice_pub_key), OutputData::TokenTransferV1 { @@ -970,7 +970,7 @@ fn test_tokens_transferring() { // Let's fail on exceed token amount let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 0, H256::from(alice_pub_key), OutputData::TokenTransferV1 { @@ -989,7 +989,7 @@ fn test_tokens_transferring() { // Let's send a big amount of MLT with the correct tokens let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 1_000_000_000, H256::from(alice_pub_key), OutputData::TokenTransferV1 { @@ -1008,7 +1008,7 @@ fn test_tokens_transferring() { // should be success let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 0, H256::from(alice_pub_key), OutputData::TokenTransferV1 { @@ -1034,7 +1034,7 @@ fn test_nft_transferring() { inputs: vec![input0], outputs: vec![ TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), - TransactionOutput::new_with_data( + TransactionOutput::new_p2pk_with_data( 10, H256::from(karl_pub_key), OutputData::NftMintV1 { @@ -1065,7 +1065,7 @@ fn test_nft_transferring() { // Let's fail on wrong token id let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 0, H256::from(alice_pub_key), OutputData::TokenTransferV1 { @@ -1084,7 +1084,7 @@ fn test_nft_transferring() { // Let's fail on exceed token amount let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 0, H256::from(alice_pub_key), OutputData::TokenTransferV1 { @@ -1103,7 +1103,7 @@ fn test_nft_transferring() { // Let's send a big amount of MLT with the correct tokens let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 1_000_000_000, H256::from(alice_pub_key), OutputData::TokenTransferV1 { @@ -1122,7 +1122,7 @@ fn test_nft_transferring() { // should be success let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 0, 
H256::from(alice_pub_key), OutputData::TokenTransferV1 { @@ -1149,7 +1149,7 @@ fn test_token_creation_with_insufficient_fee() { inputs: vec![input0], outputs: vec![ TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), - TransactionOutput::new_with_data( + TransactionOutput::new_p2pk_with_data( 0, H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { @@ -1173,7 +1173,7 @@ fn test_token_creation_with_insufficient_fee() { // Use here token issuance for example TransactionInput::new_empty(token_utxo_hash), ], - outputs: vec![TransactionOutput::new_with_data( + outputs: vec![TransactionOutput::new_p2pk_with_data( 0, H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index fc89710..db202c9 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -1,12 +1,11 @@ #![cfg_attr(not(feature = "std"), no_std)] -use crate::base58_nostd::{FromBase58, FromBase58Error, ToBase58}; +use base58_nostd::{FromBase58, FromBase58Error, ToBase58}; use codec::{Decode, Encode}; use frame_support::ensure; use frame_support::{dispatch::Vec, RuntimeDebug}; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -#[cfg(feature = "std")] use sp_core::{H160, H256}; const LENGTH_BYTES_TO_REPRESENT_ID: usize = 20; @@ -68,6 +67,8 @@ impl TokenId { let data = data.from_base58().map_err(|x| match x { FromBase58Error::InvalidBase58Character { .. } => "Invalid Base58 character", FromBase58Error::InvalidBase58Length => "Invalid Base58 length", + FromBase58Error::InvalidChecksum => "Invalid checksum", + FromBase58Error::InvalidPrefix => "Invalid token id", })?; let hash = TokenId::hash160_from_bytes(data.as_slice())?; From 008f7a31ebe85b5045e11c84fe95229b8585d199 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 2 Nov 2021 15:40:04 +0300 Subject: [PATCH 25/53] Removed verifier and TokenId::mlt(), changed names, split tests for token issuance Signed-off-by: sinitcin --- libs/base58_nostd/src/lib.rs | 22 +- pallets/utxo/src/lib.rs | 445 +++++++++++++++++++++++-- pallets/utxo/src/mock.rs | 16 +- pallets/utxo/src/tests.rs | 155 ++++----- pallets/utxo/src/tokens.rs | 6 - pallets/utxo/src/verifier.rs | 615 ----------------------------------- 6 files changed, 533 insertions(+), 726 deletions(-) delete mode 100644 pallets/utxo/src/verifier.rs diff --git a/libs/base58_nostd/src/lib.rs b/libs/base58_nostd/src/lib.rs index 4958fb3..df6c76a 100644 --- a/libs/base58_nostd/src/lib.rs +++ b/libs/base58_nostd/src/lib.rs @@ -20,8 +20,11 @@ // // Based on https://github.com/trezor/trezor-crypto/blob/master/base58.c // commit hash: c6e7d37 +// license: MIT // works only up to 128 bytes +#![cfg_attr(not(feature = "std"), no_std)] + use frame_support::sp_io::hashing::sha2_256; use sp_std::vec; use sp_std::vec::Vec; @@ -294,8 +297,10 @@ mod tests { #[test] fn to_base58check() { - assert_eq!(b"".to_mls_b58check(), "1Wh4bh".as_bytes()); - assert_eq!(b"hello".to_mls_b58check(), "12L5B5yqsf7vwb".as_bytes()); + assert_eq!( + b"SOME_TOKEN_ID".to_mls_b58check(), + "25TfmUELb1jGfVSAbKsV4fAVTKAn".as_bytes() + ); } #[test] @@ -322,16 +327,13 @@ mod tests { #[test] fn base58check_loop() { - // Encode some bytes into b58_check - let enc = "SOME_TOKEN_ID".as_bytes().to_mls_b58check(); - let enc = std::str::from_utf8(enc.as_slice()).unwrap(); - dbg!(enc); + let text = + "To be, or not to be, that is the Question: Whether ’tis Nobler in the mind to suffer."; + let enc = text.as_bytes().to_mls_b58check(); + let enc = 
sp_std::str::from_utf8(enc.as_slice()).unwrap(); // decode back let dec = enc.from_mls_b58check().unwrap(); - assert_eq!( - std::str::from_utf8(dec.as_slice()).unwrap(), - "SOME_TOKEN_ID" - ); + assert_eq!(sp_std::str::from_utf8(dec.as_slice()).unwrap(), text); } } diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 4ebc902..c3f0a04 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -28,8 +28,6 @@ mod sign; #[cfg(test)] mod tests; pub mod tokens; -#[macro_use] -pub mod verifier; pub mod weights; use chainscript::Builder; @@ -45,8 +43,8 @@ use utxo_api::UtxoApi; #[frame_support::pallet] pub mod pallet { - use super::implement_transaction_verifier; pub use crate::script::{BlockTime, RawBlockTime}; + use crate::sign::{self, Scheme}; use crate::tokens::{NftDataHash, OutputData, TokenId, Value}; use bech32; use chainscript::Script; @@ -71,7 +69,6 @@ pub mod pallet { testing::SR25519, H256, H512, }; - implement_transaction_verifier!(); #[pallet::error] pub enum Error { @@ -353,7 +350,7 @@ pub mod pallet { #[pallet::storage] #[pallet::getter(fn pointer_to_issue_token)] - pub(super) type PointerToIssueToken = + pub(super) type TokenIssuanceTransactions = StorageMap<_, Identity, TokenId, /* UTXO */ H256, OptionQuery>; #[pallet::storage] @@ -375,10 +372,10 @@ pub mod pallet { } } - pub(crate) fn get_utxo_by_token_id( + pub(crate) fn get_output_by_token_id( token_id: TokenId, ) -> Option> { - let utxo_id = PointerToIssueToken::::get(token_id)?; + let utxo_id = TokenIssuanceTransactions::::get(token_id)?; UtxoStore::::get(utxo_id) } @@ -468,14 +465,421 @@ pub mod pallet { pub fn validate_transaction( tx: &TransactionFor, ) -> Result { - let mut tv = TransactionVerifier::<'_, T>::new(tx)?; - tv.checking_inputs()?; - tv.checking_outputs()?; - tv.checking_signatures()?; - tv.checking_utxos_exists()?; - tv.checking_amounts()?; - tv.calculating_reward()?; - tv.collect_result() + //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries + + //ensure rather than assert to avoid panic + //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries + ensure!(!tx.inputs.is_empty(), "no inputs"); + ensure!(!tx.outputs.is_empty(), "no outputs"); + ensure!(tx.inputs.len() < (u32::MAX as usize), "too many inputs"); + ensure!(tx.outputs.len() < (u32::MAX as usize), "too many outputs"); + + //ensure each input is used only a single time + //maps each input into btree + //if map.len() > num of inputs then fail + //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html + //WARNING workshop code has a bug here + //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs + //input_map.len() > transaction.inputs.len() //THIS IS WRONG + { + let input_map: BTreeMap<_, ()> = + tx.inputs.iter().map(|input| (input.outpoint, ())).collect(); + //we want map size and input size to be equal to ensure each is used only once + ensure!( + input_map.len() == tx.inputs.len(), + "each input should be used only once" + ); + } + //ensure each output is unique + //map each output to btree to count unique elements + //WARNING example code has a bug here + //out_map.len() != transaction.outputs.len() //THIS IS WRONG + { + let out_map: BTreeMap<_, ()> = tx.outputs.iter().map(|output| (output, ())).collect(); + //check each output is defined only once + ensure!( + out_map.len() == tx.outputs.len(), + "each output should be used once" + ); + } + + // Verify absolute time lock + ensure!( + tx.check_time_lock::(), + 
"Time lock restrictions not satisfied" + ); + + // Resolve the transaction inputs by looking up UTXOs being spent by them. + // + // This will cointain one of the following: + // * Ok(utxos): a vector of UTXOs each input spends. + // * Err(missing): a vector of outputs missing from the store + let input_utxos = { + let mut missing = Vec::new(); + let mut resolved: Vec> = Vec::new(); + + for input in &tx.inputs { + if let Some(input_utxo) = >::get(&input.outpoint) { + let lock_commitment = input_utxo.destination.lock_commitment(); + ensure!( + input.lock_hash() == *lock_commitment, + "Lock hash does not match" + ); + resolved.push(input_utxo); + } else { + missing.push(input.outpoint.clone().as_fixed_bytes().to_vec()); + } + } + + missing.is_empty().then(|| resolved).ok_or(missing) + }; + + let full_inputs: Vec<(TokenId, TransactionOutputFor)> = tx + .inputs + .iter() + .filter_map(|input| >::get(&input.outpoint)) + .filter_map(|output| match output.data { + Some(ref data) => match data { + OutputData::TokenTransferV1 { token_id, .. } + | OutputData::TokenIssuanceV1 { token_id, .. } + | OutputData::NftMintV1 { token_id, .. } => Some((token_id.clone(), output)), + OutputData::TokenBurnV1 { .. } => None, + }, + None => { + // We do not calculate MLT here + None + } + }) + .collect(); + + // + let mut total_value_of_input_tokens: BTreeMap = BTreeMap::new(); + let mut mlt_amount_in_inputs: Value = 0; + for input in &tx.inputs { + let output = >::get(&input.outpoint).ok_or("missing inputs")?; + match &output.data { + Some(OutputData::TokenIssuanceV1 { + ref token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + }) => { + // We have to check is this token already issued? + ensure!( + TokenIssuanceTransactions::::contains_key(token_id), + "token has never been issued" + ); + ensure!( + token_ticker.is_ascii(), + "token ticker has none ascii characters" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + ensure!(token_ticker.len() <= 5, "token ticker is too long"); + ensure!(!token_ticker.is_empty(), "token ticker can't be empty"); + ensure!(metadata_uri.len() <= 100, "token metadata uri is too long"); + ensure!(amount_to_issue > &0u128, "output value must be nonzero"); + ensure!(number_of_decimals <= &18, "too long decimals"); + // If token has just created we can't meet another amount here. + total_value_of_input_tokens.insert(token_id.clone(), *amount_to_issue); + // But probably in this input we have a fee + mlt_amount_in_inputs = mlt_amount_in_inputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenTransferV1 { + ref token_id, + amount, + .. + }) => { + total_value_of_input_tokens.insert( + token_id.clone(), + total_value_of_input_tokens + .get(token_id) + .unwrap_or(&0) + .checked_add(*amount) + .ok_or("input value overflow")?, + ); + // But probably in this input we have a fee + mlt_amount_in_inputs = mlt_amount_in_inputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenBurnV1 { .. }) => { + // Nothing to do here because tokens no longer exist. + } + Some(OutputData::NftMintV1 { + ref token_id, + data_hash, + metadata_uri, + }) => { + // We have to check is this token already issued? + ensure!( + TokenIssuanceTransactions::::contains_key(token_id), + "unable to use an input where NFT has not minted yet" + ); + // Check is this digital data unique? 
+ ensure!( + NftUniqueDataHash::::contains_key(data_hash), + "unable to use an input where NFT digital data was changed" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + // If NFT has just created we can't meet another NFT part here. + total_value_of_input_tokens.insert(token_id.clone(), 1); + } + None => { + mlt_amount_in_inputs = mlt_amount_in_inputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + } + } + + let mut total_value_of_output_tokens: BTreeMap = BTreeMap::new(); + let mut mlt_amount_in_outputs: Value = 0; + for output in &tx.outputs { + match &output.data { + Some(OutputData::TokenIssuanceV1 { + ref token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + }) => { + // We have to check is this token already issued? + ensure!( + !TokenIssuanceTransactions::::contains_key(token_id), + "token has already been issued" + ); + ensure!( + token_ticker.is_ascii(), + "token ticker has none ascii characters" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + ensure!(token_ticker.len() <= 5, "token ticker is too long"); + ensure!(!token_ticker.is_empty(), "token ticker can't be empty"); + ensure!(metadata_uri.len() <= 100, "token metadata uri is too long"); + ensure!(amount_to_issue > &0u128, "output value must be nonzero"); + ensure!(number_of_decimals <= &18, "too long decimals"); + + // If token has just created we can't meet another amount here. + total_value_of_output_tokens.insert(token_id.clone(), *amount_to_issue); + // But probably in this input we have a fee + mlt_amount_in_outputs = mlt_amount_in_outputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenTransferV1 { + ref token_id, + amount, + .. + }) => { + total_value_of_output_tokens.insert( + token_id.clone(), + total_value_of_output_tokens + .get(token_id) + .unwrap_or(&0) + .checked_add(*amount) + .ok_or("output value overflow")?, + ); + // But probably in this input we have a fee + mlt_amount_in_outputs = mlt_amount_in_outputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenBurnV1 { .. }) => { + // Nothing to do here because tokens no longer exist. + } + Some(OutputData::NftMintV1 { + ref token_id, + data_hash, + metadata_uri, + }) => { + // We have to check is this token already issued? + ensure!( + !TokenIssuanceTransactions::::contains_key(token_id), + "token has already been issued" + ); + + // Check is this digital data unique? + ensure!( + !>::contains_key(data_hash), + "digital data has already been minted" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + // If NFT has just created we can't meet another NFT part here. + total_value_of_output_tokens.insert(token_id.clone(), 1); + } + None => { + mlt_amount_in_outputs = mlt_amount_in_outputs + .checked_add(output.value) + .ok_or("output value overflow")?; + } + } + } + + // Check for token creation + for output in tx.outputs.iter() { + let tid = match output.data { + Some(OutputData::TokenTransferV1 { ref token_id, .. }) + | Some(OutputData::TokenIssuanceV1 { ref token_id, .. }) => token_id.clone(), + Some(OutputData::NftMintV1 { .. }) + | Some(OutputData::TokenBurnV1 { .. 
}) | None => continue, + }; + // If there is both an input and an output for the same token, that's fine. + if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() { + continue; + } else { + // But if there is no input for the token while the token id already exists, fail. + ensure!( + !<TokenIssuanceTransactions<T>>::contains_key(tid), + "no inputs for the token id" + ); + } + } + + let mut new_utxos = Vec::new(); + let mut reward = 0; + + // Check that outputs are valid + for (output_index, output) in tx.outputs.iter().enumerate() { + match output.data { + Some(OutputData::TokenIssuanceV1 { + amount_to_issue, .. + }) => ensure!(amount_to_issue > 0, "output value must be nonzero"), + Some(OutputData::TokenTransferV1 { amount, .. }) => { + ensure!(amount > 0, "output value must be nonzero") + } + Some(OutputData::TokenBurnV1 { amount_to_burn, .. }) => { + ensure!(amount_to_burn > 0, "output value must be nonzero") + } + Some(OutputData::NftMintV1 { .. }) => { + // Nothing to check + } + None => ensure!(output.value > 0, "output value must be nonzero"), + } + let hash = tx.outpoint(output_index as u64); + ensure!(!<UtxoStore<T>>::contains_key(hash), "output already exists"); + new_utxos.push(hash.as_fixed_bytes().to_vec()); + + match output.destination { + Destination::Pubkey(_) | Destination::ScriptHash(_) => {} + Destination::CreatePP(_, _) => { + log::info!("TODO validate OP_CREATE"); + } + Destination::CallPP(_, _, _) => { + log::info!("TODO validate OP_CALL"); + } + } + } + + // if all spent UTXOs are available, check the math and signatures + if let Ok(input_utxos) = &input_utxos { + // The MLT carried by the outputs must not exceed the MLT carried by the inputs. + ensure!( + mlt_amount_in_outputs <= mlt_amount_in_inputs, + "output value must not exceed input value" + ); + + let mut num_creations = 0; + for output_token in &total_value_of_output_tokens { + match total_value_of_input_tokens.get(&output_token.0) { + Some(input_value) => ensure!( + input_value >= &output_token.1, + "output value must not exceed input value" + ), + None => { + match &tx.outputs.iter().find(|x| match x.data { + Some(ref output_data) => { + output_data.id().as_ref() == Some(output_token.0) + } + None => false, + }) { + Some(output) => match output.data { + None + | Some(OutputData::TokenTransferV1 { .. }) + | Some(OutputData::TokenBurnV1 { .. }) => { + frame_support::fail!("input for the token not found") + } + Some(OutputData::NftMintV1 { .. }) + | Some(OutputData::TokenIssuanceV1 { .. }) => { + num_creations += 1; + continue; + } + }, + None => frame_support::fail!("corrupted output data"), + } + } + } + } + ensure!( + mlt_amount_in_inputs >= (num_creations * crate::tokens::Mlt(100).to_munit()), + "insufficient fee" + ); + + for (index, (input, input_utxo)) in tx.inputs.iter().zip(input_utxos).enumerate() { + match &input_utxo.destination { + Destination::Pubkey(pubkey) => { + let msg = sign::TransactionSigMsg::construct( + sign::SigHash::default(), + &tx, + &input_utxos, + index as u64, + u32::MAX, + ); + let ok = pubkey + .parse_sig(&input.witness[..]) + .ok_or("bad signature format")? 
+ .verify(&msg); + ensure!(ok, "signature must be valid"); + } + Destination::CreatePP(_, _) => { + log::info!("TODO validate spending of OP_CREATE"); + } + Destination::CallPP(_, _, _) => { + let spend = + u16::from_le_bytes(input.witness[1..].try_into().or_else(|_| { + Err(DispatchError::Other( + "Failed to convert witness to an opcode", + )) + })?); + ensure!(spend == 0x1337, "OP_SPEND not found"); + } + Destination::ScriptHash(_hash) => { + let witness = input.witness.clone(); + let lock = input.lock.clone(); + crate::script::verify(&tx, &input_utxos, index as u64, witness, lock) + .map_err(|_| "script verification failed")?; + } + } + } + + // The reward is currently denominated in MLT only + reward = mlt_amount_in_inputs + .checked_sub(mlt_amount_in_outputs) + .ok_or("reward underflow")?; + } + + Ok(ValidTransaction { + priority: reward as u64, + requires: input_utxos.map_or_else(|x| x, |_| Vec::new()), + provides: new_utxos, + longevity: TransactionLongevity::MAX, + propagate: true, + }) } /// Update storage to reflect changes made by transaction @@ -519,14 +923,17 @@ pub mod pallet { // Also, we should make it possible to find the output where a token was created by its token_id. // That output is the place where the token was created; it allows us to check that a token or // an NFT has not been created yet. - <PointerToIssueToken<T>>::insert(token_id, hash); + <TokenIssuanceTransactions<T>>::insert(token_id, hash); } Some(OutputData::TokenIssuanceV1 { token_id, .. }) => { // For MLS-01 we save a relation between token_id and the output where // token was created. - <PointerToIssueToken<T>>::insert(token_id, hash); + <TokenIssuanceTransactions<T>>::insert(token_id, hash); } - _ => continue, + // For safety, handle all remaining cases explicitly + Some(OutputData::TokenBurnV1 { .. }) + | Some(OutputData::TokenTransferV1 { .. }) + | None => continue, } } Destination::CreatePP(script, data) => { @@ -701,7 +1108,7 @@ impl crate::Pallet { pub fn nft_read( nft_id: &core::primitive::str, ) -> Option<(/* Data url */ Vec<u8>, /* Data hash */ Vec<u8>)> { - match crate::pallet::get_utxo_by_token_id::( + match crate::pallet::get_output_by_token_id::( crate::tokens::TokenId::from_string(&nft_id).ok()?, )? 
.data diff --git a/pallets/utxo/src/mock.rs b/pallets/utxo/src/mock.rs index c4c5f7c..534c6d4 100644 --- a/pallets/utxo/src/mock.rs +++ b/pallets/utxo/src/mock.rs @@ -18,6 +18,7 @@ use crate as pallet_utxo; use pallet_utxo::TransactionOutput; use pp_api::ProgrammablePoolApi; +use crate::tokens::Value; use frame_support::{dispatch::Vec, weights::Weight}; use frame_support::{ parameter_types, @@ -43,10 +44,13 @@ pub type Block = frame_system::mocking::MockBlock; pub const ALICE_PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; +pub const ALICE_GENESIS_BALANCE: Value = 1_000_000_000_000_000; + pub fn genesis_utxo() -> (TransactionOutput, H256) { let keystore = KeyStore::new(); let alice_pub_key = create_pub_key(&keystore, ALICE_PHRASE); - let output = TransactionOutput::::new_pubkey(100, H256::from(alice_pub_key)); + let output = + TransactionOutput::::new_pubkey(ALICE_GENESIS_BALANCE, H256::from(alice_pub_key)); let hash = BlakeTwo256::hash_of(&output); (output, hash) } @@ -177,7 +181,10 @@ pub fn new_test_ext() -> TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_utxo::GenesisConfig:: { - genesis_utxos: vec![TransactionOutput::new_pubkey(100, H256::from(alice_pub_key))], + genesis_utxos: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE, + H256::from(alice_pub_key), + )], _marker: Default::default(), } .assimilate_storage(&mut t) @@ -199,7 +206,10 @@ pub fn new_test_ext_and_keys() -> (TestExternalities, Public, Public) { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_utxo::GenesisConfig:: { - genesis_utxos: vec![TransactionOutput::new_pubkey(100, H256::from(alice_pub_key))], + genesis_utxos: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE, + H256::from(alice_pub_key), + )], _marker: Default::default(), } .assimilate_storage(&mut t) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 3b4a44d..fc75123 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -278,7 +278,7 @@ fn attack_by_overspending() { let tx = Transaction { inputs: vec![input0], outputs: vec![ - TransactionOutput::new_pubkey(100, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE, H256::from(alice_pub_key)), // Creates 2 new utxo out of thin air TransactionOutput::new_pubkey(2, H256::from(alice_pub_key)), ], @@ -347,7 +347,7 @@ fn test_reward() { let reward = RewardTotal::::get(); assert_eq!(utxos.value, 90); - assert_eq!(reward, 10); + assert_eq!(reward, ALICE_GENESIS_BALANCE - 90); }) } @@ -466,7 +466,7 @@ fn test_send_to_address() { assert_err!( Utxo::send_to_address( Origin::signed(H256::from(alice_pub_key)), - 10_000_000, + ALICE_GENESIS_BALANCE * 10, addr.as_bytes().to_vec(), ), "Caller doesn't have enough UTXOs", @@ -591,19 +591,23 @@ fn test_token_issuance() { assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); // Checking a new UTXO assert!(UtxoStore::::contains_key(new_utxo_hash)); - assert_eq!( - 1_000_000_000, - UtxoStore::::get(new_utxo_hash) - .unwrap() - .data - .map(|x| match x { - OutputData::TokenIssuanceV1 { - amount_to_issue, .. 
- } => amount_to_issue, - _ => 0, - }) - .unwrap_or(0) - ); + + match UtxoStore::::get(new_utxo_hash).expect("The new output not found").data { + Some(OutputData::TokenIssuanceV1 { + token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + }) => { + assert_eq!(TokenId::new_asset(first_input_hash), token_id); + assert_eq!(1_000_000_000, amount_to_issue); + assert_eq!("BensT".as_bytes().to_vec(), token_ticker); + assert_eq!(2, number_of_decimals); + assert_eq!("facebook.com".as_bytes().to_vec(), metadata_uri); + } + _ => panic!("Transaction data is corrupted"), + } }); } @@ -777,53 +781,46 @@ fn test_token_double_creation() { }); } -#[test] -fn test_tokens_with_invalid_data() { - macro_rules! test_tx { - ($data: ident, $checking: tt, $err: expr) => { - execute_with_alice(|alice_pub_key| { - let (utxo0, input0) = tx_input_gen_no_signature(); - let output_new = TransactionOutput { - value: 1, - destination: Destination::Pubkey(alice_pub_key), - data: Some($data.clone()), - }; - let tx = Transaction { - inputs: vec![input0], - outputs: vec![output_new], - time_lock: Default::default(), - } - .sign_unchecked(&[utxo0], 0, &alice_pub_key); - let new_utxo_hash = tx.outpoint(0); - let (_, init_utxo) = genesis_utxo(); - // Spend +// This macro using for the fast creation and sending a tx +macro_rules! test_tx { + ($data: ident, $checking: tt, $err: expr) => { + execute_with_alice(|alice_pub_key| { + let (utxo0, input0) = tx_input_gen_no_signature(); + let output_new = TransactionOutput { + value: 1, + destination: Destination::Pubkey(alice_pub_key), + data: Some($data.clone()), + }; + let tx = Transaction { + inputs: vec![input0], + outputs: vec![output_new], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + let new_utxo_hash = tx.outpoint(0); + let (_, init_utxo) = genesis_utxo(); + // Send + assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + // We can check what error we are expecting + if stringify!($checking) == "Err" { + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + $err + ); assert!(UtxoStore::::contains_key(H256::from(init_utxo))); - if stringify!($checking) == "Err" { - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - $err - ); - assert!(UtxoStore::::contains_key(H256::from(init_utxo))); - assert!(!UtxoStore::::contains_key(new_utxo_hash)); - } else if stringify!($checking) == "Ok" { - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); - assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); - assert!(UtxoStore::::contains_key(new_utxo_hash)); - } - }); - }; - } - - // TokenID = MLT - let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::mlt(), - token_ticker: vec![], - amount_to_issue: 0, - number_of_decimals: 0, - metadata_uri: vec![], + assert!(!UtxoStore::::contains_key(new_utxo_hash)); + } else if stringify!($checking) == "Ok" { + // We can check is that success + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); + assert!(UtxoStore::::contains_key(new_utxo_hash)); + } + }); }; - test_tx!(data, Err, "unable to use mlt as a token id"); +} +#[test] +fn test_tokens_issuance_empty_ticker() { // Ticker empty let data = OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(H256::random()), @@ -833,7 +830,10 @@ fn test_tokens_with_invalid_data() { metadata_uri: vec![], }; test_tx!(data, Err, "token ticker can't be empty"); +} 
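+
+// For contrast with the failure cases around it, a fully-populated, valid
+// issuance passes every check. This is an illustrative sketch only (the
+// ticker and URI values are hypothetical, not taken from the original suite):
+#[test]
+fn test_tokens_issuance_valid_data_sketch() {
+    let data = OutputData::TokenIssuanceV1 {
+        // ASCII ticker of at most 5 bytes, nonzero amount, at most 18
+        // decimals, ASCII metadata URI of at most 100 bytes.
+        token_id: TokenId::new_asset(H256::random()),
+        token_ticker: b"BENS".to_vec(),
+        amount_to_issue: 1_000_000_000,
+        number_of_decimals: 2,
+        metadata_uri: b"mintlayer.org".to_vec(),
+    };
+    test_tx!(data, Ok, "");
+}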
+#[test] +fn test_tokens_issuance_too_big_ticker() { // Ticker too long let data = OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(H256::random()), @@ -843,7 +843,10 @@ fn test_tokens_with_invalid_data() { metadata_uri: vec![], }; test_tx!(data, Err, "token ticker is too long"); +} +#[test] +fn test_tokens_issuance_amount_zero() { // Amount to issue is zero let data = OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(H256::random()), @@ -853,7 +856,10 @@ fn test_tokens_with_invalid_data() { metadata_uri: vec![], }; test_tx!(data, Err, "output value must be nonzero"); +} +#[test] +fn test_tokens_issuance_too_big_decimals() { // Number of decimals more than 18 numbers let data = OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(H256::random()), @@ -863,7 +869,10 @@ fn test_tokens_with_invalid_data() { metadata_uri: vec![], }; test_tx!(data, Err, "too long decimals"); +} +#[test] +fn test_tokens_issuance_empty_metadata() { // metadata_uri empty let data = OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(H256::random()), @@ -873,7 +882,10 @@ fn test_tokens_with_invalid_data() { metadata_uri: vec![], }; test_tx!(data, Ok, ""); +} +#[test] +fn test_tokens_issuance_too_long_metadata() { // metadata_uri too long let data = OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(H256::random()), @@ -883,7 +895,10 @@ fn test_tokens_with_invalid_data() { metadata_uri: Vec::from([0u8; 10_000]), }; test_tx!(data, Err, "token metadata uri is too long"); +} +#[test] +fn test_tokens_issuance_with_junk_data() { // The data field of the maximum allowed length filled with random garbage let mut rng = rand::thread_rng(); let garbage = build_random_vec(100); @@ -895,6 +910,12 @@ fn test_tokens_with_invalid_data() { metadata_uri: garbage.clone(), }; test_tx!(data, Err, "token ticker has none ascii characters"); +} + +#[test] +fn test_tokens_issuance_with_corrupted_uri() { + let mut rng = rand::thread_rng(); + let garbage = build_random_vec(100); // garbage uri let data = OutputData::TokenIssuanceV1 { token_id: TokenId::new_asset(H256::random()), @@ -907,7 +928,7 @@ fn test_tokens_with_invalid_data() { } #[test] -fn test_tokens_transferring() { +fn test_tokens_transfer() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { let token_id = TokenId::new_asset(H256::random()); @@ -1047,21 +1068,10 @@ fn test_nft_transferring() { time_lock: Default::default(), } .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - let new_utxo_hash = tx.outpoint(0); - let new_utxo = tx.outputs[0].clone(); let token_utxo_hash = tx.outpoint(1); let token_utxo = tx.outputs[1].clone(); - // then send rest of the tokens to karl (proving that the first tx was successful) - let _tx = Transaction { - inputs: vec![TransactionInput::new_empty(new_utxo_hash)], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(karl_pub_key))], - time_lock: Default::default(), - } - .sign_unchecked(&[new_utxo.clone()], 0, &alice_pub_key); - // Let's fail on wrong token id let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], @@ -1080,7 +1090,6 @@ fn test_nft_transferring() { Utxo::spend(Origin::signed(H256::zero()), tx), "input for the token not found" ); - // Let's fail on exceed token amount let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index 
db202c9..da4d6a1 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -33,12 +33,6 @@ pub struct TokenId { } impl TokenId { - pub fn mlt() -> TokenId { - TokenId { - inner: TokenIdInner::MLT, - } - } - pub fn new_asset(first_input_hash: H256) -> TokenId { TokenId { // We are loosing the first bytes of H256 over here and using 20 the last bytes diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs deleted file mode 100644 index d3fa3b4..0000000 --- a/pallets/utxo/src/verifier.rs +++ /dev/null @@ -1,615 +0,0 @@ -// Copyright (c) 2021 RBB S.r.l -// opensource@mintlayer.org -// SPDX-License-Identifier: MIT -// Licensed under the MIT License; -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://spdx.org/licenses/MIT -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// Author(s): A.Sinitsyn - -#[macro_export] -// The Substrate has a big macros ecosystem. That could be easily broken if T:Config will using in -// other mod instead of lib.rs. Due to we have not enough time for quality decomposition lib.rs to -// I decide to move this part of the code in the macro. -// -// At the moment, this piece of code is rough. After the test-net, we will return to https://github.com/mintlayer/core/issues/81 -// and decide how it make it better. The main problem is that there are a lot of cycles. We should split into -// stages and use all of these checks as an array of functions that we will call on a couple of main cycles. -// But, at the moment it works and it is suitable for the test-net. -macro_rules! 
implement_transaction_verifier { - () => { - use crate::sign::TransactionSigMsg; - use chainscript::sighash::SigHash; - - // The main object, where stored temporary data about a tx - pub struct TransactionVerifier<'a, T: Config> { - // Pointer to the transaction - tx: &'a TransactionFor, - // Vec of inputs for each Token ID - all_inputs_map: BTreeMap)>>, - // Vec of outputs for each Token ID - all_outputs_map: BTreeMap>>, - // The total summary value of the tokens in inputs for each TokenID - total_value_of_input_tokens: BTreeMap, - // The total summary value of the tokens in outputs for each TokenID - total_value_of_output_tokens: BTreeMap, - // Vec of outputs that should be written - new_utxos: Vec>, - // For more information have a look at checking_utxos_exists - spent_utxo: Result< - Vec::AccountId>>, - Vec>, - >, - // The total reward for this tx - reward: u64, - } - - impl TransactionVerifier<'_, T> { - pub fn new(tx: &TransactionFor) -> Result, &'static str> { - // Verify absolute time lock - ensure!( - tx.check_time_lock::(), - "Time lock restrictions not satisfied" - ); - // Init - let all_inputs_map = Self::init_inputs(&tx)?; - let all_outputs_map = Self::init_outputs(&tx)?; - let total_value_of_input_tokens = - Self::init_total_value_of_input_tokens(&all_inputs_map)?; - let total_value_of_output_tokens = - Self::init_total_value_of_output_tokens(&all_outputs_map)?; - Ok(TransactionVerifier { - tx, - all_inputs_map, - all_outputs_map, - total_value_of_input_tokens, - total_value_of_output_tokens, - new_utxos: Vec::new(), - spent_utxo: Ok(Vec::new()), - reward: 0, - }) - } - - // Turn Vector into BTreeMap - fn init_inputs( - tx: &TransactionFor, - ) -> Result< - BTreeMap)>>, - &'static str, - > { - let mut input_map: BTreeMap< - TokenId, - Vec<(TransactionInput, TransactionOutputFor)>, - > = BTreeMap::new(); - - for input in &tx.inputs { - let token_id = - TransactionVerifier::<'_, T>::get_token_id_from_input(input.outpoint)?; - let output = - TransactionVerifier::<'_, T>::get_output_by_outpoint(input.outpoint) - .ok_or("missing inputs")?; - - if let Some(inputs) = input_map.get_mut(&token_id) { - inputs.push((input.clone(), output)); - } else { - input_map.insert(token_id, vec![(input.clone(), output)]); - } - } - Ok(input_map) - } - // Turn Vector into BTreeMap - fn init_outputs( - tx: &TransactionFor, - ) -> Result>>, &'static str> { - let mut output_map: BTreeMap>> = - BTreeMap::new(); - - for output in &tx.outputs { - let token_id = TransactionVerifier::<'_, T>::get_token_id_from_output(&output); - if let Some(outputs) = output_map.get_mut(&token_id) { - outputs.push(output.clone()); - } else { - output_map.insert(token_id, vec![output.clone()]); - } - } - Ok(output_map) - } - - fn init_total_value_of_input_tokens( - all_inputs_map: &BTreeMap< - TokenId, - Vec<(TransactionInput, TransactionOutputFor)>, - >, - ) -> Result, &'static str> { - let mut total_value_of_input_tokens: BTreeMap = BTreeMap::new(); - let mut mlt_amount: Value = 0; - for (_, input_vec) in all_inputs_map.iter() { - for (_, input_utxo) in input_vec { - match &input_utxo.data { - Some(OutputData::TokenIssuanceV1 { - ref token_id, - token_ticker, - amount_to_issue, - number_of_decimals, - metadata_uri, - }) => { - // We have to check is this token already issued? 
- ensure!( - PointerToIssueToken::::contains_key(token_id), - "token has never been issued" - ); - ensure!( - token_id != &TokenId::mlt(), - "unable to use mlt as a token id" - ); - ensure!( - token_ticker.is_ascii(), - "token ticker has none ascii characters" - ); - ensure!( - metadata_uri.is_ascii(), - "metadata uri has none ascii characters" - ); - ensure!(token_ticker.len() <= 5, "token ticker is too long"); - ensure!(!token_ticker.is_empty(), "token ticker can't be empty"); - ensure!( - metadata_uri.len() <= 100, - "token metadata uri is too long" - ); - ensure!(amount_to_issue > &0u128, "output value must be nonzero"); - ensure!(number_of_decimals <= &18, "too long decimals"); - // If token has just created we can't meet another amount here. - total_value_of_input_tokens - .insert(token_id.clone(), *amount_to_issue); - // But probably in this input we have a fee - mlt_amount = mlt_amount - .checked_add(input_utxo.value) - .ok_or("input value overflow")?; - } - Some(OutputData::TokenTransferV1 { - ref token_id, - amount, - .. - }) => { - total_value_of_input_tokens.insert( - token_id.clone(), - total_value_of_input_tokens - .get(token_id) - .unwrap_or(&0) - .checked_add(*amount) - .ok_or("input value overflow")?, - ); - // But probably in this input we have a fee - mlt_amount = mlt_amount - .checked_add(input_utxo.value) - .ok_or("input value overflow")?; - } - Some(OutputData::TokenBurnV1 { .. }) => { - // Nothing to do here because tokens no longer exist. - } - Some(OutputData::NftMintV1 { - ref token_id, - data_hash, - metadata_uri, - }) => { - // We have to check is this token already issued? - ensure!( - PointerToIssueToken::::contains_key(token_id), - "unable to use an input where NFT has not minted yet" - ); - - // Check is this digital data unique? - ensure!( - NftUniqueDataHash::::contains_key(data_hash), - "unable to use an input where NFT digital data was changed" - ); - - ensure!( - token_id != &TokenId::mlt(), - "unable to use mlt as a token id" - ); - ensure!( - metadata_uri.is_ascii(), - "metadata uri has none ascii characters" - ); - // If NFT has just created we can't meet another NFT part here. - total_value_of_input_tokens.insert(token_id.clone(), 1); - } - None => { - mlt_amount = mlt_amount - .checked_add(input_utxo.value) - .ok_or("input value overflow")?; - } - } - } - } - total_value_of_input_tokens.insert(TokenId::mlt(), mlt_amount); - Ok(total_value_of_input_tokens) - } - - fn init_total_value_of_output_tokens( - all_outputs_map: &BTreeMap>>, - ) -> Result, &'static str> { - let mut total_value_of_output_tokens: BTreeMap = BTreeMap::new(); - let mut mlt_amount: Value = 0; - for (_, outputs_vec) in all_outputs_map.iter() { - for utxo in outputs_vec { - // for x in all_outputs_map { - match &utxo.data { - Some(OutputData::TokenIssuanceV1 { - ref token_id, - token_ticker, - amount_to_issue, - number_of_decimals, - metadata_uri, - }) => { - // We have to check is this token already issued? 
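-                                // (Note that the check is inverted on the output side: issuing
-                                // under an id that already exists must fail, preventing
-                                // re-issuance of an existing token.)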
- ensure!( - !PointerToIssueToken::::contains_key(token_id), - "token has already been issued" - ); - ensure!( - token_id != &TokenId::mlt(), - "unable to use mlt as a token id" - ); - ensure!( - token_ticker.is_ascii(), - "token ticker has none ascii characters" - ); - ensure!( - metadata_uri.is_ascii(), - "metadata uri has none ascii characters" - ); - ensure!(token_ticker.len() <= 5, "token ticker is too long"); - ensure!(!token_ticker.is_empty(), "token ticker can't be empty"); - ensure!( - metadata_uri.len() <= 100, - "token metadata uri is too long" - ); - ensure!(amount_to_issue > &0u128, "output value must be nonzero"); - ensure!(number_of_decimals <= &18, "too long decimals"); - - // If token has just created we can't meet another amount here. - total_value_of_output_tokens - .insert(token_id.clone(), *amount_to_issue); - // But probably in this input we have a fee - mlt_amount = mlt_amount - .checked_add(utxo.value) - .ok_or("input value overflow")?; - } - Some(OutputData::TokenTransferV1 { - ref token_id, - amount, - .. - }) => { - total_value_of_output_tokens.insert( - token_id.clone(), - total_value_of_output_tokens - .get(token_id) - .unwrap_or(&0) - .checked_add(*amount) - .ok_or("output value overflow")?, - ); - // But probably in this input we have a fee - mlt_amount = mlt_amount - .checked_add(utxo.value) - .ok_or("input value overflow")?; - } - Some(OutputData::TokenBurnV1 { .. }) => { - // Nothing to do here because tokens no longer exist. - } - Some(OutputData::NftMintV1 { - ref token_id, - data_hash, - metadata_uri, - }) => { - // We have to check is this token already issued? - ensure!( - !PointerToIssueToken::::contains_key(token_id), - "token has already been issued" - ); - - // Check is this digital data unique? - ensure!( - !>::contains_key(data_hash), - "digital data has already been minted" - ); - - ensure!( - token_id != &TokenId::mlt(), - "unable to use mlt as a token id" - ); - ensure!( - metadata_uri.is_ascii(), - "metadata uri has none ascii characters" - ); - // If NFT has just created we can't meet another NFT part here. - total_value_of_output_tokens.insert(token_id.clone(), 1); - } - None => { - mlt_amount = mlt_amount - .checked_add(utxo.value) - .ok_or("output value overflow")?; - } - } - } - } - total_value_of_output_tokens.insert(TokenId::mlt(), mlt_amount); - Ok(total_value_of_output_tokens) - } - - fn get_token_id_from_input(outpoint: H256) -> Result { - if let Some(input_utxo) = >::get(outpoint) { - match input_utxo.data { - Some(data) => data.id().ok_or("Token had burned or input incorrect"), - None => Ok(TokenId::mlt()), - } - } else { - Ok(TokenId::mlt()) - } - } - - fn get_token_id_from_output(output: &TransactionOutputFor) -> TokenId { - match output.data { - Some(OutputData::TokenTransferV1 { ref token_id, .. }) - | Some(OutputData::TokenIssuanceV1 { ref token_id, .. }) - | Some(OutputData::NftMintV1 { ref token_id, .. }) => token_id.clone(), - Some(OutputData::TokenBurnV1 { .. 
}) => unreachable!(), - _ => TokenId::mlt(), - } - } - - fn get_output_by_outpoint(outpoint: H256) -> Option> { - >::get(outpoint) - } - - pub fn checking_inputs(&mut self) -> Result<(), &'static str> { - //ensure rather than assert to avoid panic - //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries - ensure!(!self.tx.inputs.is_empty(), "no inputs"); - ensure!( - self.tx.inputs.len() < (u32::MAX as usize), - "too many inputs" - ); - - //ensure each input is used only a single time - //maps each input into btree - //if map.len() > num of inputs then fail - //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html - //WARNING workshop code has a bug here - //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs - //input_map.len() > transaction.inputs.len() //THIS IS WRONG - let input_map: BTreeMap<_, ()> = - self.tx.inputs.iter().map(|input| (input.outpoint, ())).collect(); - //we want map size and input size to be equal to ensure each is used only once - ensure!( - input_map.len() == self.tx.inputs.len(), - "each input should be used only once" - ); - Ok(()) - } - - pub fn checking_outputs(&mut self) -> Result<(), &'static str> { - //ensure rather than assert to avoid panic - //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries - ensure!(!self.tx.outputs.is_empty(), "no outputs"); - ensure!( - self.tx.outputs.len() < (u32::MAX as usize), - "too many outputs" - ); - - //ensure each output is unique - //map each output to btree to count unique elements - //WARNING example code has a bug here - //out_map.len() != transaction.outputs.len() //THIS IS WRONG - let out_map: BTreeMap<_, ()> = - self.tx.outputs.iter().map(|output| (output, ())).collect(); - - //check each output is defined only once - ensure!( - out_map.len() == self.tx.outputs.len(), - "each output should be used once" - ); - Ok(()) - } - - pub fn checking_signatures(&self) -> Result<(), &'static str> { - for (index, (_, inputs_vec)) in self.all_inputs_map.iter().enumerate() { - for (sub_index, (input, input_utxo)) in inputs_vec.iter().enumerate() { - let spending_utxos: Vec> = self - .all_inputs_map - .iter() - .map(|(_, inputs_vec)| { - inputs_vec - .iter() - .map(|item| item.1.clone()) - .collect::>>() - }) - .flatten() - .collect(); - match &input_utxo.destination { - Destination::Pubkey(pubkey) => { - let msg = TransactionSigMsg::construct( - SigHash::default(), - &self.tx, - &spending_utxos[..], - (index + sub_index) as u64, - u32::MAX, - ); - let ok = crate::sign::Public::Schnorr(*pubkey) - .parse_sig(&input.witness[..]) - .ok_or("bad signature format")? 
- .verify(&msg); - ensure!(ok, "signature must be valid"); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate spending of OP_CREATE"); - } - Destination::CallPP(_, _, _) => { - let spend = u16::from_le_bytes( - input.witness[1..].try_into().or_else(|_| { - Err(DispatchError::Other( - "Failed to convert witness to an opcode", - )) - })?, - ); - ensure!(spend == 0x1337, "OP_SPEND not found"); - } - Destination::ScriptHash(_hash) => { - let witness = input.witness.clone(); - let lock = input.lock.clone(); - crate::script::verify( - &self.tx, - // todo: Check with Lukas is it correct or no - &spending_utxos[..], - (index + sub_index) as u64, - witness, - lock, - ) - .map_err(|_| "script verification failed")?; - } - } - } - } - - Ok(()) - } - - pub fn checking_amounts(&self) -> Result<(), &'static str> { - let mut num_creations = 0; - for (token_id, output_value) in self.total_value_of_output_tokens.iter() { - match self.total_value_of_input_tokens.get(token_id) { - Some(input_value) => ensure!( - input_value >= &output_value, - "output value must not exceed input value" - ), - None => { - match self.all_outputs_map.get(token_id) { - Some(outputs_vec) => { - // We have not any input for this token, perhaps it's token creation - ensure!( - outputs_vec.len() == 1, - "attempting double creation token failed" - ); - match outputs_vec[0].data { - None - | Some(OutputData::TokenTransferV1 { .. }) - | Some(OutputData::TokenBurnV1 { .. }) => { - frame_support::fail!("input for the token not found") - } - Some(OutputData::NftMintV1 { .. }) - | Some(OutputData::TokenIssuanceV1 { .. }) => { - num_creations += 1; - continue; - } - } - } - None => unreachable!(), - } - } - } - } - // Check that enough fee - let mlt = self - .total_value_of_input_tokens - .get(&TokenId::mlt()) - .ok_or("not found MLT fees")?; - if cfg!(test) { - // For tests we will use a small amount of MLT - ensure!(mlt >= &(num_creations * 10), "insufficient fee"); - } else { - // If we are not in tests, we should use 100 MLT for each token creation - ensure!( - mlt >= &(num_creations * crate::tokens::Mlt(100).to_munit()), - "insufficient fee" - ) - } - Ok(()) - } - - pub fn checking_utxos_exists(&mut self) -> Result<(), &'static str> { - // Resolve the transaction inputs by looking up UTXOs being spent by them. - // - // This will contain one of the following: - // * Ok(utxos): a vector of UTXOs each input spends. 
- // * Err(missing): a vector of outputs missing from the store
-
-                self.spent_utxo = {
-                    let mut missing = Vec::new();
-                    let mut resolved: Vec<TransactionOutputFor<T>> = Vec::new();
-
-                    for input in &self.tx.inputs {
-                        if let Some(input_utxo) = <UtxoStore<T>>::get(&input.outpoint) {
-                            let lock_commitment = input_utxo.destination.lock_commitment();
-                            ensure!(
-                                input.lock_hash() == *lock_commitment,
-                                "Lock hash does not match"
-                            );
-                            resolved.push(input_utxo);
-                        } else {
-                            missing.push(input.outpoint.clone().as_fixed_bytes().to_vec());
-                        }
-                    }
-                    missing.is_empty().then(|| resolved).ok_or(missing)
-                };
-
-                // Check that outputs are valid
-                for (output_index, (token_id, outputs_vec)) in
-                    self.all_outputs_map.iter().enumerate()
-                {
-                    for (sub_index, output) in outputs_vec.iter().enumerate() {
-                        let hash = self.tx.outpoint((output_index + sub_index) as u64);
-                        ensure!(!<UtxoStore<T>>::contains_key(hash), "output already exists");
-                        if token_id == &TokenId::mlt() {
-                            ensure!(output.value > 0, "output value must be nonzero");
-                        }
-                        self.new_utxos.push(hash.as_fixed_bytes().to_vec());
-                    }
-                }
-                Ok(())
-            }
-
-            pub fn calculating_reward(&mut self) -> Result<(), &'static str> {
-                use sp_std::convert::TryFrom;
-                // Reward is currently paid only in MLT
-                self.reward = if self.total_value_of_input_tokens.contains_key(&TokenId::mlt())
-                    && self.total_value_of_output_tokens.contains_key(&TokenId::mlt())
-                {
-                    u64::try_from(
-                        self.total_value_of_input_tokens[&TokenId::mlt()]
-                            .checked_sub(self.total_value_of_output_tokens[&TokenId::mlt()])
-                            .ok_or("reward underflow")?,
-                    )
-                    .map_err(|_e| "too big amount of fee")?
-                } else {
-                    u64::try_from(
-                        *self
-                            .total_value_of_input_tokens
-                            .get(&TokenId::mlt())
-                            .ok_or("fee doesn't exist")?,
-                    )
-                    .map_err(|_e| "too big amount of fee")?
-                };
-                Ok(())
-            }
-
-            pub fn collect_result(&self) -> Result<ValidTransaction, &'static str> {
-                Ok(ValidTransaction {
-                    priority: self.reward,
-                    requires: self.spent_utxo.clone().map_or_else(|x| x, |_| Vec::new()),
-                    provides: self.new_utxos.clone(),
-                    longevity: TransactionLongevity::MAX,
-                    propagate: true,
-                })
-            }
-        }
-    };
-}

From 6f858ab4a8c1acbd60e902c60cf7a70926d1504d Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Tue, 2 Nov 2021 15:49:55 +0300
Subject: [PATCH 26/53] The build server runs rust 1.55.0-nightly, where
 `edition=2021` is an unstable feature. Fixed.
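For context: keeping `edition = "2021"` on that toolchain would have required the
nightly-only opt-in sketched below (not applied here, shown only for reference):

    cargo-features = ["edition2021"]

    [package]
    edition = "2021"

Downgrading to `edition = "2018"` keeps the crate building without nightly cargo features.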
Signed-off-by: sinitcin
---
 libs/base58_nostd/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/libs/base58_nostd/Cargo.toml b/libs/base58_nostd/Cargo.toml
index 719ef12..0205671 100644
--- a/libs/base58_nostd/Cargo.toml
+++ b/libs/base58_nostd/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "base58_nostd"
 version = "0.1.0"
-edition = "2021"
+edition = "2018"
 authors = ["Anton Sinitsyn "]
 description = "Encodes and decodes the Bech32 format"

From a4d12747f6896b72342e7ff4a0029311bb40f136 Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Tue, 2 Nov 2021 16:26:01 +0300
Subject: [PATCH 27/53] Changed b58 to b58check for the string representation
 of TokenID

Signed-off-by: sinitcin
---
 pallets/utxo/src/tokens.rs | 43 +++++++++++++------------------------
 1 file changed, 15 insertions(+), 28 deletions(-)

diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs
index da4d6a1..195f5ce 100644
--- a/pallets/utxo/src/tokens.rs
+++ b/pallets/utxo/src/tokens.rs
@@ -19,58 +19,45 @@ impl Mlt {
     }
 }
 
-#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
-#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)]
-enum TokenIdInner {
-    MLT,
-    Asset(H160),
-}
-
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
 #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)]
 pub struct TokenId {
-    inner: TokenIdInner,
+    inner: H160,
 }
 
 impl TokenId {
     pub fn new_asset(first_input_hash: H256) -> TokenId {
         TokenId {
             // We are losing the first bytes of H256 here and using the last 20 bytes
-            inner: TokenIdInner::Asset(H160::from(first_input_hash)),
+            inner: H160::from(first_input_hash),
         }
     }
 
     pub fn to_string(&self) -> Vec<u8> {
-        match self.inner {
-            TokenIdInner::MLT => sp_std::vec![],
-            TokenIdInner::Asset(hash) => hash.as_bytes().to_base58().to_vec(),
-        }
-    }
-
-    fn hash160_from_bytes(bytes: &[u8]) -> Result<H160, &'static str> {
-        ensure!(
-            bytes.len() == LENGTH_BYTES_TO_REPRESENT_ID,
-            "Unexpected length of the asset ID"
-        );
-        let mut buffer = [0u8; 20];
-        buffer.copy_from_slice(bytes);
-        Ok(H160::from(buffer))
+        self.inner.as_bytes().to_mls_b58check().to_vec()
     }
 
     pub fn from_string(data: &str) -> Result<TokenId, &'static str> {
-        let data = data.from_base58().map_err(|x| match x {
+        let data = data.from_mls_b58check().map_err(|x| match x {
             FromBase58Error::InvalidBase58Character { ..
} => "Invalid Base58 character", FromBase58Error::InvalidBase58Length => "Invalid Base58 length", FromBase58Error::InvalidChecksum => "Invalid checksum", FromBase58Error::InvalidPrefix => "Invalid token id", })?; - - let hash = TokenId::hash160_from_bytes(data.as_slice())?; - Ok(TokenId { - inner: TokenIdInner::Asset(hash), + inner: TokenId::hash160_from_bytes(data.as_slice())?, }) } + + fn hash160_from_bytes(bytes: &[u8]) -> Result { + ensure!( + bytes.len() == LENGTH_BYTES_TO_REPRESENT_ID, + "Unexpected length of the asset ID" + ); + let mut buffer = [0u8; 20]; + buffer.copy_from_slice(bytes); + Ok(H160::from(buffer)) + } } #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] From 3f6982ef67960dc8a985cc77985dd2bb1613bab8 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 2 Nov 2021 16:54:09 +0300 Subject: [PATCH 28/53] Changed in comments `Spend` to `Submit` in tests.rs Signed-off-by: sinitcin --- pallets/utxo/src/tests.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index fc75123..e6626f7 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -680,7 +680,7 @@ fn test_nft_unique() { .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); let new_utxo_hash = tx.outpoint(1); let (_, init_utxo) = genesis_utxo(); - // Spend + // Submit assert!(UtxoStore::::contains_key(H256::from(init_utxo))); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); @@ -704,7 +704,7 @@ fn test_nft_unique() { time_lock: Default::default(), } .sign_unchecked(&[new_utxo], 0, &alice_pub_key); - // Spend + // Submit assert!(UtxoStore::::contains_key(H256::from(new_utxo_hash))); frame_support::assert_err_ignore_postinfo!( Utxo::spend(Origin::signed(H256::zero()), tx), From 44477238e608927d418336a767811dcb3e184d40 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 2 Nov 2021 18:16:30 +0300 Subject: [PATCH 29/53] Added additional checks for the token_id in issuance. Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 8 ++++++++ pallets/utxo/src/tests.rs | 14 ++------------ 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index c3f0a04..325f460 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -584,6 +584,10 @@ pub mod pallet { ensure!(amount_to_issue > &0u128, "output value must be nonzero"); ensure!(number_of_decimals <= &18, "too long decimals"); // If token has just created we can't meet another amount here. + ensure!( + !total_value_of_input_tokens.contains_key(token_id), + "this id can't be used for a token" + ); total_value_of_input_tokens.insert(token_id.clone(), *amount_to_issue); // But probably in this input we have a fee mlt_amount_in_inputs = mlt_amount_in_inputs @@ -672,6 +676,10 @@ pub mod pallet { ensure!(number_of_decimals <= &18, "too long decimals"); // If token has just created we can't meet another amount here. 
+ ensure!( + !total_value_of_output_tokens.contains_key(token_id), + "this id can't be used for a new token" + ); total_value_of_output_tokens.insert(token_id.clone(), *amount_to_issue); // But probably in this input we have a fee mlt_amount_in_outputs = mlt_amount_in_outputs diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index e6626f7..b48c6d4 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -928,11 +928,11 @@ fn test_tokens_issuance_with_corrupted_uri() { } #[test] -fn test_tokens_transfer() { +fn test_token_transfer() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { let token_id = TokenId::new_asset(H256::random()); - // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself + // Alice issue 1_000_000_000 MLS-01, and send them to Karl let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], @@ -956,19 +956,9 @@ fn test_tokens_transfer() { .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - let new_utxo_hash = tx.outpoint(0); - let new_utxo = tx.outputs[0].clone(); let token_utxo_hash = tx.outpoint(1); let token_utxo = tx.outputs[1].clone(); - // then send rest of the tokens to karl (proving that the first tx was successful) - let _tx = Transaction { - inputs: vec![TransactionInput::new_empty(new_utxo_hash)], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(karl_pub_key))], - time_lock: Default::default(), - } - .sign_unchecked(&[new_utxo.clone()], 0, &alice_pub_key); - // Let's fail on wrong token id let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], From 9ae9766bcf4668241b650ebeca5e9fb48821476e Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 2 Nov 2021 18:28:45 +0300 Subject: [PATCH 30/53] Changed type from u128 to Value in the OutputData enum Signed-off-by: sinitcin --- pallets/utxo/src/tokens.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index 195f5ce..f97fe17 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -65,13 +65,13 @@ impl TokenId { pub enum OutputData { // TokenTransfer data to another user. If it is a token, then the token data must also be transferred to the recipient. 
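 // Amounts below use the pallet-wide Value alias rather than a bare u128, so
 // token amounts stay consistent with the value type used for MLT elsewhere.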
#[codec(index = 1)] - TokenTransferV1 { token_id: TokenId, amount: u128 }, + TokenTransferV1 { token_id: TokenId, amount: Value }, // A new token creation #[codec(index = 2)] TokenIssuanceV1 { token_id: TokenId, token_ticker: Vec, - amount_to_issue: u128, + amount_to_issue: Value, // Should be not more than 18 numbers number_of_decimals: u8, metadata_uri: Vec, @@ -80,7 +80,7 @@ pub enum OutputData { #[codec(index = 3)] TokenBurnV1 { token_id: TokenId, - amount_to_burn: u128, + amount_to_burn: Value, }, // A new NFT creation #[codec(index = 4)] From 8b541493db811f7c1d4f12785f49c3e36e1658f7 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 2 Nov 2021 19:17:02 +0300 Subject: [PATCH 31/53] Added cases into test_token_transfer Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 10 ++-- pallets/utxo/src/tests.rs | 97 +++++++++++++++++++++++++++++++++++++-- 2 files changed, 98 insertions(+), 9 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 325f460..036fee2 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -805,10 +805,12 @@ pub mod pallet { let mut num_creations = 0; for output_token in &total_value_of_output_tokens { match total_value_of_input_tokens.get(&output_token.0) { - Some(input_value) => ensure!( - input_value >= &output_token.1, - "output value must not exceed input value" - ), + Some(input_value) => { + ensure!( + input_value == output_token.1, + "output value must not exceed input value" + ) + } None => { match &tx.outputs.iter().find(|x| match x.data { Some(ref output_data) => { diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index b48c6d4..90346ca 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -954,7 +954,6 @@ fn test_token_transfer() { time_lock: Default::default(), } .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); let token_utxo_hash = tx.outpoint(1); let token_utxo = tx.outputs[1].clone(); @@ -1016,7 +1015,7 @@ fn test_token_transfer() { "output value must not exceed input value" ); - // should be success + // Let's fail because there is occurred a tokens burn let tx = Transaction { inputs: vec![TransactionInput::new_empty(token_utxo_hash)], outputs: vec![TransactionOutput::new_p2pk_with_data( @@ -1024,13 +1023,101 @@ fn test_token_transfer() { H256::from(alice_pub_key), OutputData::TokenTransferV1 { token_id: token_id.clone(), - amount: 1_000_000_000, + amount: 300_000_000, }, )], time_lock: Default::default(), } - .sign_unchecked(&[token_utxo], 0, &karl_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "output value must not exceed input value" + ); + + // Let's send 300_000_000 and rest back + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 300_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 700_000_000, + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let alice_tokens_utxo_hash = 
tx.outpoint(0); + let karl_tokens_utxo_hash = tx.outpoint(1); + let karl_tokens_utxo = tx.outputs[1].clone(); + assert!(!UtxoStore::::contains_key(H256::from( + token_utxo_hash + ))); + assert!(UtxoStore::::contains_key(alice_tokens_utxo_hash)); + assert!(UtxoStore::::contains_key(karl_tokens_utxo_hash)); + + // should be success + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(karl_tokens_utxo_hash)], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 400_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 300_000_000, + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[karl_tokens_utxo], 0, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + assert_eq!( + 300_000_000, + UtxoStore::::get(alice_tokens_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { amount, .. } => amount, + _ => 0, + }) + .unwrap_or(0) + ); + + let new_alice_tokens_utxo_hash = tx.outpoint(0); + assert!(UtxoStore::::contains_key(new_alice_tokens_utxo_hash)); + assert_eq!( + 400_000_000, + UtxoStore::::get(new_alice_tokens_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { amount, .. } => amount, + _ => 0, + }) + .unwrap_or(0) + ); }); } From 9d66a29d56956df3fda51deaee78300f31fddc51 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Tue, 2 Nov 2021 21:22:20 +0300 Subject: [PATCH 32/53] Added additional checks for the data_hash of NFT Signed-off-by: sinitcin --- pallets/utxo/src/tests.rs | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 90346ca..9d4e18e 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -1128,6 +1128,7 @@ fn test_nft_transferring() { let token_id = TokenId::new_asset(H256::random()); // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself let (utxo0, input0) = tx_input_gen_no_signature(); + let data_hash = NftDataHash::Raw(build_random_vec(32)); let tx = Transaction { inputs: vec![input0], outputs: vec![ @@ -1137,7 +1138,7 @@ fn test_nft_transferring() { H256::from(karl_pub_key), OutputData::NftMintV1 { token_id: token_id.clone(), - data_hash: NftDataHash::Hash32([7; 32]), + data_hash: data_hash.clone(), metadata_uri: "facebook.com".as_bytes().to_vec(), }, ), @@ -1219,7 +1220,23 @@ fn test_nft_transferring() { time_lock: Default::default(), } .sign_unchecked(&[token_utxo], 0, &karl_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let nft_utxo_hash = tx.outpoint(0); + assert!(!UtxoStore::::contains_key(H256::from( + token_utxo_hash + ))); + assert!(UtxoStore::::contains_key(nft_utxo_hash)); + assert_eq!( + data_hash, + crate::get_output_by_token_id::(token_id.clone()) + .unwrap() + .data + .map(|x| match x { + OutputData::NftMintV1 { data_hash, .. } => data_hash, + _ => NftDataHash::Raw(Vec::new()), + }) + .unwrap_or(NftDataHash::Raw(Vec::new())) + ); }); } From ca84123c196ff4ba5e0a71f934ea3c89283fc362 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Thu, 4 Nov 2021 18:26:50 +0300 Subject: [PATCH 33/53] Removed - test "Creation a token with a pre-existing ID or re-creation of an already created token". 
The problem is now token_id created according to tx inputs. And we can't reproduce it. Signed-off-by: sinitcin --- pallets/utxo/src/tests.rs | 578 ++++++++++++++++++-------------------- 1 file changed, 269 insertions(+), 309 deletions(-) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 9d4e18e..b47d3ee 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -545,7 +545,8 @@ fn test_send_to_address() { } // Testing token creation: -use crate::tokens::{NftDataHash, TokenId}; +// use crate::tokens::{NftDataHash, TokenId}; +use crate::tokens::TokenId; use rand::Rng; fn build_random_vec(len: usize) -> Vec { @@ -562,13 +563,11 @@ fn build_random_vec(len: usize) -> Vec { fn test_token_issuance() { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); - let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); - let output_new = TransactionOutput { value: 0, destination: Destination::Pubkey(alice_pub_key), data: Some(OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(first_input_hash), + //token_id: TokenId::new_asset(first_input_hash), token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, number_of_decimals: 2, @@ -594,13 +593,13 @@ fn test_token_issuance() { match UtxoStore::::get(new_utxo_hash).expect("The new output not found").data { Some(OutputData::TokenIssuanceV1 { - token_id, + //token_id, token_ticker, amount_to_issue, number_of_decimals, metadata_uri, }) => { - assert_eq!(TokenId::new_asset(first_input_hash), token_id); + //assert_eq!(TokenId::new_asset(first_input_hash), token_id); assert_eq!(1_000_000_000, amount_to_issue); assert_eq!("BensT".as_bytes().to_vec(), token_ticker); assert_eq!(2, number_of_decimals); @@ -611,175 +610,108 @@ fn test_token_issuance() { }); } -#[test] -// Simple creation of NFT -fn test_nft_mint() { - execute_with_alice(|alice_pub_key| { - let (utxo0, input0) = tx_input_gen_no_signature(); - let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); - let data_hash = NftDataHash::Raw(vec![1, 2, 3, 4, 5]); - let output = TransactionOutput { - value: 0, - destination: Destination::Pubkey(alice_pub_key), - data: Some(OutputData::NftMintV1 { - token_id: TokenId::new_asset(first_input_hash), - data_hash: data_hash.clone(), - metadata_uri: "facebook.com".as_bytes().to_vec(), - }), - }; - let tx = Transaction { - inputs: vec![input0], - outputs: vec![output], - time_lock: Default::default(), - } - .sign_unchecked(&[utxo0], 0, &alice_pub_key); - let new_utxo_hash = tx.outpoint(0); - let (_, init_utxo) = genesis_utxo(); - assert!(UtxoStore::::contains_key(H256::from(init_utxo))); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); - assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); - assert!(UtxoStore::::contains_key(new_utxo_hash)); - assert_eq!( - data_hash, - UtxoStore::::get(new_utxo_hash) - .unwrap() - .data - .map(|x| match x { - OutputData::NftMintV1 { data_hash, .. 
} => data_hash, - _ => NftDataHash::Raw(Vec::new()), - }) - .unwrap_or(NftDataHash::Raw(Vec::new())) - ); - }) -} - -#[test] -// NFT might be only unique, we can't create a few nft for one item -fn test_nft_unique() { - execute_with_alice(|alice_pub_key| { - let (utxo0, input0) = tx_input_gen_no_signature(); - let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); - - let mut nft_data = OutputData::NftMintV1 { - token_id: TokenId::new_asset(first_input_hash), - data_hash: NftDataHash::Hash32([255; 32]), - metadata_uri: "facebook.com".as_bytes().to_vec(), - }; - let tx = Transaction { - inputs: vec![input0.clone()], - outputs: vec![ - TransactionOutput { - value: 0, - destination: Destination::Pubkey(alice_pub_key), - data: Some(nft_data.clone()), - }, - TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), - ], - time_lock: Default::default(), - } - .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); - let new_utxo_hash = tx.outpoint(1); - let (_, init_utxo) = genesis_utxo(); - // Submit - assert!(UtxoStore::::contains_key(H256::from(init_utxo))); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); - // Checking a new UTXO - assert!(UtxoStore::::contains_key(new_utxo_hash)); - let new_utxo = tx.outputs[1].clone(); - - if let OutputData::NftMintV1 { - ref mut token_id, .. - } = nft_data - { - *token_id = TokenId::new_asset(H256::random()); - } - let tx = Transaction { - inputs: vec![TransactionInput::new_empty(new_utxo_hash.clone())], - outputs: vec![TransactionOutput { - value: 0, - destination: Destination::Pubkey(alice_pub_key), - data: Some(nft_data.clone()), - }], - time_lock: Default::default(), - } - .sign_unchecked(&[new_utxo], 0, &alice_pub_key); - // Submit - assert!(UtxoStore::::contains_key(H256::from(new_utxo_hash))); - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - "digital data has already been minted" - ); - }); -} - -#[test] -// Creation a token with a pre-existing ID or re-creation of an already created token. -fn test_token_double_creation() { - execute_with_alice(|alice_pub_key| { - let (utxo0, input0) = tx_input_gen_no_signature(); - let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); - - let issuance_data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(first_input_hash), - token_ticker: "BensT".as_bytes().to_vec(), - amount_to_issue: 1_000_000_000, - number_of_decimals: 2, - metadata_uri: "facebook.com".as_bytes().to_vec(), - }; - - let tx = Transaction { - inputs: vec![input0.clone()], - outputs: vec![TransactionOutput { - value: 0, - destination: Destination::Pubkey(alice_pub_key), - data: Some(issuance_data.clone()), - }], - time_lock: Default::default(), - } - .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); - let new_utxo_hash = tx.outpoint(0); - let (_, init_utxo) = genesis_utxo(); - // Spend - assert!(UtxoStore::::contains_key(H256::from(init_utxo))); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); - // Checking a new UTXO - assert!(UtxoStore::::contains_key(new_utxo_hash)); - assert_eq!( - 1_000_000_000, - UtxoStore::::get(new_utxo_hash) - .unwrap() - .data - .map(|x| match x { - OutputData::TokenIssuanceV1 { - amount_to_issue, .. 
- } => amount_to_issue, - _ => 0, - }) - .unwrap_or(0) - ); - let new_utxo_hash = tx.outpoint(0); - let new_utxo = tx.outputs[0].clone(); - - let tx = Transaction { - inputs: vec![TransactionInput::new_empty(new_utxo_hash.clone())], - outputs: vec![TransactionOutput { - value: 0, - destination: Destination::Pubkey(alice_pub_key), - data: Some(issuance_data.clone()), - }], - time_lock: Default::default(), - } - .sign_unchecked(&[new_utxo], 0, &alice_pub_key); - // Spend - assert!(UtxoStore::::contains_key(H256::from(new_utxo_hash))); - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - "token has already been issued" - ); - }); -} +// todo: This part isn't fully tested, left for the next PR +// #[test] +// // Simple creation of NFT +// fn test_nft_mint() { +// execute_with_alice(|alice_pub_key| { +// let (utxo0, input0) = tx_input_gen_no_signature(); +// let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); +// let data_hash = NftDataHash::Raw(vec![1, 2, 3, 4, 5]); +// let output = TransactionOutput { +// value: 0, +// destination: Destination::Pubkey(alice_pub_key), +// data: Some(OutputData::NftMintV1 { +// token_id: TokenId::new_asset(first_input_hash), +// data_hash: data_hash.clone(), +// metadata_uri: "facebook.com".as_bytes().to_vec(), +// }), +// }; +// let tx = Transaction { +// inputs: vec![input0], +// outputs: vec![output], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[utxo0], 0, &alice_pub_key); +// let new_utxo_hash = tx.outpoint(0); +// let (_, init_utxo) = genesis_utxo(); +// assert!(UtxoStore::::contains_key(H256::from(init_utxo))); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); +// assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); +// assert!(UtxoStore::::contains_key(new_utxo_hash)); +// assert_eq!( +// data_hash, +// UtxoStore::::get(new_utxo_hash) +// .unwrap() +// .data +// .map(|x| match x { +// OutputData::NftMintV1 { data_hash, .. } => data_hash, +// _ => NftDataHash::Raw(Vec::new()), +// }) +// .unwrap_or(NftDataHash::Raw(Vec::new())) +// ); +// }) +// } +// +// #[test] +// // NFT might be only unique, we can't create a few nft for one item +// fn test_nft_unique() { +// execute_with_alice(|alice_pub_key| { +// let (utxo0, input0) = tx_input_gen_no_signature(); +// let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); +// +// let mut nft_data = OutputData::NftMintV1 { +// token_id: TokenId::new_asset(first_input_hash), +// data_hash: NftDataHash::Hash32([255; 32]), +// metadata_uri: "facebook.com".as_bytes().to_vec(), +// }; +// let tx = Transaction { +// inputs: vec![input0.clone()], +// outputs: vec![ +// TransactionOutput { +// value: 0, +// destination: Destination::Pubkey(alice_pub_key), +// data: Some(nft_data.clone()), +// }, +// TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), +// ], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); +// let new_utxo_hash = tx.outpoint(1); +// let (_, init_utxo) = genesis_utxo(); +// // Submit +// assert!(UtxoStore::::contains_key(H256::from(init_utxo))); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); +// assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); +// // Checking a new UTXO +// assert!(UtxoStore::::contains_key(new_utxo_hash)); +// let new_utxo = tx.outputs[1].clone(); +// +// if let OutputData::NftMintV1 { +// ref mut token_id, .. 
+// } = nft_data +// { +// *token_id = TokenId::new_asset(H256::random()); +// } +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(new_utxo_hash.clone())], +// outputs: vec![TransactionOutput { +// value: 0, +// destination: Destination::Pubkey(alice_pub_key), +// data: Some(nft_data.clone()), +// }], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[new_utxo], 0, &alice_pub_key); +// // Submit +// assert!(UtxoStore::::contains_key(H256::from(new_utxo_hash))); +// frame_support::assert_err_ignore_postinfo!( +// Utxo::spend(Origin::signed(H256::zero()), tx), +// "digital data has already been minted" +// ); +// }); +// } // This macro using for the fast creation and sending a tx macro_rules! test_tx { @@ -823,7 +755,6 @@ macro_rules! test_tx { fn test_tokens_issuance_empty_ticker() { // Ticker empty let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: vec![], amount_to_issue: 0, number_of_decimals: 0, @@ -836,7 +767,6 @@ fn test_tokens_issuance_empty_ticker() { fn test_tokens_issuance_too_big_ticker() { // Ticker too long let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: Vec::from([0u8; 10_000]), amount_to_issue: 0, number_of_decimals: 0, @@ -849,7 +779,6 @@ fn test_tokens_issuance_too_big_ticker() { fn test_tokens_issuance_amount_zero() { // Amount to issue is zero let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: b"BensT".to_vec(), amount_to_issue: 0, number_of_decimals: 0, @@ -862,7 +791,6 @@ fn test_tokens_issuance_amount_zero() { fn test_tokens_issuance_too_big_decimals() { // Number of decimals more than 18 numbers let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: b"BensT".to_vec(), amount_to_issue: 1_000_000_000, number_of_decimals: 19, @@ -875,7 +803,6 @@ fn test_tokens_issuance_too_big_decimals() { fn test_tokens_issuance_empty_metadata() { // metadata_uri empty let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: b"BensT".to_vec(), amount_to_issue: 1_000_000_000, number_of_decimals: 18, @@ -888,7 +815,6 @@ fn test_tokens_issuance_empty_metadata() { fn test_tokens_issuance_too_long_metadata() { // metadata_uri too long let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: b"BensT".to_vec(), amount_to_issue: 1_000_000_000, number_of_decimals: 18, @@ -903,7 +829,6 @@ fn test_tokens_issuance_with_junk_data() { let mut rng = rand::thread_rng(); let garbage = build_random_vec(100); let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: vec![0, 255, 254, 2, 1], amount_to_issue: rng.gen::() as u128, number_of_decimals: 18, @@ -918,7 +843,6 @@ fn test_tokens_issuance_with_corrupted_uri() { let garbage = build_random_vec(100); // garbage uri let data = OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: b"BensT".to_vec(), amount_to_issue: rng.gen::() as u128, number_of_decimals: 18, @@ -927,13 +851,51 @@ fn test_tokens_issuance_with_corrupted_uri() { test_tx!(data, Err, "metadata uri has none ascii characters"); } +#[test] +fn test_two_token_creation_in_one_tx() { + execute_with_alice(|alice_pub_key| { + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + 
H256::from(alice_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: b"Enric".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "facebook.com".as_bytes().to_vec(), + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: b"Ben".to_vec(), + amount_to_issue: 2_000_000_000, + number_of_decimals: 3, + metadata_uri: "facebook.com".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "this id can't be used for a new token" + ); + }); +} + #[test] fn test_token_transfer() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { - let token_id = TokenId::new_asset(H256::random()); // Alice issue 1_000_000_000 MLS-01, and send them to Karl let (utxo0, input0) = tx_input_gen_no_signature(); + let token_id = TokenId::new(&input0); let tx = Transaction { inputs: vec![input0], outputs: vec![ @@ -942,7 +904,6 @@ fn test_token_transfer() { 10, H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { - token_id: token_id.clone(), token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, // Should be not more than 18 numbers @@ -959,13 +920,14 @@ fn test_token_transfer() { let token_utxo = tx.outputs[1].clone(); // Let's fail on wrong token id + let input = TransactionInput::new_empty(token_utxo_hash); let tx = Transaction { - inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + inputs: vec![input.clone()], outputs: vec![TransactionOutput::new_p2pk_with_data( 0, H256::from(alice_pub_key), OutputData::TokenTransferV1 { - token_id: TokenId::new_asset(H256::random()), + token_id: TokenId::new(&input), amount: 1_00_000_000, }, )], @@ -1121,131 +1083,131 @@ fn test_token_transfer() { }); } -#[test] -fn test_nft_transferring() { - let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); - test_ext.execute_with(|| { - let token_id = TokenId::new_asset(H256::random()); - // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself - let (utxo0, input0) = tx_input_gen_no_signature(); - let data_hash = NftDataHash::Raw(build_random_vec(32)); - let tx = Transaction { - inputs: vec![input0], - outputs: vec![ - TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), - TransactionOutput::new_p2pk_with_data( - 10, - H256::from(karl_pub_key), - OutputData::NftMintV1 { - token_id: token_id.clone(), - data_hash: data_hash.clone(), - metadata_uri: "facebook.com".as_bytes().to_vec(), - }, - ), - ], - time_lock: Default::default(), - } - .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - let token_utxo_hash = tx.outpoint(1); - let token_utxo = tx.outputs[1].clone(); - - // Let's fail on wrong token id - let tx = Transaction { - inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_p2pk_with_data( - 0, - H256::from(alice_pub_key), - OutputData::TokenTransferV1 { - token_id: TokenId::new_asset(H256::random()), - amount: 1_00_000_000, - }, - )], - time_lock: Default::default(), - } - .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - "input for the token not found" - ); - // Let's fail on exceed token amount - let tx = 
Transaction { - inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_p2pk_with_data( - 0, - H256::from(alice_pub_key), - OutputData::TokenTransferV1 { - token_id: token_id.clone(), - amount: 1_000_000_001, - }, - )], - time_lock: Default::default(), - } - .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - "output value must not exceed input value" - ); - - // Let's send a big amount of MLT with the correct tokens - let tx = Transaction { - inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_p2pk_with_data( - 1_000_000_000, - H256::from(alice_pub_key), - OutputData::TokenTransferV1 { - token_id: token_id.clone(), - amount: 1_000_000_000, - }, - )], - time_lock: Default::default(), - } - .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - "output value must not exceed input value" - ); - - // should be success - let tx = Transaction { - inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_p2pk_with_data( - 0, - H256::from(alice_pub_key), - OutputData::TokenTransferV1 { - token_id: token_id.clone(), - amount: 1, - }, - )], - time_lock: Default::default(), - } - .sign_unchecked(&[token_utxo], 0, &karl_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - let nft_utxo_hash = tx.outpoint(0); - assert!(!UtxoStore::::contains_key(H256::from( - token_utxo_hash - ))); - assert!(UtxoStore::::contains_key(nft_utxo_hash)); - assert_eq!( - data_hash, - crate::get_output_by_token_id::(token_id.clone()) - .unwrap() - .data - .map(|x| match x { - OutputData::NftMintV1 { data_hash, .. 
} => data_hash, - _ => NftDataHash::Raw(Vec::new()), - }) - .unwrap_or(NftDataHash::Raw(Vec::new())) - ); - }); -} +// todo: This part isn't fully tested, left for the next PR +// #[test] +// fn test_nft_transferring() { +// let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); +// test_ext.execute_with(|| { +// let token_id = TokenId::new_asset(H256::random()); +// // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself +// let (utxo0, input0) = tx_input_gen_no_signature(); +// let data_hash = NftDataHash::Raw(build_random_vec(32)); +// let tx = Transaction { +// inputs: vec![input0], +// outputs: vec![ +// TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), +// TransactionOutput::new_p2pk_with_data( +// 10, +// H256::from(karl_pub_key), +// OutputData::NftMintV1 { +// token_id: token_id.clone(), +// data_hash: data_hash.clone(), +// metadata_uri: "facebook.com".as_bytes().to_vec(), +// }, +// ), +// ], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); +// let token_utxo_hash = tx.outpoint(1); +// let token_utxo = tx.outputs[1].clone(); +// +// // Let's fail on wrong token id +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(token_utxo_hash)], +// outputs: vec![TransactionOutput::new_p2pk_with_data( +// 0, +// H256::from(alice_pub_key), +// OutputData::TokenTransferV1 { +// token_id: TokenId::new_asset(H256::random()), +// amount: 1_00_000_000, +// }, +// )], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); +// frame_support::assert_err_ignore_postinfo!( +// Utxo::spend(Origin::signed(H256::zero()), tx), +// "input for the token not found" +// ); +// // Let's fail on exceed token amount +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(token_utxo_hash)], +// outputs: vec![TransactionOutput::new_p2pk_with_data( +// 0, +// H256::from(alice_pub_key), +// OutputData::TokenTransferV1 { +// token_id: token_id.clone(), +// amount: 1_000_000_001, +// }, +// )], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); +// frame_support::assert_err_ignore_postinfo!( +// Utxo::spend(Origin::signed(H256::zero()), tx), +// "output value must not exceed input value" +// ); +// +// // Let's send a big amount of MLT with the correct tokens +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(token_utxo_hash)], +// outputs: vec![TransactionOutput::new_p2pk_with_data( +// 1_000_000_000, +// H256::from(alice_pub_key), +// OutputData::TokenTransferV1 { +// token_id: token_id.clone(), +// amount: 1_000_000_000, +// }, +// )], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); +// frame_support::assert_err_ignore_postinfo!( +// Utxo::spend(Origin::signed(H256::zero()), tx), +// "output value must not exceed input value" +// ); +// +// // should be success +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(token_utxo_hash)], +// outputs: vec![TransactionOutput::new_p2pk_with_data( +// 0, +// H256::from(alice_pub_key), +// OutputData::TokenTransferV1 { +// token_id: token_id.clone(), +// amount: 1, +// }, +// )], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[token_utxo], 0, &karl_pub_key); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); +// let nft_utxo_hash = 
tx.outpoint(0); +// assert!(!UtxoStore::::contains_key(H256::from( +// token_utxo_hash +// ))); +// assert!(UtxoStore::::contains_key(nft_utxo_hash)); +// assert_eq!( +// data_hash, +// crate::get_output_by_token_id::(token_id.clone()) +// .unwrap() +// .data +// .map(|x| match x { +// OutputData::NftMintV1 { data_hash, .. } => data_hash, +// _ => NftDataHash::Raw(Vec::new()), +// }) +// .unwrap_or(NftDataHash::Raw(Vec::new())) +// ); +// }); +// } #[test] // Test tx where Input with token and without MLT, output has token (without MLT) fn test_token_creation_with_insufficient_fee() { let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { - let token_id = TokenId::new_asset(H256::random()); // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { @@ -1256,7 +1218,6 @@ fn test_token_creation_with_insufficient_fee() { 0, H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { - token_id: token_id.clone(), token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, number_of_decimals: 2, @@ -1280,7 +1241,6 @@ fn test_token_creation_with_insufficient_fee() { 0, H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { - token_id: TokenId::new_asset(H256::random()), token_ticker: b"Enric".to_vec(), amount_to_issue: 1_000_000_000, // Should be not more than 18 numbers @@ -1293,7 +1253,7 @@ fn test_token_creation_with_insufficient_fee() { .sign_unchecked(&[token_utxo], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( Utxo::spend(Origin::signed(H256::zero()), tx), - "insufficient fee" + "this id can't be used for a new token" ); }); } From 090bace0bdcdadf23ec72b1829206d21f8307df1 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Fri, 5 Nov 2021 15:08:12 +0300 Subject: [PATCH 34/53] Fixed unit tests Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 333 +++++++++++++++++------------- pallets/utxo/src/tests.rs | 232 ++++++++++++++------- pallets/utxo/src/tokens.rs | 67 +++--- test/functional/custom-types.json | 60 +++++- 4 files changed, 440 insertions(+), 252 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 036fee2..2e25f86 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -45,7 +45,9 @@ use utxo_api::UtxoApi; pub mod pallet { pub use crate::script::{BlockTime, RawBlockTime}; use crate::sign::{self, Scheme}; - use crate::tokens::{NftDataHash, OutputData, TokenId, Value}; + // todo: This part isn't fully tested, left for the next PR + // use crate::tokens::{NftDataHash}; + use crate::tokens::{OutputData, TokenId, Value}; use bech32; use chainscript::Script; use codec::{Decode, Encode}; @@ -349,14 +351,22 @@ pub mod pallet { pub(super) type UtxoStore = StorageMap<_, Identity, H256, TransactionOutputFor>; #[pallet::storage] - #[pallet::getter(fn pointer_to_issue_token)] + #[pallet::getter(fn token_issuance_transactions)] pub(super) type TokenIssuanceTransactions = - StorageMap<_, Identity, TokenId, /* UTXO */ H256, OptionQuery>; + StorageMap<_, Identity, TokenId, TransactionFor, OptionQuery>; + // When someone wants to issue a token we should calculate token_id and use it when the owner + // in other transactions will transfer the token. 
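+    // A rough sketch of the intended flow (using this pallet's own helpers):
+    //     let token_id = TokenId::new(&tx.inputs[0]);         // id derived from the first input
+    //     TokenIssuanceId::<T>::insert(outpoint, token_id);   // recorded when the issuance UTXO is stored
+    //     let id = TokenIssuanceId::<T>::get(outpoint);       // resolved later when that UTXO is spent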
#[pallet::storage] - #[pallet::getter(fn nft_unique_data_hash)] - pub(super) type NftUniqueDataHash = - StorageMap<_, Identity, NftDataHash, /* UTXO */ H256, OptionQuery>; + #[pallet::getter(fn token_id_issuance)] + pub(super) type TokenIssuanceId = + StorageMap<_, Identity, /* outpoint */ H256, TokenId, OptionQuery>; + + // todo: This part isn't fully tested, left for the next PR + // #[pallet::storage] + // #[pallet::getter(fn nft_unique_data_hash)] + // pub(super) type NftUniqueDataHash = + // StorageMap<_, Identity, NftDataHash, /* UTXO */ H256, OptionQuery>; #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] @@ -372,12 +382,13 @@ pub mod pallet { } } - pub(crate) fn get_output_by_token_id( - token_id: TokenId, - ) -> Option> { - let utxo_id = TokenIssuanceTransactions::::get(token_id)?; - UtxoStore::::get(utxo_id) - } + // todo: This part isn't fully tested, left for the next PR + // pub(crate) fn get_output_by_token_id( + // token_id: TokenId, + // ) -> Option> { + // let utxo_id = TokenIssuanceTransactions::::get(token_id)?; + // UtxoStore::::get(utxo_id) + // } // Strips a transaction of its Signature fields by replacing value with ZERO-initialized fixed hash. pub fn get_simple_transaction( @@ -465,8 +476,6 @@ pub mod pallet { pub fn validate_transaction( tx: &TransactionFor, ) -> Result { - //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries - //ensure rather than assert to avoid panic //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries ensure!(!tx.inputs.is_empty(), "no inputs"); @@ -537,13 +546,19 @@ pub mod pallet { let full_inputs: Vec<(TokenId, TransactionOutputFor)> = tx .inputs .iter() - .filter_map(|input| >::get(&input.outpoint)) - .filter_map(|output| match output.data { + .filter_map(|input| Some((input.outpoint, >::get(&input.outpoint)?))) + .filter_map(|(outpoint, output)| match output.data { Some(ref data) => match data { - OutputData::TokenTransferV1 { token_id, .. } - | OutputData::TokenIssuanceV1 { token_id, .. } - | OutputData::NftMintV1 { token_id, .. } => Some((token_id.clone(), output)), - OutputData::TokenBurnV1 { .. } => None, + OutputData::TokenTransferV1 { token_id, .. } => { + Some((token_id.clone(), output)) + } + OutputData::TokenIssuanceV1 { .. } => { + let token_id = >::get(outpoint)?; + Some((token_id, output)) + } + // todo: This part isn't fully tested, left for the next PR + // | OutputData::NftMintV1 { token_id, .. } + // OutputData::TokenBurnV1 { .. } => None, }, None => { // We do not calculate MLT here @@ -559,17 +574,14 @@ pub mod pallet { let output = >::get(&input.outpoint).ok_or("missing inputs")?; match &output.data { Some(OutputData::TokenIssuanceV1 { - ref token_id, token_ticker, amount_to_issue, number_of_decimals, metadata_uri, }) => { // We have to check is this token already issued? - ensure!( - TokenIssuanceTransactions::::contains_key(token_id), - "token has never been issued" - ); + let token_id = TokenIssuanceId::::get(input.outpoint) + .ok_or("token has never been issued")?; ensure!( token_ticker.is_ascii(), "token ticker has none ascii characters" @@ -585,7 +597,7 @@ pub mod pallet { ensure!(number_of_decimals <= &18, "too long decimals"); // If token has just created we can't meet another amount here. 
ensure!( - !total_value_of_input_tokens.contains_key(token_id), + !total_value_of_input_tokens.contains_key(&token_id), "this id can't be used for a token" ); total_value_of_input_tokens.insert(token_id.clone(), *amount_to_issue); @@ -599,6 +611,10 @@ pub mod pallet { amount, .. }) => { + ensure!( + TokenIssuanceTransactions::::contains_key(token_id), + "token has never been issued" + ); total_value_of_input_tokens.insert( token_id.clone(), total_value_of_input_tokens @@ -612,31 +628,33 @@ pub mod pallet { .checked_add(output.value) .ok_or("input value overflow")?; } - Some(OutputData::TokenBurnV1 { .. }) => { - // Nothing to do here because tokens no longer exist. - } - Some(OutputData::NftMintV1 { - ref token_id, - data_hash, - metadata_uri, - }) => { - // We have to check is this token already issued? - ensure!( - TokenIssuanceTransactions::::contains_key(token_id), - "unable to use an input where NFT has not minted yet" - ); - // Check is this digital data unique? - ensure!( - NftUniqueDataHash::::contains_key(data_hash), - "unable to use an input where NFT digital data was changed" - ); - ensure!( - metadata_uri.is_ascii(), - "metadata uri has none ascii characters" - ); - // If NFT has just created we can't meet another NFT part here. - total_value_of_input_tokens.insert(token_id.clone(), 1); - } + + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::TokenBurnV1 { .. }) => { + // // Nothing to do here because tokens no longer exist. + // } + // Some(OutputData::NftMintV1 { + // ref token_id, + // data_hash, + // metadata_uri, + // }) => { + // // We have to check is this token already issued? + // ensure!( + // TokenIssuanceTransactions::::contains_key(token_id), + // "unable to use an input where NFT has not minted yet" + // ); + // // Check is this digital data unique? + // ensure!( + // NftUniqueDataHash::::contains_key(data_hash), + // "unable to use an input where NFT digital data was changed" + // ); + // ensure!( + // metadata_uri.is_ascii(), + // "metadata uri has none ascii characters" + // ); + // // If NFT has just created we can't meet another NFT part here. + // total_value_of_input_tokens.insert(token_id.clone(), 1); + // } None => { mlt_amount_in_inputs = mlt_amount_in_inputs .checked_add(output.value) @@ -650,15 +668,16 @@ pub mod pallet { for output in &tx.outputs { match &output.data { Some(OutputData::TokenIssuanceV1 { - ref token_id, token_ticker, amount_to_issue, number_of_decimals, metadata_uri, }) => { // We have to check is this token already issued? + let token_id = TokenId::new(&tx.inputs[0]); + ensure!( - !TokenIssuanceTransactions::::contains_key(token_id), + !TokenIssuanceTransactions::::contains_key(&token_id), "token has already been issued" ); ensure!( @@ -677,7 +696,7 @@ pub mod pallet { // If token has just created we can't meet another amount here. ensure!( - !total_value_of_output_tokens.contains_key(token_id), + !total_value_of_output_tokens.contains_key(&token_id), "this id can't be used for a new token" ); total_value_of_output_tokens.insert(token_id.clone(), *amount_to_issue); @@ -691,6 +710,10 @@ pub mod pallet { amount, .. }) => { + ensure!( + TokenIssuanceTransactions::::contains_key(token_id), + "input for the token not found" + ); total_value_of_output_tokens.insert( token_id.clone(), total_value_of_output_tokens @@ -704,32 +727,33 @@ pub mod pallet { .checked_add(output.value) .ok_or("input value overflow")?; } - Some(OutputData::TokenBurnV1 { .. 
}) => { - // Nothing to do here because tokens no longer exist. - } - Some(OutputData::NftMintV1 { - ref token_id, - data_hash, - metadata_uri, - }) => { - // We have to check is this token already issued? - ensure!( - !TokenIssuanceTransactions::::contains_key(token_id), - "token has already been issued" - ); - - // Check is this digital data unique? - ensure!( - !>::contains_key(data_hash), - "digital data has already been minted" - ); - ensure!( - metadata_uri.is_ascii(), - "metadata uri has none ascii characters" - ); - // If NFT has just created we can't meet another NFT part here. - total_value_of_output_tokens.insert(token_id.clone(), 1); - } + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::TokenBurnV1 { .. }) => { + // // Nothing to do here because tokens no longer exist. + // } + // Some(OutputData::NftMintV1 { + // ref token_id, + // data_hash, + // metadata_uri, + // }) => { + // // We have to check is this token already issued? + // ensure!( + // !TokenIssuanceTransactions::::contains_key(token_id), + // "token has already been issued" + // ); + // + // // Check is this digital data unique? + // ensure!( + // !>::contains_key(data_hash), + // "digital data has already been minted" + // ); + // ensure!( + // metadata_uri.is_ascii(), + // "metadata uri has none ascii characters" + // ); + // // If NFT has just created we can't meet another NFT part here. + // total_value_of_output_tokens.insert(token_id.clone(), 1); + // } None => { mlt_amount_in_outputs = mlt_amount_in_outputs .checked_add(output.value) @@ -741,11 +765,13 @@ pub mod pallet { // Check for token creation for output in tx.outputs.iter() { let tid = match output.data { - Some(OutputData::TokenTransferV1 { ref token_id, .. }) - | Some(OutputData::TokenIssuanceV1 { ref token_id, .. }) => token_id.clone(), - Some(OutputData::NftMintV1 { .. }) - | Some(OutputData::TokenBurnV1 { .. }) - | None => continue, + Some(OutputData::TokenTransferV1 { ref token_id, .. }) => token_id.clone(), + Some(OutputData::TokenIssuanceV1 { .. }) => TokenId::new(&tx.inputs[0]), + None => continue, + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::NftMintV1 { .. }) + // | Some(OutputData::TokenBurnV1 { .. }) + // | None => continue, }; // If we have input and output for the same token it's not a problem if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() { @@ -771,13 +797,14 @@ pub mod pallet { Some(OutputData::TokenTransferV1 { amount, .. }) => { ensure!(amount > 0, "output value must be nonzero") } - Some(OutputData::TokenBurnV1 { amount_to_burn, .. }) => { - ensure!(amount_to_burn > 0, "output value must be nonzero") - } - Some(OutputData::NftMintV1 { .. }) => { - // Nothing to check - } None => ensure!(output.value > 0, "output value must be nonzero"), + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::TokenBurnV1 { amount_to_burn, .. }) => { + // ensure!(amount_to_burn > 0, "output value must be nonzero") + // } + // Some(OutputData::NftMintV1 { .. 
}) => { + // // Nothing to check + // } } let hash = tx.outpoint(output_index as u64); ensure!(!>::contains_key(hash), "output already exists"); @@ -802,43 +829,57 @@ pub mod pallet { "output value must not exceed input value" ); - let mut num_creations = 0; - for output_token in &total_value_of_output_tokens { - match total_value_of_input_tokens.get(&output_token.0) { + let mut issuance_counter = 0; + for (token_id, token_value) in &total_value_of_output_tokens { + match total_value_of_input_tokens.get(&token_id) { Some(input_value) => { ensure!( - input_value == output_token.1, + input_value == token_value, "output value must not exceed input value" ) } + // We have an output, but we have not an input None => { - match &tx.outputs.iter().find(|x| match x.data { + // find TransactionOutput for this token_id + let output = &tx.outputs.iter().find(|x| match x.data { Some(ref output_data) => { - output_data.id().as_ref() == Some(output_token.0) + output_data.id(&tx.inputs[0]).as_ref() == Some(token_id) } None => false, - }) { + }); + + match output { Some(output) => match output.data { - None - | Some(OutputData::TokenTransferV1 { .. }) - | Some(OutputData::TokenBurnV1 { .. }) => { - frame_support::fail!("input for the token not found") - } - Some(OutputData::NftMintV1 { .. }) - | Some(OutputData::TokenIssuanceV1 { .. }) => { - num_creations += 1; + // todo: This part isn't fully tested, left for the next PR + // | Some(OutputData::TokenBurnV1 { .. }) => + // Some(OutputData::NftMintV1 { .. }) + Some(OutputData::TokenIssuanceV1 { .. }) => { + // If we make a new token then okay, this is not a problem + issuance_counter += 1; continue; } + None | Some(OutputData::TokenTransferV1 { .. }) => { + // But we can't send a token without input + frame_support::fail!("input for the token not found2") + } }, + // This situation should never happen, but let's cover it None => frame_support::fail!("corrupted output data"), } } } } ensure!( - mlt_amount_in_inputs >= (num_creations * crate::tokens::Mlt(100).to_munit()), - "insufficient fee" + issuance_counter <= 1, + "too many issuance in one transaction" ); + if issuance_counter == 1 { + // The sender should pay not less than 100 MLT for issuance + ensure!( + mlt_amount_in_inputs >= crate::tokens::Mlt(100).to_munit(), + "insufficient fee" + ); + } for (index, (input, input_utxo)) in tx.inputs.iter().zip(input_utxos).enumerate() { match &input_utxo.destination { @@ -922,28 +963,33 @@ pub mod pallet { log::debug!("inserting to UtxoStore {:?} as key {:?}", output, hash); >::insert(hash, output); match &output.data { - Some(OutputData::NftMintV1 { - token_id, - data_hash, - .. - }) => { - // We have to control that digital data of NFT is unique. - // Otherwise, anybody else might make a new NFT with exactly the same hash. - >::insert(data_hash, hash); - // Also, we should provide possibility of find an output that by token_id. - // This output is a place where token was created. It allow us to check that a token or - // a NFT have not created yet. - >::insert(token_id, hash); - } - Some(OutputData::TokenIssuanceV1 { token_id, .. }) => { - // For MLS-01 we save a relation between token_id and the output where + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::NftMintV1 { + // token_id, + // data_hash, + // .. + // }) => { + // // We have to control that digital data of NFT is unique. + // // Otherwise, anybody else might make a new NFT with exactly the same hash. 
+ // >::insert(data_hash, hash); + // // Also, we should provide possibility of find an output that by token_id. + // // This output is a place where token was created. It allow us to check that a token or + // // a NFT have not created yet. + // >::insert(token_id, hash); + // } + Some(OutputData::TokenIssuanceV1 { .. }) => { + let token_id = TokenId::new(&tx.inputs[0]); + // Link output hash + >::insert(hash, &token_id); + // For MLS-01 we save a relation between token_id and the tx where // token was created. - >::insert(token_id, hash); + >::insert(&token_id, &tx); } // For the security reason we are implementing all cases - Some(OutputData::TokenBurnV1 { .. }) - | Some(OutputData::TokenTransferV1 { .. }) - | None => continue, + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::TokenBurnV1 { .. }) + // | + Some(OutputData::TokenTransferV1 { .. }) | None => continue, } } Destination::CreatePP(script, data) => { @@ -1115,22 +1161,23 @@ impl crate::Pallet { 1337 } - pub fn nft_read( - nft_id: &core::primitive::str, - ) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { - match crate::pallet::get_output_by_token_id::( - crate::tokens::TokenId::from_string(&nft_id).ok()?, - )? - .data - { - Some(crate::tokens::OutputData::NftMintV1 { - data_hash, - metadata_uri, - .. - }) => Some((metadata_uri, data_hash.encode())), - _ => None, - } - } + // todo: This part isn't fully tested, left for the next PR + // pub fn nft_read( + // nft_id: &core::primitive::str, + // ) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { + // match crate::pallet::get_output_by_token_id::( + // crate::tokens::TokenId::from_string(&nft_id).ok()?, + // )? + // .data + // { + // Some(crate::tokens::OutputData::NftMintV1 { + // data_hash, + // metadata_uri, + // .. 
+ // }) => Some((metadata_uri, data_hash.encode())), + // _ => None, + // } + // } } fn coin_picker(outpoints: &Vec) -> Result, DispatchError> { diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index b47d3ee..4be2780 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -889,15 +889,17 @@ fn test_two_token_creation_in_one_tx() { }); } -#[test] -fn test_token_transfer() { +// Let's wrap common acts +fn test_tx_issuance_for_transfer(expecting_err_msg: &'static str, test_func: F) +where + F: Fn(TokenId, Public, Public, H256, TransactionOutput) -> Transaction, +{ let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); test_ext.execute_with(|| { // Alice issue 1_000_000_000 MLS-01, and send them to Karl let (utxo0, input0) = tx_input_gen_no_signature(); - let token_id = TokenId::new(&input0); let tx = Transaction { - inputs: vec![input0], + inputs: vec![input0.clone()], outputs: vec![ TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), TransactionOutput::new_p2pk_with_data( @@ -915,86 +917,163 @@ fn test_token_transfer() { time_lock: Default::default(), } .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let token_id = TokenId::new(&tx.inputs[0]); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - let token_utxo_hash = tx.outpoint(1); - let token_utxo = tx.outputs[1].clone(); - // Let's fail on wrong token id - let input = TransactionInput::new_empty(token_utxo_hash); - let tx = Transaction { - inputs: vec![input.clone()], - outputs: vec![TransactionOutput::new_p2pk_with_data( - 0, - H256::from(alice_pub_key), - OutputData::TokenTransferV1 { - token_id: TokenId::new(&input), - amount: 1_00_000_000, - }, - )], - time_lock: Default::default(), - } - .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - "input for the token not found" + let token_utxo_hash = tx.outpoint(1); + let token_utxo: TransactionOutput = tx.outputs[1].clone(); + // Call a test func + let tx = test_func( + token_id, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo, ); - - // Let's fail on exceed token amount - let tx = Transaction { - inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_p2pk_with_data( - 0, - H256::from(alice_pub_key), - OutputData::TokenTransferV1 { - token_id: token_id.clone(), - amount: 1_000_000_001, - }, - )], - time_lock: Default::default(), - } - .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( Utxo::spend(Origin::signed(H256::zero()), tx), - "output value must not exceed input value" + expecting_err_msg ); + }); +} - // Let's send a big amount of MLT with the correct tokens - let tx = Transaction { - inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_p2pk_with_data( - 1_000_000_000, - H256::from(alice_pub_key), - OutputData::TokenTransferV1 { - token_id: token_id.clone(), - amount: 1_000_000_000, - }, - )], - time_lock: Default::default(), - } - .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - "output value must not exceed input value" - ); +#[test] +fn test_token_transfer_with_wrong_token_id() { + let test_fun = Box::new( + move |_token_id, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo: TransactionOutput| { + let input = 
TransactionInput::new_empty(token_utxo_hash); + Transaction { + inputs: vec![input.clone()], + outputs: vec![TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: TokenId::new(&input), + amount: 1_00_000_000, + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key) + }, + ); + test_tx_issuance_for_transfer("input for the token not found", test_fun); +} - // Let's fail because there is occurred a tokens burn +#[test] +fn test_token_transfer_exceed_amount_tokens() { + let test_fun = Box::new( + move |token_id, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo: TransactionOutput| { + Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id, + amount: 1_000_000_001, + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key) + }, + ); + test_tx_issuance_for_transfer("output value must not exceed input value", test_fun); +} + +#[test] +fn test_token_transfer_exceed_amount_mlt() { + let test_fun = Box::new( + move |token_id: TokenId, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo: TransactionOutput| { + Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_p2pk_with_data( + 1_000_000_000, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 1_000_000_000, + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key) + }, + ); + test_tx_issuance_for_transfer("output value must not exceed input value", test_fun); +} + +#[test] +fn test_token_transfer_send_part_others_burn() { + let test_fun = Box::new( + move |token_id: TokenId, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo: TransactionOutput| { + Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![ + // Send only 30%, let's forget about another 70% of tokens + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 300_000_000, + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key) + }, + ); + test_tx_issuance_for_transfer("output value must not exceed input value", test_fun); +} + +#[test] +fn test_token_transfer() { + let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); + test_ext.execute_with(|| { + // Alice issue 1_000_000_000 MLS-01, and send them to Karl + let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { - inputs: vec![TransactionInput::new_empty(token_utxo_hash)], - outputs: vec![TransactionOutput::new_p2pk_with_data( - 0, - H256::from(alice_pub_key), - OutputData::TokenTransferV1 { - token_id: token_id.clone(), - amount: 300_000_000, - }, - )], + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_p2pk_with_data( + 10, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 2, + metadata_uri: "facebook.com".as_bytes().to_vec(), + }, + ), + ], time_lock: Default::default(), } - 
.sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); - frame_support::assert_err_ignore_postinfo!( - Utxo::spend(Origin::signed(H256::zero()), tx), - "output value must not exceed input value" - ); + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let token_utxo_hash = tx.outpoint(1); + let token_utxo = tx.outputs[1].clone(); // Let's send 300_000_000 and rest back let tx = Transaction { @@ -1213,7 +1292,10 @@ fn test_token_creation_with_insufficient_fee() { let tx = Transaction { inputs: vec![input0], outputs: vec![ - TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey( + crate::tokens::Mlt(1).to_munit(), + H256::from(karl_pub_key), + ), TransactionOutput::new_p2pk_with_data( 0, H256::from(karl_pub_key), @@ -1253,7 +1335,7 @@ fn test_token_creation_with_insufficient_fee() { .sign_unchecked(&[token_utxo], 0, &karl_pub_key); frame_support::assert_err_ignore_postinfo!( Utxo::spend(Origin::signed(H256::zero()), tx), - "this id can't be used for a new token" + "insufficient fee" ); }); } diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index f97fe17..60958e1 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -1,12 +1,15 @@ #![cfg_attr(not(feature = "std"), no_std)] +use crate::TransactionInput; use base58_nostd::{FromBase58, FromBase58Error, ToBase58}; use codec::{Decode, Encode}; use frame_support::ensure; use frame_support::{dispatch::Vec, RuntimeDebug}; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::{H160, H256}; +use sp_core::Hasher; +use sp_core::H160; +use sp_runtime::traits::BlakeTwo256; const LENGTH_BYTES_TO_REPRESENT_ID: usize = 20; @@ -26,7 +29,9 @@ pub struct TokenId { } impl TokenId { - pub fn new_asset(first_input_hash: H256) -> TokenId { + // Token id depends on signed or unsigned the same input + pub fn new(first_input: &TransactionInput) -> TokenId { + let first_input_hash = BlakeTwo256::hash(first_input.encode().as_slice()); TokenId { // We are loosing the first bytes of H256 over here and using 20 the last bytes inner: H160::from(first_input_hash), @@ -69,45 +74,47 @@ pub enum OutputData { // A new token creation #[codec(index = 2)] TokenIssuanceV1 { - token_id: TokenId, + // token_id: TokenId, token_ticker: Vec, amount_to_issue: Value, // Should be not more than 18 numbers number_of_decimals: u8, metadata_uri: Vec, }, - // Burning a token or NFT - #[codec(index = 3)] - TokenBurnV1 { - token_id: TokenId, - amount_to_burn: Value, - }, - // A new NFT creation - #[codec(index = 4)] - NftMintV1 { - token_id: TokenId, - data_hash: NftDataHash, - metadata_uri: Vec, - }, -} + // todo: This part isn't fully tested, left for the next PR -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] -pub enum NftDataHash { - #[codec(index = 1)] - Hash32([u8; 32]), - #[codec(index = 2)] - Raw(Vec), - // Or any type that you want to implement + // // Burning a token or NFT + // #[codec(index = 3)] + // TokenBurnV1 { + // token_id: TokenId, + // amount_to_burn: Value, + // }, + // // A new NFT creation + // #[codec(index = 4)] + // NftMintV1 { + // token_id: TokenId, + // data_hash: NftDataHash, + // metadata_uri: Vec, + // }, } +// todo: This part isn't fully tested, left for the next PR +// #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +// #[derive(Clone, 
Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +// pub enum NftDataHash { +// #[codec(index = 1)] +// Hash32([u8; 32]), +// #[codec(index = 2)] +// Raw(Vec), +// // Or any type that you want to implement +// } + impl OutputData { - pub(crate) fn id(&self) -> Option { + pub(crate) fn id(&self, first_input: &TransactionInput) -> Option { match self { - OutputData::TokenTransferV1 { ref token_id, .. } - | OutputData::TokenIssuanceV1 { ref token_id, .. } - | OutputData::NftMintV1 { ref token_id, .. } => Some(token_id.clone()), - _ => None, + OutputData::TokenTransferV1 { ref token_id, .. } => Some(token_id.clone()), + // OutputData::NftMintV1 { .. } | + OutputData::TokenIssuanceV1 { .. } => Some(TokenId::new(first_input)), } } } diff --git a/test/functional/custom-types.json b/test/functional/custom-types.json index 2778941..30cfd96 100644 --- a/test/functional/custom-types.json +++ b/test/functional/custom-types.json @@ -25,6 +25,60 @@ [ "CallPP", "DestinationCallPP" ] ] }, + "NftDataHash": { + "type": "struct", + "type_mapping": [ + [ "Hash32", "[u8; 32]" ], + [ "Raw", "Vec" ] + ] + }, + "TokenId": { + "type": "struct", + "type_mapping": [ + [ "inner", "H160" ] + ] + }, + "TokenTransferV1": { + "type": "struct", + "type_mapping": [ + [ "token_id", "TokenId" ], + [ "amount", "Value" ] + ] + }, + "TokenIssuanceV1": { + "type": "struct", + "type_mapping": [ + [ "token_id", "TokenId" ], + [ "token_ticker", "Vec" ], + [ "amount_to_issue", "Value" ], + [ "number_of_decimals", "u8" ], + [ "metadata_uri", "Vec" ] + ] + }, + "TokenBurnV1": { + "type": "struct", + "type_mapping": [ + [ "token_id", "TokenId" ], + [ "amount_to_burn", "Value" ] + ] + }, + "NftMintV1": { + "type": "struct", + "type_mapping": [ + [ "token_id", "TokenId" ], + [ "data_hash", "NftDataHash" ], + [ "metadata_uri", "Vec" ] + ] + }, + "OutputData": { + "type": "enum", + "type_mapping": [ + [ "TokenTransfer", "TokenTransferV1" ], + [ "TokenIssuance", "TokenIssuanceV1" ], + [ "TokenBurn", "TokenBurnV1" ], + [ "NftMint", "NftMintV1" ] + ] + }, "TransactionInput": { "type": "struct", "type_mapping": [ @@ -37,8 +91,8 @@ "type": "struct", "type_mapping": [ [ "value", "Value" ], - [ "header", "TXOutputHeader"], - [ "destination", "Destination" ] + [ "destination", "Destination" ], + [ "data", "Option"] ] }, "Transaction": { @@ -61,10 +115,8 @@ "Address": "MultiAddress", "LookupSource": "MultiAddress", "Value": "u128", - "TXOutputHeader": "u128", "value": "Value", "pub_key": "H256", - "header": "TXOutputHeader", "Difficulty": "U256", "DifficultyAndTimestamp": { "type": "struct", From cea04cc536ccfaf7fe3607ebb5a473aa7b982697 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Fri, 5 Nov 2021 15:28:32 +0300 Subject: [PATCH 35/53] Fixed conflict after merge Signed-off-by: sinitcin --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 4b25f3d..53adeea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4148,8 +4148,8 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "pp-api", - "rand 0.4.6", "proptest", + "rand 0.4.6", "serde", "sp-consensus-aura", "sp-core", From 7af916b77ffb5a23360c9a649b4167c02e067cb7 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Fri, 5 Nov 2021 15:36:49 +0300 Subject: [PATCH 36/53] Fixed conflict after merge Signed-off-by: sinitcin --- pallets/utxo/src/tests.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 5f486c4..fe8d4be 100644 --- a/pallets/utxo/src/tests.rs +++ 
b/pallets/utxo/src/tests.rs @@ -16,8 +16,8 @@ // Author(s): C. Yap use crate::{ - mock::*, tokens::Value, BlockTime, Destination, RawBlockTime, RewardTotal, Transaction, TransactionInput, - TransactionOutput, UtxoStore, + mock::*, tokens::Value, BlockTime, Destination, RawBlockTime, RewardTotal, Transaction, + TransactionInput, TransactionOutput, UtxoStore, }; use chainscript::{opcodes::all as opc, Builder}; use codec::Encode; @@ -27,9 +27,9 @@ use frame_support::{ sp_runtime::traits::{BlakeTwo256, Hash}, }; +use crate::script::test::gen_block_time_real; use crate::tokens::OutputData; use proptest::prelude::*; -use crate::script::test::gen_block_time_real; use sp_core::{sp_std::vec, sr25519::Public, testing::SR25519, H256, H512}; fn tx_input_gen_no_signature() -> (TransactionOutput, TransactionInput) { @@ -604,7 +604,6 @@ proptest! { std::cmp::min_by_key(time0, time1, RawBlockTime::as_u64), std::cmp::max_by_key(time0, time1, RawBlockTime::as_u64), ); - let (res0, res1) = execute_with_alice(|alice| { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { From 429062435018870fe6a73d58b284fc4839f1c01f Mon Sep 17 00:00:00 2001 From: sinitcin Date: Fri, 5 Nov 2021 16:07:55 +0300 Subject: [PATCH 37/53] Changed links from facebook.com to mintlayer.org Signed-off-by: sinitcin --- pallets/utxo/src/tests.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index fe8d4be..688c40b 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -709,7 +709,7 @@ fn test_token_issuance() { token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, number_of_decimals: 2, - metadata_uri: "facebook.com".as_bytes().to_vec(), + metadata_uri: "mintlayer.org".as_bytes().to_vec(), }), }; let tx = Transaction { @@ -741,7 +741,7 @@ fn test_token_issuance() { assert_eq!(1_000_000_000, amount_to_issue); assert_eq!("BensT".as_bytes().to_vec(), token_ticker); assert_eq!(2, number_of_decimals); - assert_eq!("facebook.com".as_bytes().to_vec(), metadata_uri); + assert_eq!("mintlayer.org".as_bytes().to_vec(), metadata_uri); } _ => panic!("Transaction data is corrupted"), } @@ -762,7 +762,7 @@ fn test_token_issuance() { // data: Some(OutputData::NftMintV1 { // token_id: TokenId::new_asset(first_input_hash), // data_hash: data_hash.clone(), -// metadata_uri: "facebook.com".as_bytes().to_vec(), +// metadata_uri: "mintlayer.org".as_bytes().to_vec(), // }), // }; // let tx = Transaction { @@ -801,7 +801,7 @@ fn test_token_issuance() { // let mut nft_data = OutputData::NftMintV1 { // token_id: TokenId::new_asset(first_input_hash), // data_hash: NftDataHash::Hash32([255; 32]), -// metadata_uri: "facebook.com".as_bytes().to_vec(), +// metadata_uri: "mintlayer.org".as_bytes().to_vec(), // }; // let tx = Transaction { // inputs: vec![input0.clone()], @@ -1003,7 +1003,7 @@ fn test_two_token_creation_in_one_tx() { token_ticker: b"Enric".to_vec(), amount_to_issue: 1_000_000_000, number_of_decimals: 2, - metadata_uri: "facebook.com".as_bytes().to_vec(), + metadata_uri: "mintlayer.org".as_bytes().to_vec(), }, ), TransactionOutput::new_p2pk_with_data( @@ -1013,7 +1013,7 @@ fn test_two_token_creation_in_one_tx() { token_ticker: b"Ben".to_vec(), amount_to_issue: 2_000_000_000, number_of_decimals: 3, - metadata_uri: "facebook.com".as_bytes().to_vec(), + metadata_uri: "mintlayer.org".as_bytes().to_vec(), }, ), ], @@ -1048,7 +1048,7 @@ where amount_to_issue: 1_000_000_000, // Should be not more than 18 
numbers number_of_decimals: 2, - metadata_uri: "facebook.com".as_bytes().to_vec(), + metadata_uri: "mintlayer.org".as_bytes().to_vec(), }, ), ], @@ -1201,7 +1201,7 @@ fn test_token_transfer() { amount_to_issue: 1_000_000_000, // Should be not more than 18 numbers number_of_decimals: 2, - metadata_uri: "facebook.com".as_bytes().to_vec(), + metadata_uri: "mintlayer.org".as_bytes().to_vec(), }, ), ], @@ -1319,7 +1319,7 @@ fn test_token_transfer() { // OutputData::NftMintV1 { // token_id: token_id.clone(), // data_hash: data_hash.clone(), -// metadata_uri: "facebook.com".as_bytes().to_vec(), +// metadata_uri: "mintlayer.org".as_bytes().to_vec(), // }, // ), // ], @@ -1441,7 +1441,7 @@ fn test_token_creation_with_insufficient_fee() { token_ticker: "BensT".as_bytes().to_vec(), amount_to_issue: 1_000_000_000, number_of_decimals: 2, - metadata_uri: "facebook.com".as_bytes().to_vec(), + metadata_uri: "mintlayer.org".as_bytes().to_vec(), }, ), ], @@ -1465,7 +1465,7 @@ fn test_token_creation_with_insufficient_fee() { amount_to_issue: 1_000_000_000, // Should be not more than 18 numbers number_of_decimals: 2, - metadata_uri: "facebook.com".as_bytes().to_vec(), + metadata_uri: "mintlayer.org".as_bytes().to_vec(), }, )], time_lock: Default::default(), From 26a5251d5878169d45c1de0414673eaa5299c3f1 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Sat, 6 Nov 2021 13:23:17 +0300 Subject: [PATCH 38/53] Fixed existing functional tests due to changes in the TransactionOutput format. Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 4 +--- test/functional/example_test.py | 12 ++++++------ test/functional/feature_alice_bob_test.py | 12 ++++++------ test/functional/feature_smart_contract_test.py | 16 ++++++++-------- test/functional/test_framework/mintlayer/utxo.py | 8 ++++---- 5 files changed, 25 insertions(+), 27 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 2e25f86..c74d336 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -233,10 +233,9 @@ pub mod pallet { } impl TransactionOutput { - /// By default the header is 0: + /// By default the data is None: /// token type for both the value and fee is MLT, /// and the signature method is BLS. - /// functions are available in TXOutputHeaderImpls to update the header. pub fn new_pubkey(value: Value, pubkey: H256) -> Self { let pubkey = sp_core::sr25519::Public::from_h256(pubkey); Self { @@ -420,7 +419,6 @@ pub mod pallet { >::put(remainder as Value); for authority in auths { - // TODO: where do we get the header info? 
let utxo = TransactionOutput::new_pubkey(share_value, *authority); let hash = { diff --git a/test/functional/example_test.py b/test/functional/example_test.py index 5792c72..3c92f19 100755 --- a/test/functional/example_test.py +++ b/test/functional/example_test.py @@ -113,13 +113,13 @@ def run_test(self): outputs=[ utxo.Output( value=50, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), utxo.Output( value=100, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), ] @@ -135,8 +135,8 @@ def run_test(self): outputs=[ utxo.Output( value=60, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), ] ).sign(alice, [tx1.outputs[1]]) diff --git a/test/functional/feature_alice_bob_test.py b/test/functional/feature_alice_bob_test.py index 00bf8d9..1de62a7 100755 --- a/test/functional/feature_alice_bob_test.py +++ b/test/functional/feature_alice_bob_test.py @@ -73,8 +73,8 @@ def run_test(self): outputs=[ utxo.Output( value=50, - header=0, - destination=utxo.DestPubkey(bob.public_key) + destination=utxo.DestPubkey(bob.public_key), + data=None ), ] ).sign(alice, [utxos[0][1]]) @@ -88,13 +88,13 @@ def run_test(self): outputs=[ utxo.Output( value=30, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), utxo.Output( value=20, - header=0, - destination=utxo.DestPubkey(bob.public_key) + destination=utxo.DestPubkey(bob.public_key), + data=None ), ] ).sign(bob, tx1.outputs) diff --git a/test/functional/feature_smart_contract_test.py b/test/functional/feature_smart_contract_test.py index 3564bc6..8b82847 100755 --- a/test/functional/feature_smart_contract_test.py +++ b/test/functional/feature_smart_contract_test.py @@ -77,16 +77,16 @@ def run_test(self): outputs=[ utxo.Output( value=50, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), utxo.Output( value=10, - header=0, destination=utxo.DestCreatePP( code=os.path.join(os.path.dirname(__file__), "code.wasm"), data=[0xed, 0x4b, 0x9d, 0x1b], # default() constructor selector - ) + ), + data=None ), ] ).sign(alice, [initial_utxo[1]]) @@ -122,17 +122,17 @@ def run_test(self): outputs=[ utxo.Output( value=49, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), utxo.Output( value=1, - header=0, destination=utxo.DestCallPP( dest_account=acc_id, fund=False, input_data=bytes.fromhex(msg_data.to_hex()[2:]), - ) + ), + data=None ), ] ).sign(alice, [tx0.outputs[0]], [0]) diff --git a/test/functional/test_framework/mintlayer/utxo.py b/test/functional/test_framework/mintlayer/utxo.py index a12dd6a..6557320 100644 --- a/test/functional/test_framework/mintlayer/utxo.py +++ b/test/functional/test_framework/mintlayer/utxo.py @@ -136,15 +136,15 @@ def json(self): return { 'CallPP': { 'dest_account': self.acct, 'fund': self.fund, 'input_data': self.data } } class Output(): - def __init__(self, value, header, destination): + def __init__(self, value, destination, data): self.value = value - self.header = header self.destination = destination + self.data = data @staticmethod def load(obj): dest = Destination.load(obj['destination']) - return Output(obj['value'], obj['header'], dest) + return Output(obj['value'], dest, obj['data']) def type_string(self): return 
'TransactionOutput' @@ -152,8 +152,8 @@ def type_string(self): def json(self): return { 'value': self.value, - 'header': self.header, 'destination': self.destination.json(), + 'data': self.data, } From 4842067616c50be4b9df292baa912a49f57e07c7 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Sun, 7 Nov 2021 12:55:08 +0300 Subject: [PATCH 39/53] Added more tests for b58 include external data and data generated from our functional tests. Signed-off-by: sinitcin --- .gitignore | 2 + libs/base58_nostd/Cargo.toml | 8 + libs/base58_nostd/src/lib.rs | 383 ++++++++++++++++++++++++++++++++--- pallets/utxo/src/tokens.rs | 7 +- 4 files changed, 368 insertions(+), 32 deletions(-) diff --git a/.gitignore b/.gitignore index ab4b353..678d120 100644 --- a/.gitignore +++ b/.gitignore @@ -29,3 +29,5 @@ test/config.ini # direnv cache .direnv +# Python compiled files +*.pyc diff --git a/libs/base58_nostd/Cargo.toml b/libs/base58_nostd/Cargo.toml index 0205671..07dc268 100644 --- a/libs/base58_nostd/Cargo.toml +++ b/libs/base58_nostd/Cargo.toml @@ -18,3 +18,11 @@ default-features = false git = 'https://github.com/paritytech/substrate.git' version = '4.0.0-dev' branch = "master" + +[features] +default = ['std'] +testcontext = [] +std = [ + "sp-std/std", + "frame-support/std", +] diff --git a/libs/base58_nostd/src/lib.rs b/libs/base58_nostd/src/lib.rs index df6c76a..d889d32 100644 --- a/libs/base58_nostd/src/lib.rs +++ b/libs/base58_nostd/src/lib.rs @@ -29,18 +29,23 @@ use frame_support::sp_io::hashing::sha2_256; use sp_std::vec; use sp_std::vec::Vec; -pub const TOKEN_ID_PREFIX: &'static str = "MLS"; +pub const TOKEN_ID_PREFIX: u8 = b"M"[0]; const BASE58_ALPHABET: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; -const B58_DIGITS_MAP: &'static [i8] = &[ +const B58_BITCOIN_DIGITS_MAP: &'static [i8] = &[ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, -1, -1, -1, -1, -1, -1, -1, 9, 10, 11, 12, 13, 14, 15, 16, -1, 17, 18, 19, 20, 21, -1, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, -1, -1, -1, -1, -1, -1, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, -1, -1, -1, -1, -1, + 57, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, ]; /// Errors that can occur when decoding base58 encoded string. @@ -61,7 +66,7 @@ pub trait ToBase58 { /// Converts a value of `self` to a base58 value, returning the owned string. fn to_base58(&self) -> Vec; /// Converts a value of `self` to a base58 value with checksum applied, returning the owned string. - fn to_mls_b58check(&self) -> Vec; + fn to_mls_b58check(&self, prefix: Option>) -> Vec; } /// A trait for converting base58 encoded values. 
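// For reference, the checksum these b58check methods rely on can be sketched
// as below. This is an illustrative sketch assuming the conventional
// base58check rule (first four bytes of a double SHA-256), with `sha2_256`
// being the hashing helper imported at the top of this file; `checksum_sketch`
// is an illustrative name, not part of the patch.
fn checksum_sketch(payload: &[u8]) -> Vec<u8> {
    // Hash twice and keep the first four bytes as the checksum tail.
    let once = sha2_256(payload);
    let twice = sha2_256(&once);
    twice[0..4].to_vec()
}
// `to_mls_b58check(prefix)` then base58-encodes `prefix ++ payload ++ checksum`,
// and `from_mls_b58check(prefix)` verifies those trailing four bytes before
// stripping the prefix back off.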
@@ -70,7 +75,7 @@ pub trait FromBase58 { fn from_base58(&self) -> Result, FromBase58Error>; /// Converts a value of `self`, interpreted as base58 encoded data with checksum applied, into an owned vector of bytes, /// returning a vector. - fn from_mls_b58check(&self) -> Result, FromBase58Error>; + fn from_mls_b58check(&self, prefix: Option>) -> Result, FromBase58Error>; } fn checksum(payload: &[u8]) -> Vec { @@ -132,12 +137,12 @@ fn decode_from_base58(payload: &str) -> Result, FromBase58Error> { return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); } - if B58_DIGITS_MAP[b58[i] as usize] == -1 { + if B58_BITCOIN_DIGITS_MAP[b58[i] as usize] == -1 { // // Invalid base58 digit return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); } - let mut c = B58_DIGITS_MAP[b58[i] as usize] as u64; + let mut c = B58_BITCOIN_DIGITS_MAP[b58[i] as usize] as u64; let mut j = out.len(); while j != 0 { j -= 1; @@ -193,22 +198,28 @@ impl FromBase58 for str { decode_from_base58(self) } - fn from_mls_b58check(&self) -> Result, FromBase58Error> { + fn from_mls_b58check(&self, prefix: Option>) -> Result, FromBase58Error> { let mut payload: Vec = self.from_base58()?; - // Let's check is it right prefix or not - if &payload[0..3] != TOKEN_ID_PREFIX.as_bytes() { - Err(FromBase58Error::InvalidPrefix)?; - } if payload.len() < 5 { return Err(FromBase58Error::InvalidChecksum); } let checksum_index = payload.len() - 4; let provided_checksum = payload.split_off(checksum_index); - let checksum = checksum(&payload)[..4].to_vec(); + let checksum = checksum(&payload).to_vec(); if checksum != provided_checksum { return Err(FromBase58Error::InvalidChecksum); } - Ok(payload[TOKEN_ID_PREFIX.len()..].to_vec()) + if let Some(ref prefix) = prefix { + let payload_prefix = payload[..prefix.len()].to_vec(); + // Let's check is it right prefix or not + if &payload_prefix != prefix { + Err(FromBase58Error::InvalidPrefix)?; + } + } + match prefix { + Some(prefix) => Ok(payload[prefix.len()..].to_vec()), + None => Ok(payload), + } } } @@ -217,8 +228,12 @@ impl ToBase58 for [u8] { encode_to_base58(self) } - fn to_mls_b58check(&self) -> Vec { - let mut payload = TOKEN_ID_PREFIX.as_bytes().to_vec(); + fn to_mls_b58check(&self, prefix: Option>) -> Vec { + let mut payload = match prefix { + Some(prefix) => prefix.clone(), + None => vec![], + }; + // let mut payload = vec![prefix]; payload.extend(self); payload.extend(checksum(payload.as_slice())); encode_to_base58(payload.as_slice()) @@ -227,7 +242,7 @@ impl ToBase58 for [u8] { #[cfg(test)] mod tests { - use super::{FromBase58, FromBase58Error, ToBase58}; + use super::{FromBase58, FromBase58Error, ToBase58, TOKEN_ID_PREFIX}; #[test] fn test_from_base58_basic() { @@ -295,26 +310,303 @@ mod tests { assert_eq!(b"\0\0\0\0abc".to_base58(), "1111ZiCa".as_bytes()); } + #[test] + fn test_from_base58_compatible_functional_tests() { + // The data was being prepared in python script + + assert_eq!( + "2QjRKB7mHaXRjhUmgcQGAbDHPre2Uvq9ev4YiiFgLoUPrQdB52MuHoRwmB" + .from_base58() + .unwrap(), + b"To be, or not to be, that is the question:" + ); + + assert_eq!( + "LApxNT84PpjfwjYZyDdhQTNAuEp28SssymbKcj68fEc7wLh2qpkpXAuf" + .from_base58() + .unwrap(), + b"Whether 'tis nobler in the mind to suffer" + ); + + assert_eq!( + "USm3fpdSjgtutT9UNHZgsaR4UBcHmgYfxcaVubFjhj9Tio5Nfq9XNV5puD7H" + .from_base58() + .unwrap(), + b"The slings and arrows of outrageous fortune," + ); + + assert_eq!( + "JRYvHV9zVEFpwLXQLjTs8VhnP1nPiBZUFdHA5into6ntyEPsLwpnR8Vp" + .from_base58() + .unwrap(), + b"Or to 
take arms against a sea of troubles" + ); + + assert_eq!( + "26LXuFRSRgp2fUf8QhNjeEHjniK599smzB7pJsqf1XpLS9bkgd4d7gM9UX" + .from_base58() + .unwrap(), + b"And by opposing end them. To die-to sleep," + ); + + assert_eq!( + "3dU1LpdBTnUsha3T3cGiEUZPTtzRfLhCA83k22CMvbzKV9oMb87".from_base58().unwrap(), + b"No more; and by a sleep to say we end" + ); + + assert_eq!( + "ADeyMxyacx916HoiijiCJRMqdjtWULxSE2eSz1t11rQbLSvVbhv6cCiwqKFAQav" + .from_base58() + .unwrap(), + b"The heart-ache and the thousand natural shocks" + ); + + assert_eq!( + "2QhwWNuP7oGHaHRjydcvqxLC31wKkZ12MWFBoXpe1wLJ15z6vSRuqUdNYd" + .from_base58() + .unwrap(), + b"That flesh is heir to: 'tis a consummation" + ); + + assert_eq!( + "4Q7Mny7G48TgtAU6u3eqhT7FDqALB7LZ466AThn4G9jv7BBhx9pXbJz".from_base58().unwrap(), + b"Devoutly to be wish'd. To die, to sleep;" + ); + + assert_eq!( + "Efu1HHgBffNXqXSgamBAvVNBN28JgEtp2QBqZsTRvbn44DQFEL2YfVYnFrPAdBcEz25" + .from_base58() + .unwrap(), + b"To sleep, perchance to dream-ay, there's the rub:" + ); + } + + #[test] + fn test_to_base58_compatible_functional_tests() { + // The data was being prepared in python script + + assert_eq!( + b"To be, or not to be, that is the question:".to_base58(), + "2QjRKB7mHaXRjhUmgcQGAbDHPre2Uvq9ev4YiiFgLoUPrQdB52MuHoRwmB".as_bytes() + ); + + assert_eq!( + b"Whether 'tis nobler in the mind to suffer".to_base58(), + "LApxNT84PpjfwjYZyDdhQTNAuEp28SssymbKcj68fEc7wLh2qpkpXAuf".as_bytes() + ); + + assert_eq!( + b"The slings and arrows of outrageous fortune,".to_base58(), + "USm3fpdSjgtutT9UNHZgsaR4UBcHmgYfxcaVubFjhj9Tio5Nfq9XNV5puD7H".as_bytes() + ); + + assert_eq!( + b"Or to take arms against a sea of troubles".to_base58(), + "JRYvHV9zVEFpwLXQLjTs8VhnP1nPiBZUFdHA5into6ntyEPsLwpnR8Vp".as_bytes() + ); + + assert_eq!( + b"And by opposing end them. To die-to sleep,".to_base58(), + "26LXuFRSRgp2fUf8QhNjeEHjniK599smzB7pJsqf1XpLS9bkgd4d7gM9UX".as_bytes() + ); + + assert_eq!( + b"No more; and by a sleep to say we end".to_base58(), + "3dU1LpdBTnUsha3T3cGiEUZPTtzRfLhCA83k22CMvbzKV9oMb87".as_bytes() + ); + + assert_eq!( + b"The heart-ache and the thousand natural shocks".to_base58(), + "ADeyMxyacx916HoiijiCJRMqdjtWULxSE2eSz1t11rQbLSvVbhv6cCiwqKFAQav".as_bytes() + ); + + assert_eq!( + b"That flesh is heir to: 'tis a consummation".to_base58(), + "2QhwWNuP7oGHaHRjydcvqxLC31wKkZ12MWFBoXpe1wLJ15z6vSRuqUdNYd".as_bytes() + ); + + assert_eq!( + b"Devoutly to be wish'd. 
To die, to sleep;".to_base58(), + "4Q7Mny7G48TgtAU6u3eqhT7FDqALB7LZ466AThn4G9jv7BBhx9pXbJz".as_bytes() + ); + + assert_eq!( + b"To sleep, perchance to dream-ay, there's the rub:".to_base58(), + "Efu1HHgBffNXqXSgamBAvVNBN28JgEtp2QBqZsTRvbn44DQFEL2YfVYnFrPAdBcEz25".as_bytes() + ); + } + #[test] fn to_base58check() { assert_eq!( - b"SOME_TOKEN_ID".to_mls_b58check(), - "25TfmUELb1jGfVSAbKsV4fAVTKAn".as_bytes() + b"SOME_TOKEN_ID".to_mls_b58check(Some(vec![TOKEN_ID_PREFIX])), + "4D27mSFWbKGNea2eGBpjuCbEy".as_bytes() + ); + + // Took from js library: + // https://github.com/wzbg/base58check/blob/master/test.js + + assert_eq!( + [ + 0xf5, 0xf2, 0xd6, 0x24, 0xcf, 0xb5, 0xc3, 0xf6, 0x6d, 0x06, 0x12, 0x3d, 0x08, 0x29, + 0xd1, 0xc9, 0xce, 0xbf, 0x77, 0x0e + ] + .to_mls_b58check(Some(vec![0])), + "1PRTTaJesdNovgne6Ehcdu1fpEdX7913CK".as_bytes() + ); + + assert_eq!( + [ + 0x1E, 0x99, 0x42, 0x3A, 0x4E, 0xD2, 0x76, 0x08, 0xA1, 0x5A, 0x26, 0x16, 0xA2, 0xB0, + 0xE9, 0xE5, 0x2C, 0xED, 0x33, 0x0A, 0xC5, 0x30, 0xED, 0xCC, 0x32, 0xC8, 0xFF, 0xC6, + 0xA5, 0x26, 0xAE, 0xDD, + ] + .to_mls_b58check(Some(vec![0x80])), + "5J3mBbAH58CpQ3Y5RNJpUKPE62SQ5tfcvU2JpbnkeyhfsYB1Jcn".as_bytes() + ); + + assert_eq!( + [ + 0x27, 0xb5, 0x89, 0x1b, 0x01, 0xda, 0x2d, 0xb7, 0x4c, 0xde, 0x16, 0x89, 0xa9, 0x7a, + 0x2a, 0xcb, 0xe2, 0x3d, 0x5f, 0xb1 + ] + .to_mls_b58check(Some(vec![0])), + "14cxpo3MBCYYWCgF74SWTdcmxipnGUsPw3".as_bytes() + ); + + assert_eq!( + [ + 0x3a, 0xba, 0x41, 0x62, 0xc7, 0x25, 0x1c, 0x89, 0x12, 0x07, 0xb7, 0x47, 0x84, 0x05, + 0x51, 0xa7, 0x19, 0x39, 0xb0, 0xde, 0x08, 0x1f, 0x85, 0xc4, 0xe4, 0x4c, 0xf7, 0xc1, + 0x3e, 0x41, 0xda, 0xa6 + ] + .to_mls_b58check(Some(vec![0x80])), + "5JG9hT3beGTJuUAmCQEmNaxAuMacCTfXuw1R3FCXig23RQHMr4K".as_bytes() + ); + + assert_eq!( + [ + 0x08, 0x6e, 0xaa, 0x67, 0x78, 0x95, 0xf9, 0x2d, 0x4a, 0x6c, 0x5e, 0xf7, 0x40, 0xc1, + 0x68, 0x93, 0x2b, 0x5e, 0x3f, 0x44 + ] + .to_mls_b58check(Some(vec![0])), + "1mayif3H2JDC62S4N3rLNtBNRAiUUP99k".as_bytes() + ); + + assert_eq!( + [ + 0xed, 0xdb, 0xdc, 0x11, 0x68, 0xf1, 0xda, 0xea, 0xdb, 0xd3, 0xe4, 0x4c, 0x1e, 0x3f, + 0x8f, 0x5a, 0x28, 0x4c, 0x20, 0x29, 0xf7, 0x8a, 0xd2, 0x6a, 0xf9, 0x85, 0x83, 0xa4, + 0x99, 0xde, 0x5b, 0x19 + ] + .to_mls_b58check(Some(vec![0x80])), + "5Kd3NBUAdUnhyzenEwVLy9pBKxSwXvE9FMPyR4UKZvpe6E3AgLr".as_bytes() ); } #[test] fn from_base58check() { assert_eq!( - "25TfmUELb1jGfVSAbKsV4fAVTKAn".from_mls_b58check().unwrap(), - b"SOME_TOKEN_ID".to_vec() + "3vQB7B6MrGQZaxCuFg4oh".from_mls_b58check(None).unwrap(), + b"hello world".to_vec() + ); + + // Took from js library: + // https://github.com/wzbg/base58check/blob/master/test.js + + assert_eq!( + "1PRTTaJesdNovgne6Ehcdu1fpEdX7913CK".from_mls_b58check(Some(vec![0])).unwrap(), + vec![ + 0xf5, 0xf2, 0xd6, 0x24, 0xcf, 0xb5, 0xc3, 0xf6, 0x6d, 0x06, 0x12, 0x3d, 0x08, 0x29, + 0xd1, 0xc9, 0xce, 0xbf, 0x77, 0x0e + ] + ); + + assert_eq!( + "5J3mBbAH58CpQ3Y5RNJpUKPE62SQ5tfcvU2JpbnkeyhfsYB1Jcn" + .from_mls_b58check(Some(vec![0x80])) + .unwrap(), + vec![ + 0x1E, 0x99, 0x42, 0x3A, 0x4E, 0xD2, 0x76, 0x08, 0xA1, 0x5A, 0x26, 0x16, 0xA2, 0xB0, + 0xE9, 0xE5, 0x2C, 0xED, 0x33, 0x0A, 0xC5, 0x30, 0xED, 0xCC, 0x32, 0xC8, 0xFF, 0xC6, + 0xA5, 0x26, 0xAE, 0xDD, + ] + ); + + assert_eq!( + "14cxpo3MBCYYWCgF74SWTdcmxipnGUsPw3".from_mls_b58check(Some(vec![0])).unwrap(), + vec![ + 0x27, 0xb5, 0x89, 0x1b, 0x01, 0xda, 0x2d, 0xb7, 0x4c, 0xde, 0x16, 0x89, 0xa9, 0x7a, + 0x2a, 0xcb, 0xe2, 0x3d, 0x5f, 0xb1 + ] + ); + + assert_eq!( + "5JG9hT3beGTJuUAmCQEmNaxAuMacCTfXuw1R3FCXig23RQHMr4K" + 
.from_mls_b58check(Some(vec![0x80])) + .unwrap(), + vec![ + 0x3a, 0xba, 0x41, 0x62, 0xc7, 0x25, 0x1c, 0x89, 0x12, 0x07, 0xb7, 0x47, 0x84, 0x05, + 0x51, 0xa7, 0x19, 0x39, 0xb0, 0xde, 0x08, 0x1f, 0x85, 0xc4, 0xe4, 0x4c, 0xf7, 0xc1, + 0x3e, 0x41, 0xda, 0xa6 + ] + ); + + assert_eq!( + "1mayif3H2JDC62S4N3rLNtBNRAiUUP99k".from_mls_b58check(Some(vec![0])).unwrap(), + vec![ + 0x08, 0x6e, 0xaa, 0x67, 0x78, 0x95, 0xf9, 0x2d, 0x4a, 0x6c, 0x5e, 0xf7, 0x40, 0xc1, + 0x68, 0x93, 0x2b, 0x5e, 0x3f, 0x44 + ] + ); + + assert_eq!( + "5Kd3NBUAdUnhyzenEwVLy9pBKxSwXvE9FMPyR4UKZvpe6E3AgLr" + .from_mls_b58check(Some(vec![0x80])) + .unwrap(), + vec![ + 0xed, 0xdb, 0xdc, 0x11, 0x68, 0xf1, 0xda, 0xea, 0xdb, 0xd3, 0xe4, 0x4c, 0x1e, 0x3f, + 0x8f, 0x5a, 0x28, 0x4c, 0x20, 0x29, 0xf7, 0x8a, 0xd2, 0x6a, 0xf9, 0x85, 0x83, 0xa4, + 0x99, 0xde, 0x5b, 0x19 + ] ); } #[test] fn from_base58check_with_invalid_checksum() { assert_eq!( - "j8YiVRUK8wrJ2wzLH7W6222".from_mls_b58check(), + "j8YiVRUK8wrJ2wzLH7W6221".from_mls_b58check(Some(vec![TOKEN_ID_PREFIX])), + Err(FromBase58Error::InvalidChecksum) + ); + + assert_eq!( + "1PRTTaJesdNovgne6Ehcdu1fpEdX7913C1".from_mls_b58check(Some(vec![0])), + Err(FromBase58Error::InvalidChecksum) + ); + + assert_eq!( + "5J3mBbAH58CpQ3Y5RNJpUKPE62SQ5tfcvU2JpbnkeyhfsYB1Jc9" + .from_mls_b58check(Some(vec![0x80])), + Err(FromBase58Error::InvalidChecksum) + ); + + assert_eq!( + "14cxpo3MBCYYWCgF74SWTdcmxipnGUs153".from_mls_b58check(Some(vec![0])), + Err(FromBase58Error::InvalidChecksum) + ); + assert_eq!( + "5JG9hT3beGTJuUAmCQEmNaxAuMacCTfXuw1R3FCXig23RQH1234" + .from_mls_b58check(Some(vec![0x80])), + Err(FromBase58Error::InvalidChecksum) + ); + + assert_eq!( + "1mayif3H2JDC62S4N3rLNtBNRAiUUchek".from_mls_b58check(Some(vec![0])), + Err(FromBase58Error::InvalidChecksum) + ); + assert_eq!( + "5Kd3NBUAdUnhyzenEwVLy9pBKxSwXvE9FMPyR4UKZvpe6E3kehc" + .from_mls_b58check(Some(vec![0x80])), Err(FromBase58Error::InvalidChecksum) ); } @@ -322,18 +614,51 @@ mod tests { #[test] #[should_panic] fn from_base58check_with_invalid_length() { - "Wh4bh".from_mls_b58check().unwrap(); + "Wh4bh".from_mls_b58check(Some(vec![TOKEN_ID_PREFIX])).unwrap(); } #[test] fn base58check_loop() { - let text = - "To be, or not to be, that is the Question: Whether ’tis Nobler in the mind to suffer."; - let enc = text.as_bytes().to_mls_b58check(); - let enc = sp_std::str::from_utf8(enc.as_slice()).unwrap(); + // Using encoding and decoding for 5 times because during these operations the buffer is growing. + // If we want to have more loops we have to check is it working with more than 128 bytes or not. 
+ let text = "To be, or not to be"; + + let mut buffer = text; + let mut enc; + // encode + for _ in 0..5 { + enc = buffer.as_bytes().to_mls_b58check(Some(vec![TOKEN_ID_PREFIX])); + buffer = sp_std::str::from_utf8(enc.as_slice()).unwrap(); + } // decode back - let dec = enc.from_mls_b58check().unwrap(); - assert_eq!(sp_std::str::from_utf8(dec.as_slice()).unwrap(), text); + let mut dec; + for _ in 0..5 { + dec = buffer.from_mls_b58check(Some(vec![TOKEN_ID_PREFIX])).unwrap(); + buffer = sp_std::str::from_utf8(dec.as_slice()).unwrap(); + } + assert_eq!(buffer, text); + } + + #[test] + fn base58check_bitcoin_test() { + // Took from bitcoin: + // https://github.com/bitcoin/bitcoin/blob/master/src/test/base58_tests.cpp + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oh".from_mls_b58check(None).unwrap(), + b"hello world".to_vec() + ); + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oi".from_mls_b58check(None), + Err(FromBase58Error::InvalidChecksum) + ); + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oh0IOl".from_mls_b58check(None), + Err(FromBase58Error::InvalidBase58Character('0', 21)) + ); + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oh\0".from_mls_b58check(None), + Err(FromBase58Error::InvalidBase58Character('\0', 21)) + ); } } diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs index 60958e1..5b6fa0a 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -1,7 +1,7 @@ #![cfg_attr(not(feature = "std"), no_std)] use crate::TransactionInput; -use base58_nostd::{FromBase58, FromBase58Error, ToBase58}; +use base58_nostd::{FromBase58, FromBase58Error, ToBase58, TOKEN_ID_PREFIX}; use codec::{Decode, Encode}; use frame_support::ensure; use frame_support::{dispatch::Vec, RuntimeDebug}; @@ -10,6 +10,7 @@ use serde::{Deserialize, Serialize}; use sp_core::Hasher; use sp_core::H160; use sp_runtime::traits::BlakeTwo256; +use sp_std::vec; const LENGTH_BYTES_TO_REPRESENT_ID: usize = 20; @@ -39,11 +40,11 @@ impl TokenId { } pub fn to_string(&self) -> Vec { - self.inner.as_bytes().to_mls_b58check().to_vec() + self.inner.as_bytes().to_mls_b58check(Some(vec![TOKEN_ID_PREFIX])).to_vec() } pub fn from_string(data: &str) -> Result { - let data = data.from_mls_b58check().map_err(|x| match x { + let data = data.from_mls_b58check(Some(vec![TOKEN_ID_PREFIX])).map_err(|x| match x { FromBase58Error::InvalidBase58Character { .. } => "Invalid Base58 character", FromBase58Error::InvalidBase58Length => "Invalid Base58 length", FromBase58Error::InvalidChecksum => "Invalid checksum", From fd3f582ce7033d6a9d539901e0ac2469d13b2793 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Sun, 7 Nov 2021 13:05:07 +0300 Subject: [PATCH 40/53] Returned lost comment about race conditions Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index c74d336..4852e67 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -515,6 +515,13 @@ pub mod pallet { tx.check_time_lock::(), "Time lock restrictions not satisfied" ); + // In order to avoid race condition in network we maintain a list of required utxos for a tx + // Example of race condition: + // Assume both alice and bob have 10 coins each and bob owes charlie 20 coins + // In order to pay charlie alice must first send 10 coins to bob which creates a new utxo + // If bob uses the new utxo to try and send the coins to charlie before charlie receives the alice to bob 10 coins utxo + // then the tx from bob to charlie is invalid. 
By maintaining a list of required utxos we can ensure the tx can happen as and + // when the utxo is available. We use max longevity at the moment. That should be fixed. // Resolve the transaction inputs by looking up UTXOs being spent by them. // From 806f42e0229f8d58582bcb94470ea90413c87639 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Sun, 7 Nov 2021 14:50:31 +0300 Subject: [PATCH 41/53] Added a new test: test_transfer_and_issuance_in_one_tx. Signed-off-by: sinitcin --- pallets/utxo/src/tests.rs | 134 +++++++++++++++++++++++++++++++++++++- 1 file changed, 133 insertions(+), 1 deletion(-) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 688c40b..730adda 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -905,7 +905,7 @@ fn test_tokens_issuance_empty_ticker() { fn test_tokens_issuance_too_big_ticker() { // Ticker too long let data = OutputData::TokenIssuanceV1 { - token_ticker: Vec::from([0u8; 10_000]), + token_ticker: Vec::from([b"A"[0]; 10_000]), amount_to_issue: 0, number_of_decimals: 0, metadata_uri: vec![], @@ -1478,6 +1478,138 @@ fn test_token_creation_with_insufficient_fee() { }); } +#[test] +fn test_transfer_and_issuance_in_one_tx() { + let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); + test_ext.execute_with(|| { + // Alice issue 1_000_000_000 MLS-01, and send them to Karl + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_p2pk_with_data( + crate::tokens::Mlt(1000).to_munit(), + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let first_issuance_token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let token_utxo_hash = tx.outpoint(1); + let token_utxo = tx.outputs[1].clone(); + + // Let's send 300_000_000 and rest back and create another token + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: first_issuance_token_id.clone(), + amount: 300_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: first_issuance_token_id.clone(), + amount: 700_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "Token".as_bytes().to_vec(), + amount_to_issue: 5_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 12, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let alice_transfer_utxo_hash = tx.outpoint(0); + let karl_transfer_utxo_hash = tx.outpoint(1); + let karl_issuance_utxo_hash = tx.outpoint(2); + assert!(!UtxoStore::::contains_key(H256::from( + token_utxo_hash + ))); + assert!(UtxoStore::::contains_key(alice_transfer_utxo_hash)); + 
assert!(UtxoStore::::contains_key(karl_transfer_utxo_hash)); + assert!(UtxoStore::::contains_key(karl_issuance_utxo_hash)); + + // Let's check token transfer + UtxoStore::::get(alice_transfer_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, first_issuance_token_id); + assert_eq!(amount, 300_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + + UtxoStore::::get(karl_transfer_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, first_issuance_token_id); + assert_eq!(amount, 700_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + + // Let's check token issuance + UtxoStore::::get(karl_issuance_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenIssuanceV1 { + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + } => { + assert_eq!(token_ticker, "Token".as_bytes().to_vec()); + assert_eq!(amount_to_issue, 5_000_000_000); + assert_eq!(number_of_decimals, 12); + assert_eq!(metadata_uri, "mintlayer.org".as_bytes().to_vec()); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + }); +} + +#[test] +fn test_transfer_for_multiple_tokens() {} + #[test] fn test_immutable_tx_format() { // todo: Testing the compatibility of the old version with the new one - not done yet From ead1b5212fdf50133043ce4b21b6e4b5a436bb7e Mon Sep 17 00:00:00 2001 From: sinitcin Date: Sun, 7 Nov 2021 16:09:39 +0300 Subject: [PATCH 42/53] Fixed types for the UI in utxo Readme. Signed-off-by: sinitcin --- pallets/utxo/README.md | 110 ++++++++++++++++++++++++----------------- 1 file changed, 66 insertions(+), 44 deletions(-) diff --git a/pallets/utxo/README.md b/pallets/utxo/README.md index 1a24933..2603e1e 100644 --- a/pallets/utxo/README.md +++ b/pallets/utxo/README.md @@ -10,50 +10,72 @@ To run the test cases, just run command `cargo test`. 1. After running the core, declare the custom datatypes. GO to **Settings** > **Developer** tab and paste in the ff. 
JSON and then save: ```json { - "Value": "u128", - "Destination": { - "_enum": { - "Pubkey": "Pubkey", - "CreatePP": "DestinationCreatePP", - "CallPP": "DestinationCallPP", - "ScriptHash": "H256" - } - }, - "DestinationCreatePP": { - "code": "Vec", - "data": "Vec" - }, - "DestinationCallPP": { - "dest_account": "AccountId", - "input_data": "Vec" - }, - "TransactionInput": { - "outpoint": "Hash", - "lock": "Vec", - "witness": "Vec" - }, - "TransactionOutput": { - "value": "Value", - "header": "TXOutputHeader", - "destination": "Destination" - }, - "TransactionOutputFor": "TransactionOutput", - "Transaction": { - "inputs": "Vec", - "outputs": "Vec", - "time_lock": "Compact" - }, - "TransactionFor": "Transaction", - "Address": "MultiAddress", - "LookupSource": "MultiAddress", - "TXOutputHeader": "u128", - "Difficulty": "U256", - "DifficultyAndTimestamp": { - "difficulty": "Difficulty", - "timestamp": "Moment" - }, - "Pubkey": "H256", - "Public": "H256" + "Value": "u128", + "Destination": { + "_enum": { + "Pubkey": "Pubkey", + "CreatePP": "DestinationCreatePP", + "CallPP": "DestinationCallPP", + "ScriptHash": "H256" + } + }, + "DestinationCreatePP": { + "code": "Vec", + "data": "Vec" + }, + "DestinationCallPP": { + "dest_account": "AccountId", + "input_data": "Vec" + }, + "TransactionInput": { + "outpoint": "Hash", + "lock": "Vec", + "witness": "Vec" + }, + "NftDataHash": { + "Hash32": "[u8; 32]", + "Raw": "Vec" + }, + "TokenId": { + "inner": "H160" + }, + "TokenTransferV1": { + "token_id": "TokenId", + "amount": "Value" + }, + "TokenIssuanceV1": { + "token_ticker": "Vec", + "amount_to_issue": "Value", + "number_of_decimals": "u8", + "metadata_uri": "Vec" + }, + "OutputData": { + "_enum": { + "TokenTransferV1": "TokenTransferV1", + "TokenIssuanceV1": "TokenIssuanceV1" + } + }, + "TransactionOutput": { + "value": "Value", + "destination": "Destination", + "data": "Option" + }, + "TransactionOutputFor": "TransactionOutput", + "Transaction": { + "inputs": "Vec", + "outputs": "Vec", + "time_lock": "Compact" + }, + "TransactionFor": "Transaction", + "Address": "MultiAddress", + "LookupSource": "MultiAddress", + "Difficulty": "U256", + "DifficultyAndTimestamp": { + "difficulty": "Difficulty", + "timestamp": "Moment" + }, + "Pubkey": "H256", + "Public": "H256" } ``` 2. To confirm that Alice already has UTXO at genesis, go to **Developer** > **Chain state** > **Storage**. From 4048d126a092ef715b9fc5e14e76514c1ce2a27e Mon Sep 17 00:00:00 2001 From: sinitcin Date: Sun, 7 Nov 2021 17:27:54 +0300 Subject: [PATCH 43/53] Added a new test: test_transfer_for_multiple_tokens. Changed custom-types and readme --- pallets/utxo/README.md | 13 +- pallets/utxo/src/tests.rs | 194 +++++++++++++++++++++++++++++- test/functional/custom-types.json | 1 - 3 files changed, 198 insertions(+), 10 deletions(-) diff --git a/pallets/utxo/README.md b/pallets/utxo/README.md index 2603e1e..cc89d54 100644 --- a/pallets/utxo/README.md +++ b/pallets/utxo/README.md @@ -32,10 +32,6 @@ To run the test cases, just run command `cargo test`. "lock": "Vec", "witness": "Vec" }, - "NftDataHash": { - "Hash32": "[u8; 32]", - "Raw": "Vec" - }, "TokenId": { "inner": "H160" }, @@ -44,10 +40,10 @@ To run the test cases, just run command `cargo test`. "amount": "Value" }, "TokenIssuanceV1": { - "token_ticker": "Vec", + "token_ticker": "String", "amount_to_issue": "Value", "number_of_decimals": "u8", - "metadata_uri": "Vec" + "metadata_uri": "String" }, "OutputData": { "_enum": { @@ -75,7 +71,8 @@ To run the test cases, just run command `cargo test`. 
"timestamp": "Moment" }, "Pubkey": "H256", - "Public": "H256" + "Public": "H256", + "String": "Vec" } ``` 2. To confirm that Alice already has UTXO at genesis, go to **Developer** > **Chain state** > **Storage**. @@ -89,7 +86,7 @@ Click the **+** button on the right. It should show: header: 0 } ``` -3. Let's spend 50 of Alice's utxo to Bob. Go to **Developer** > **Extrinsics**. +3. Let's spend 50 of AlicFe's utxo to Bob. Go to **Developer** > **Extrinsics**. Choose `utxo` for _submit the following extrinsic_ dropdown. Input the following parameters (and then submit transaction): * outpoint: `0xe9ea4ce6bf71396302db8d08e7924b5be6a5b0913798bd38741c6c6e9811e864` diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 730adda..3263495 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -1608,7 +1608,199 @@ fn test_transfer_and_issuance_in_one_tx() { } #[test] -fn test_transfer_for_multiple_tokens() {} +fn test_transfer_for_multiple_tokens() { + let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); + test_ext.execute_with(|| { + // + // Issue token 1 and send all tokens to Karl + // + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![TransactionOutput::new_p2pk_with_data( + crate::tokens::Mlt(1000).to_munit(), + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "TKN1".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "tkn1.mintlayer.org".as_bytes().to_vec(), + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let tkn1_token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let tkn1_utxo_hash = tx.outpoint(0); + let tkn1_utxo = tx.outputs[0].clone(); + // + // Issue token 2 and send all tokens to Alice + // + let input1 = TransactionInput::new_empty(tkn1_utxo_hash); + let tx = Transaction { + inputs: vec![input1], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn1_token_id.clone(), + amount: 1_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + crate::tokens::Mlt(500).to_munit(), + H256::from(alice_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "TKN2".as_bytes().to_vec(), + amount_to_issue: 2_000_000_000, + number_of_decimals: 4, + metadata_uri: "tkn2.mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[tkn1_utxo.clone()], 0, &karl_pub_key); + let tkn2_token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let tkn1_utxo_hash = tx.outpoint(0); + let tkn2_utxo_hash = tx.outpoint(1); + // + // Issue token 3 and send all tokens to Karl + // + let input1 = TransactionInput::new_empty(tkn1_utxo_hash); + let input2 = TransactionInput::new_empty(tkn2_utxo_hash); + let prev_utxos = [tx.outputs[0].clone(), tx.outputs[1].clone()]; + let tx = Transaction { + inputs: vec![input1, input2], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn1_token_id.clone(), + amount: 1_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn2_token_id.clone(), + amount: 2_000_000_000, + }, + ), + 
TransactionOutput::new_p2pk_with_data( + crate::tokens::Mlt(300).to_munit(), + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "TKN3".as_bytes().to_vec(), + amount_to_issue: 3_000_000_000, + number_of_decimals: 6, + metadata_uri: "tkn3.mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&prev_utxos, 0, &alice_pub_key) + .sign_unchecked(&prev_utxos, 1, &alice_pub_key); + let tkn3_token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let tkn1_utxo_hash = tx.outpoint(0); + let tkn2_utxo_hash = tx.outpoint(1); + let tkn3_utxo_hash = tx.outpoint(2); + + // + // Transfer 3 kinds of tokens to Alice and check them all + // + let input1 = TransactionInput::new_empty(tkn1_utxo_hash); + let input2 = TransactionInput::new_empty(tkn2_utxo_hash); + let input3 = TransactionInput::new_empty(tkn3_utxo_hash); + let prev_utxos = [tx.outputs[0].clone(), tx.outputs[1].clone(), tx.outputs[2].clone()]; + let tx = Transaction { + inputs: vec![input1, input2, input3], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn1_token_id.clone(), + amount: 1_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn2_token_id.clone(), + amount: 2_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn3_token_id.clone(), + amount: 3_000_000_000, + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&prev_utxos, 0, &karl_pub_key) + .sign_unchecked(&prev_utxos, 1, &karl_pub_key) + .sign_unchecked(&prev_utxos, 2, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let tkn1_utxo_hash = tx.outpoint(0); + let tkn2_utxo_hash = tx.outpoint(1); + let tkn3_utxo_hash = tx.outpoint(2); + // Check tkn1 + UtxoStore::::get(tkn1_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, tkn1_token_id); + assert_eq!(amount, 1_000_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + // Check tkn2 + UtxoStore::::get(tkn2_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, tkn2_token_id); + assert_eq!(amount, 2_000_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + // Check tkn3 + UtxoStore::::get(tkn3_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, tkn3_token_id); + assert_eq!(amount, 3_000_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + }); +} #[test] fn test_immutable_tx_format() { diff --git a/test/functional/custom-types.json b/test/functional/custom-types.json index 30cfd96..5479dc8 100644 --- a/test/functional/custom-types.json +++ b/test/functional/custom-types.json @@ -48,7 +48,6 @@ "TokenIssuanceV1": { "type": "struct", "type_mapping": [ - [ "token_id", "TokenId" ], [ "token_ticker", "Vec" ], [ "amount_to_issue", "Value" ], [ "number_of_decimals", "u8" ], From 00aa8c5cb2087f11d6b280d16ec49fd0fdfaf37a Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 8 Nov 2021 12:32:34 +0300 Subject: [PATCH 44/53] Fixed tests, now position of checks in validation function do not affect to tokens tests. 
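
The idea: each negative issuance test now starts from fully valid data and
invalidates exactly one field, so the error it expects no longer depends on
the order in which the validation function runs its checks. A minimal sketch
of the pattern (the `valid_issuance_data` helper is illustrative, not part of
this change):

```rust
// Illustrative helper: issuance data where every field holds a valid value.
fn valid_issuance_data(ticker: Vec<u8>) -> OutputData {
    OutputData::TokenIssuanceV1 {
        token_ticker: ticker,
        amount_to_issue: 1_000_000_000,
        number_of_decimals: 2,
        metadata_uri: b"mintlayer.org".to_vec(),
    }
}

#[test]
fn ticker_must_not_be_empty() {
    // Only the ticker is invalid here, so no earlier check can fire first.
    let data = valid_issuance_data(vec![]);
    test_tx!(data, Err, "token ticker can't be empty");
}
```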
Signed-off-by: sinitcin
---
 pallets/utxo/src/tests.rs | 20 +++++++++-----------
 1 file changed, 9 insertions(+), 11 deletions(-)

diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs
index 3263495..0b25192 100644
--- a/pallets/utxo/src/tests.rs
+++ b/pallets/utxo/src/tests.rs
@@ -894,9 +894,9 @@ fn test_tokens_issuance_empty_ticker() {
     // Ticker empty
     let data = OutputData::TokenIssuanceV1 {
         token_ticker: vec![],
-        amount_to_issue: 0,
-        number_of_decimals: 0,
-        metadata_uri: vec![],
+        amount_to_issue: 1_000_000_000,
+        number_of_decimals: 2,
+        metadata_uri: "mintlayer.org".as_bytes().to_vec(),
     };
     test_tx!(data, Err, "token ticker can't be empty");
 }
@@ -906,9 +906,9 @@ fn test_tokens_issuance_too_big_ticker() {
     // Ticker too long
     let data = OutputData::TokenIssuanceV1 {
         token_ticker: Vec::from([b"A"[0]; 10_000]),
-        amount_to_issue: 0,
-        number_of_decimals: 0,
-        metadata_uri: vec![],
+        amount_to_issue: 1_000_000_000,
+        number_of_decimals: 2,
+        metadata_uri: "mintlayer.org".as_bytes().to_vec(),
     };
     test_tx!(data, Err, "token ticker is too long");
 }
@@ -919,8 +919,8 @@ fn test_tokens_issuance_amount_zero() {
     let data = OutputData::TokenIssuanceV1 {
         token_ticker: b"BensT".to_vec(),
         amount_to_issue: 0,
-        number_of_decimals: 0,
-        metadata_uri: vec![],
+        number_of_decimals: 2,
+        metadata_uri: "mintlayer.org".as_bytes().to_vec(),
     };
     test_tx!(data, Err, "output value must be nonzero");
 }
@@ -932,7 +932,7 @@ fn test_tokens_issuance_too_big_decimals() {
         token_ticker: b"BensT".to_vec(),
         amount_to_issue: 1_000_000_000,
         number_of_decimals: 19,
-        metadata_uri: vec![],
+        metadata_uri: "mintlayer.org".as_bytes().to_vec(),
     };
     test_tx!(data, Err, "too long decimals");
 }
@@ -1463,7 +1463,6 @@ fn test_token_creation_with_insufficient_fee() {
                 OutputData::TokenIssuanceV1 {
                     token_ticker: b"Enric".to_vec(),
                     amount_to_issue: 1_000_000_000,
-                    // Should be not more than 18 numbers
                     number_of_decimals: 2,
                     metadata_uri: "mintlayer.org".as_bytes().to_vec(),
                 },
@@ -1494,7 +1493,6 @@ fn test_transfer_and_issuance_in_one_tx() {
                 OutputData::TokenIssuanceV1 {
                     token_ticker: "BensT".as_bytes().to_vec(),
                     amount_to_issue: 1_000_000_000,
-                    // Should be not more than 18 numbers
                     number_of_decimals: 2,
                     metadata_uri: "mintlayer.org".as_bytes().to_vec(),
                 },

From 4af552b8f4d651872b256a5eac82b6fae4d62484 Mon Sep 17 00:00:00 2001
From: sinitcin
Date: Mon, 8 Nov 2021 15:09:36 +0300
Subject: [PATCH 45/53] Fixed after merge with Carla's code

Signed-off-by: sinitcin
---
 node/src/chain_spec.rs            |  2 +-
 node/src/command.rs               |  4 ++--
 pallets/utxo/src/lib.rs           | 18 ++++++++----------
 pallets/utxo/src/mock.rs          |  7 +++----
 pallets/utxo/src/rewards.rs       |  4 ++--
 pallets/utxo/src/staking.rs       | 11 ++++-------
 pallets/utxo/src/staking_tests.rs |  3 +--
 pallets/utxo/src/tests.rs         | 12 +++++++-----
 runtime/Cargo.toml                |  1 -
 runtime/src/staking.rs            |  2 +-
 10 files changed, 29 insertions(+), 35 deletions(-)

diff --git a/node/src/chain_spec.rs b/node/src/chain_spec.rs
index afdffd2..857f7ba 100644
--- a/node/src/chain_spec.rs
+++ b/node/src/chain_spec.rs
@@ -25,7 +25,7 @@ pub struct MltKeysInfo {
     pub sr25519_public_controller: sr25519::Public,
     pub sr25519_public_stash: sr25519::Public,
     pub ed25519_public: sp_core::ed25519::Public,
-    pub mlt_tokens: pallet_utxo::Value,
+    pub mlt_tokens: pallet_utxo::tokens::Value,
 }

 impl MltKeysInfo {
diff --git a/node/src/command.rs b/node/src/command.rs
index 542af4c..76d2c0b 100644
--- a/node/src/command.rs
+++ b/node/src/command.rs
@@ -70,7 +70,7 @@ struct MltKeysFromFile {
 }

 impl MltKeysFromFile {
-    fn
into_mlt_keys_info(self, mlt_tokens: pallet_utxo::Value) -> MltKeysInfo { + fn into_mlt_keys_info(self, mlt_tokens: pallet_utxo::tokens::Value) -> MltKeysInfo { MltKeysInfo { name: self.name, sr25519_public_controller: sr25519::Public::from_h256(self.sr25519_public_controller), @@ -99,7 +99,7 @@ pub fn fetch_keys(auth_keys_url: &'static str) -> Result, Strin let users = users["users"].as_array().ok_or("invalid json to extract user list")?; let share_per_user = TEST_NET_MLT_ORIG_SUPPLY - .checked_div(users.len() as pallet_utxo::Value) + .checked_div(users.len() as pallet_utxo::tokens::Value) .ok_or("unable to share mlt orig supply evenly.")?; for user in users { diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 1d745f3..2095067 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -19,20 +19,18 @@ pub use pallet::*; - -#[cfg(test)] -mod mock; -#[cfg(test)] -mod tests; -#[cfg(test)] -mod staking_tests; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; -mod header; +#[cfg(test)] +mod mock; mod rewards; mod script; mod sign; pub mod staking; +#[cfg(test)] +mod staking_tests; +#[cfg(test)] +mod tests; pub mod tokens; pub mod weights; @@ -54,8 +52,8 @@ pub mod pallet { use crate::sign::{self, Scheme}; // todo: This part isn't fully tested, left for the next PR // use crate::tokens::{NftDataHash}; - use crate::tokens::{OutputData, TokenId, Value}; use crate::staking::{self, StakingHelper}; + use crate::tokens::{OutputData, TokenId, Value}; use bech32; use chainscript::Script; use codec::{Decode, Encode}; @@ -80,6 +78,7 @@ pub mod pallet { testing::SR25519, H256, H512, }; + pub const MLT_UNIT: Value = 1_000 * 100_000_000; #[pallet::error] pub enum Error { @@ -944,7 +943,6 @@ pub mod pallet { new_utxos.push(hash.as_fixed_bytes().to_vec()); match output.destination { - Destination::Pubkey(_) | Destination::ScriptHash(_) => {} Destination::CreatePP(_, _) => { ensure!(!>::contains_key(hash), "output already exists"); log::info!("TODO validate CreatePP as output"); diff --git a/pallets/utxo/src/mock.rs b/pallets/utxo/src/mock.rs index 04241f6..bcedc41 100644 --- a/pallets/utxo/src/mock.rs +++ b/pallets/utxo/src/mock.rs @@ -20,6 +20,7 @@ use pallet_utxo::TransactionOutput; use pp_api::ProgrammablePoolApi; use crate::tokens::Value; +use crate::MLT_UNIT; use frame_support::dispatch::DispatchResultWithPostInfo; use frame_support::{dispatch::Vec, weights::Weight}; use frame_support::{ @@ -64,7 +65,8 @@ thread_local! 
{ pub const ALICE_PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; -pub const ALICE_GENESIS_BALANCE: Value = 1_000_000_000_000_000; +// 1 / 10 of TEST_NET_MLT_ORIG_SUPPLY +pub const ALICE_GENESIS_BALANCE: Value = MLT_UNIT * 400_000_000_00; pub fn genesis_utxo() -> (TransactionOutput, H256) { let keystore = KeyStore::new(); @@ -343,7 +345,6 @@ impl pallet_utxo::Config for Test { type Call = Call; type WeightInfo = crate::weights::WeightInfo; type ProgrammablePool = MockPool; - type AssetId = u64; type RewardReductionFraction = RewardReductionFraction; type RewardReductionPeriod = RewardReductionPeriod; @@ -376,7 +377,6 @@ pub fn alice_test_ext() -> TestExternalities { ALICE_GENESIS_BALANCE, H256::from(alice_pub_key), )], - _marker: Default::default(), locked_utxos: vec![], } .assimilate_storage(&mut t) @@ -402,7 +402,6 @@ pub fn alice_test_ext_and_keys() -> (TestExternalities, Public, Public) { ALICE_GENESIS_BALANCE, H256::from(alice_pub_key), )], - _marker: Default::default(), locked_utxos: vec![], } .assimilate_storage(&mut t) diff --git a/pallets/utxo/src/rewards.rs b/pallets/utxo/src/rewards.rs index ce78223..e6e915a 100644 --- a/pallets/utxo/src/rewards.rs +++ b/pallets/utxo/src/rewards.rs @@ -16,8 +16,8 @@ // Author(s): C. Yap use crate::{ - convert_to_h256, BlockAuthor, Config, Event, Pallet, RewardTotal, TransactionOutput, UtxoStore, - Value, + convert_to_h256, tokens::Value, BlockAuthor, Config, Event, Pallet, RewardTotal, + TransactionOutput, UtxoStore, }; use frame_support::traits::Get; diff --git a/pallets/utxo/src/staking.rs b/pallets/utxo/src/staking.rs index e466366..06b8900 100644 --- a/pallets/utxo/src/staking.rs +++ b/pallets/utxo/src/staking.rs @@ -16,8 +16,8 @@ // Author(s): C. Yap use crate::{ - convert_to_h256, Config, Destination, Error, Event, LockedUtxos, Pallet, RewardTotal, - StakingCount, TransactionOutput, UtxoStore, Value, + convert_to_h256, tokens::Value, Config, Destination, Error, Event, LockedUtxos, Pallet, + RewardTotal, StakingCount, TransactionOutput, UtxoStore, }; use frame_support::{ dispatch::{DispatchResultWithPostInfo, Vec}, @@ -164,7 +164,7 @@ pub(crate) fn withdraw(stash_account: T::AccountId) -> DispatchResult pub mod validation { use super::*; use crate::staking::utils::get_all_locked_utxo_outpoints; - use crate::{OutputHeaderHelper, TokenType, TransactionOutputFor}; + use crate::TransactionOutputFor; /// to validate `LockForStaking` and `LockExtraForStaking` pub fn validate_staking_ops( @@ -177,10 +177,7 @@ pub mod validation { ); ensure!( - matches!( - tx.header.as_tx_output_header().token_type(), - Some(TokenType::MLT) - ), + tx.data.is_none(), "only MLT tokens are supported for staking" ); diff --git a/pallets/utxo/src/staking_tests.rs b/pallets/utxo/src/staking_tests.rs index fcf20e1..42567c8 100644 --- a/pallets/utxo/src/staking_tests.rs +++ b/pallets/utxo/src/staking_tests.rs @@ -111,12 +111,12 @@ fn non_mlt_staking() { // minimum value to stake is 10, but KARL only staked 5. 
TransactionOutput {
                     value: 5,
-                    header: 1, // not an MLT token
                     destination: Destination::LockForStaking {
                         stash_account: H256::from(karl_pub_key),
                         controller_account: H256::from(greg_pub_key),
                         session_key: vec![2, 1],
                     },
+                    data: None,
                 },
                 TransactionOutput::new_pubkey(90, H256::from(karl_pub_key)),
             ],
@@ -319,7 +319,6 @@ fn non_validator_withdrawing() {
 fn withdrawing_before_expected_period() {
     let (mut test_ext, keys_and_hashes) = multiple_keys_test_ext();
     test_ext.execute_with(|| {
-
         // ALICE (index 0) wants to stop validating.
         let (alice_pub_key, _) = keys_and_hashes[0];
diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs
index 09ef51e..277bc3c 100644
--- a/pallets/utxo/src/tests.rs
+++ b/pallets/utxo/src/tests.rs
@@ -172,6 +172,8 @@ fn attack_with_sending_to_own_account() {
 #[test]
 fn attack_with_empty_transactions() {
     alice_test_ext().execute_with(|| {
+        // We should use a real input here; otherwise, a different error appears
+        let (_, input) = tx_input_gen_no_signature();
         assert_err!(
             Utxo::spend(Origin::signed(H256::zero()), Transaction::default()), // empty tx
             "no inputs"
         );
@@ -1030,7 +1032,7 @@ fn test_tx_issuance_for_transfer(expecting_err_msg: &'static str, test_func: F)
 where
     F: Fn(TokenId, Public, Public, H256, TransactionOutput) -> Transaction,
 {
-    let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys();
+    let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys();
     test_ext.execute_with(|| {
         // Alice issue 1_000_000_000 MLS-01, and send them to Karl
         let (utxo0, input0) = tx_input_gen_no_signature();
@@ -1183,7 +1185,7 @@ fn test_token_transfer_send_part_others_burn() {

 #[test]
 fn test_token_transfer() {
-    let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys();
+    let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys();
     test_ext.execute_with(|| {
         // Alice issue 1_000_000_000 MLS-01, and send them to Karl
         let (utxo0, input0) = tx_input_gen_no_signature();
@@ -1421,7 +1423,7 @@ fn test_token_transfer() {
 #[test]
 // Test tx where Input with token and without MLT, output has token (without MLT)
 fn test_token_creation_with_insufficient_fee() {
-    let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys();
+    let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys();
     test_ext.execute_with(|| {
         // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself
         let (utxo0, input0) = tx_input_gen_no_signature();
@@ -1477,7 +1479,7 @@ fn test_token_creation_with_insufficient_fee() {

 #[test]
 fn test_transfer_and_issuance_in_one_tx() {
-    let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys();
+    let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys();
     test_ext.execute_with(|| {
         // Alice issue 1_000_000_000 MLS-01, and send them to Karl
         let (utxo0, input0) = tx_input_gen_no_signature();
@@ -1605,7 +1607,7 @@ fn test_transfer_and_issuance_in_one_tx() {

 #[test]
 fn test_transfer_for_multiple_tokens() {
-    let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys();
+    let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys();
     test_ext.execute_with(|| {
         //
         // Issue token 1 and send all tokens to Karl
diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml
index 2f61151..1c0c972 100644
--- a/runtime/Cargo.toml
+++ b/runtime/Cargo.toml
@@ -17,7 +17,6 @@ branch = "master"

 [dependencies]
 log = "0.4.14"
-pallet-utxo-tokens = { path = "../pallets/utxo/tokens" }

 [dependencies.codec]
 default-features = false
diff
--git a/runtime/src/staking.rs b/runtime/src/staking.rs index deffd1f..2abe1a1 100644 --- a/runtime/src/staking.rs +++ b/runtime/src/staking.rs @@ -51,7 +51,7 @@ where fn bond( controller_account: StakeAccountId, stash_account: StakeAccountId, - value: pallet_utxo::Value, + value: pallet_utxo::tokens::Value, ) -> DispatchResult { let controller_lookup: LookupSourceOf = T::Lookup::unlookup(controller_account.clone()); let reward_destination = pallet_staking::RewardDestination::Staked; From 953a1c7d6976be04abf7c4fa84d38732840646d8 Mon Sep 17 00:00:00 2001 From: b-yap <2826165+b-yap@users.noreply.github.com> Date: Mon, 8 Nov 2021 20:49:50 +0800 Subject: [PATCH 46/53] rewards_and_staking: fix `non_mlt_staking()` test case --- pallets/utxo/src/staking_tests.rs | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/pallets/utxo/src/staking_tests.rs b/pallets/utxo/src/staking_tests.rs index 42567c8..9c7ffff 100644 --- a/pallets/utxo/src/staking_tests.rs +++ b/pallets/utxo/src/staking_tests.rs @@ -100,30 +100,40 @@ fn less_than_minimum_stake() { #[test] fn non_mlt_staking() { + use crate::tokens::OutputData; + let (mut test_ext, keys_and_hashes) = multiple_keys_test_ext(); test_ext.execute_with(|| { let (karl_pub_key, karl_genesis) = keys_and_hashes[1]; let (greg_pub_key, _) = keys_and_hashes[2]; - let mut tx = Transaction { + + let utxo = UtxoStore::::get(karl_genesis).expect("kar's utxo does not exist"); + + let tx = Transaction { inputs: vec![TransactionInput::new_empty(karl_genesis)], outputs: vec![ // KARL (index 1) wants to be a validator. He will use GREG (index 2) as the controller account. // minimum value to stake is 10, but KARL only staked 5. TransactionOutput { - value: 5, + value: 10, destination: Destination::LockForStaking { stash_account: H256::from(karl_pub_key), controller_account: H256::from(greg_pub_key), session_key: vec![2, 1], }, - data: None, + data: Some(OutputData::TokenIssuanceV1 { + token_ticker: "Token".as_bytes().to_vec(), + amount_to_issue: 5_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 12, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }), }, - TransactionOutput::new_pubkey(90, H256::from(karl_pub_key)), + TransactionOutput::new_pubkey(80, H256::from(karl_pub_key)), ], time_lock: Default::default(), - }; - let karl_sig = crypto::sr25519_sign(SR25519, &karl_pub_key, &tx.encode()).unwrap(); - tx.inputs[0].witness = karl_sig.0.to_vec(); + }.sign(&[utxo],0,&karl_pub_key) + .expect("karl's pub key not found"); assert_err!( Utxo::spend(Origin::signed(H256::zero()), tx), From 4803ffa13123b9d266ee72c5e1649f78aade467a Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 8 Nov 2021 17:01:53 +0300 Subject: [PATCH 47/53] Fixed test_reward Signed-off-by: sinitcin --- pallets/utxo/src/lib.rs | 3 +++ pallets/utxo/src/tests.rs | 52 +++++++++++++++++++++++++++++++++++---- 2 files changed, 50 insertions(+), 5 deletions(-) diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 2095067..559c699 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -1065,6 +1065,9 @@ pub mod pallet { reward = mlt_amount_in_inputs .checked_sub(mlt_amount_in_outputs) .ok_or("reward underflow")?; + if reward >= u64::MAX.into() { + frame_support::fail!("reward exceed allowed amount"); + } } Ok(ValidTransaction { diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 277bc3c..2e582a1 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -334,22 +334,64 @@ fn 
tx_from_alice_to_karl() { fn test_reward() { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); + + // Check the default parameters + let utxos = UtxoStore::::get(input0.outpoint).unwrap(); + assert_eq!(utxos.value, ALICE_GENESIS_BALANCE); + let reward = RewardTotal::::get(); + assert_eq!(reward, 0); + + // Make a new transaction let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let utxo_hash = tx.outpoint(0); // if the previous spend succeeded, there should be one utxo - // that has a value of 90 and a reward that has a value of 10 - let utxos = UtxoStore::::iter_values().next().unwrap(); + // that has a value of ALICE_GENESIS_BALANCE - 90 and a reward that has a value of 90 + let utxos = UtxoStore::::get(utxo_hash).unwrap(); + let reward = RewardTotal::::get(); + assert_eq!(utxos.value, ALICE_GENESIS_BALANCE - 90); + assert_eq!(reward, 90); + }) +} + +#[test] +fn test_reward_overflow() { + execute_with_alice(|alice_pub_key| { + let (utxo0, input0) = tx_input_gen_no_signature(); + + // Check the default parameters + let utxos = UtxoStore::::get(input0.outpoint).unwrap(); + assert_eq!(utxos.value, ALICE_GENESIS_BALANCE); let reward = RewardTotal::::get(); + assert_eq!(reward, 0); - assert_eq!(utxos.value, 90); - assert_eq!(reward, ALICE_GENESIS_BALANCE - 90); + // Make a new transaction where + // Input balance: 4_000_000_000_000_000_000_000 + // u64::MAX: 18_446_744_073_709_551_615 + // the difference: 3_981_553_255_926_290_448_385 + let tx = Transaction { + inputs: vec![input0], + outputs: vec![TransactionOutput::new_pubkey( + 3_981_553_255_926_290_448_385, + H256::from(alice_pub_key), + )], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + assert_err!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "reward exceed allowed amount" + ); }) } From 9ecbb443b0353abf0bc5c358c605c491cc141f5c Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 8 Nov 2021 19:15:11 +0300 Subject: [PATCH 48/53] Fixed unit tests after increasing genesis value for accounts Signed-off-by: sinitcin --- pallets/utxo/src/tests.rs | 119 ++++++++++++++++++++++++++----------- pallets/utxo/src/tokens.rs | 4 +- 2 files changed, 87 insertions(+), 36 deletions(-) diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 2e582a1..9cd3445 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -82,14 +82,20 @@ fn test_script_preimage() { let (utxo0, input0) = tx_input_gen_no_signature(); let tx1 = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_script_hash(50, script_hash)], + outputs: vec![TransactionOutput::new_script_hash( + ALICE_GENESIS_BALANCE - 50, + script_hash, + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); let tx2 = Transaction { inputs: vec![TransactionInput::new_script(tx1.outpoint(0), script, witness_script)], - outputs: vec![TransactionOutput::new_script_hash(20, H256::zero())], + outputs: vec![TransactionOutput::new_script_hash( + ALICE_GENESIS_BALANCE - 120, + H256::zero(), + )], time_lock: Default::default(), }; @@ -106,8 +112,14 @@ fn test_unchecked_2nd_output() { let tx1 = Transaction { inputs: vec![input0], 
outputs: vec![ - TransactionOutput::new_pubkey(30, H256::from(alice_pub_key)), - TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 30, + H256::from(alice_pub_key), + ), + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 50, + H256::from(alice_pub_key), + ), ], time_lock: Default::default(), } @@ -132,7 +144,10 @@ fn test_simple_tx() { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 50, + H256::from(alice_pub_key), + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -144,7 +159,10 @@ fn test_simple_tx() { assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); assert!(UtxoStore::::contains_key(new_utxo_hash)); - assert_eq!(50, UtxoStore::::get(new_utxo_hash).unwrap().value); + assert_eq!( + ALICE_GENESIS_BALANCE - 50, + UtxoStore::::get(new_utxo_hash).unwrap().value + ); }) } @@ -307,7 +325,10 @@ fn tx_from_alice_to_karl() { inputs: vec![input0], outputs: vec![ TransactionOutput::new_pubkey(10, H256::from(karl_pub_key)), - TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + ), ], time_lock: Default::default(), } @@ -320,7 +341,10 @@ fn tx_from_alice_to_karl() { // then send rest of the tokens to karl (proving that the first tx was successful) let tx = Transaction { inputs: vec![TransactionInput::new_empty(new_utxo_hash)], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(karl_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(karl_pub_key), + )], time_lock: Default::default(), } .sign_unchecked(&[new_utxo], 0, &alice_pub_key); @@ -401,7 +425,10 @@ fn test_script() { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -416,7 +443,10 @@ fn test_time_lock_tx() { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + )], time_lock: BlockTime::Blocks(10).as_raw().unwrap(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -435,7 +465,10 @@ fn test_time_lock_script_fail() { let script_hash: H256 = BlakeTwo256::hash(script.as_ref()); let tx1 = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_script_hash(90, script_hash)], + outputs: vec![TransactionOutput::new_script_hash( + ALICE_GENESIS_BALANCE - 90, + script_hash, + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -446,7 +479,10 @@ fn test_time_lock_script_fail() { // the time lock restrictions imposed by the scripting system. 
let tx2 = Transaction { inputs: vec![TransactionInput::new_script(outpoint, script, Default::default())], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 150, + H256::from(alice_pub_key), + )], time_lock: Default::default(), }; assert_err!( @@ -465,7 +501,10 @@ fn attack_double_spend_by_tweaking_input() { let drop_script_hash = BlakeTwo256::hash(drop_script.as_ref()); let tx0 = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_script_hash(50, drop_script_hash)], + outputs: vec![TransactionOutput::new_script_hash( + ALICE_GENESIS_BALANCE - 50, + drop_script_hash, + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -480,8 +519,11 @@ fn attack_double_spend_by_tweaking_input() { }) .collect(); let tx1 = Transaction { - inputs: inputs, - outputs: vec![TransactionOutput::new_pubkey(500, H256::from(alice_pub_key))], + inputs, + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 500, + H256::from(alice_pub_key), + )], time_lock: Default::default(), }; assert_err!( @@ -613,7 +655,7 @@ proptest! { let script_hash: H256 = BlakeTwo256::hash(script.as_ref()); let tx1 = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_script_hash(90, script_hash)], + outputs: vec![TransactionOutput::new_script_hash(ALICE_GENESIS_BALANCE - 90, script_hash)], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice); @@ -622,7 +664,7 @@ proptest! { let tx2 = Transaction { inputs: vec![TransactionInput::new_script(outpoint, script, Default::default())], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice))], + outputs: vec![TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE - u32::MAX as Value, H256::from(alice))], time_lock: tx_lock_time, }; Utxo::spend(Origin::signed(H256::zero()), tx2) @@ -650,7 +692,7 @@ proptest! { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice))], + outputs: vec![TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE - 50, H256::from(alice))], time_lock: tx_lock_time, } .sign_unchecked(&[utxo0], 0, &alice); @@ -683,7 +725,7 @@ proptest! { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice))], + outputs: vec![TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE - 50, H256::from(alice))], time_lock: now, } .sign_unchecked(&[utxo0], 0, &alice); @@ -713,7 +755,7 @@ proptest! { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice))], + outputs: vec![TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE - 50, H256::from(alice))], time_lock: time, } .sign_unchecked(&[utxo0], 0, &alice); @@ -744,7 +786,7 @@ fn test_token_issuance() { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); let output_new = TransactionOutput { - value: 0, + value: ALICE_GENESIS_BALANCE, destination: Destination::Pubkey(alice_pub_key), data: Some(OutputData::TokenIssuanceV1 { //token_id: TokenId::new_asset(first_input_hash), @@ -899,7 +941,7 @@ macro_rules! 
test_tx { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); let output_new = TransactionOutput { - value: 1, + value: ALICE_GENESIS_BALANCE - 1, destination: Destination::Pubkey(alice_pub_key), data: Some($data.clone()), }; @@ -1081,7 +1123,10 @@ where let tx = Transaction { inputs: vec![input0.clone()], outputs: vec![ - TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + ), TransactionOutput::new_p2pk_with_data( 10, H256::from(karl_pub_key), @@ -1129,11 +1174,11 @@ fn test_token_transfer_with_wrong_token_id() { Transaction { inputs: vec![input.clone()], outputs: vec![TransactionOutput::new_p2pk_with_data( - 0, + ALICE_GENESIS_BALANCE - u64::MAX as Value, H256::from(alice_pub_key), OutputData::TokenTransferV1 { token_id: TokenId::new(&input), - amount: 1_00_000_000, + amount: 100_000_000, }, )], time_lock: Default::default(), @@ -1234,9 +1279,12 @@ fn test_token_transfer() { let tx = Transaction { inputs: vec![input0], outputs: vec![ - TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + ), TransactionOutput::new_p2pk_with_data( - 10, + 90, H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { token_ticker: "BensT".as_bytes().to_vec(), @@ -1473,11 +1521,11 @@ fn test_token_creation_with_insufficient_fee() { inputs: vec![input0], outputs: vec![ TransactionOutput::new_pubkey( - crate::tokens::Mlt(1).to_munit(), + ALICE_GENESIS_BALANCE - u64::MAX as Value, H256::from(karl_pub_key), ), TransactionOutput::new_p2pk_with_data( - 0, + crate::tokens::Mlt(99).to_munit(), H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { token_ticker: "BensT".as_bytes().to_vec(), @@ -1528,7 +1576,10 @@ fn test_transfer_and_issuance_in_one_tx() { let tx = Transaction { inputs: vec![input0], outputs: vec![ - TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(1000).to_munit(), + H256::from(alice_pub_key), + ), TransactionOutput::new_p2pk_with_data( crate::tokens::Mlt(1000).to_munit(), H256::from(karl_pub_key), @@ -1658,7 +1709,7 @@ fn test_transfer_for_multiple_tokens() { let tx = Transaction { inputs: vec![input0], outputs: vec![TransactionOutput::new_p2pk_with_data( - crate::tokens::Mlt(1000).to_munit(), + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(100).to_munit(), H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { token_ticker: "TKN1".as_bytes().to_vec(), @@ -1690,7 +1741,7 @@ fn test_transfer_for_multiple_tokens() { }, ), TransactionOutput::new_p2pk_with_data( - crate::tokens::Mlt(500).to_munit(), + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(100).to_munit(), H256::from(alice_pub_key), OutputData::TokenIssuanceV1 { token_ticker: "TKN2".as_bytes().to_vec(), @@ -1733,7 +1784,7 @@ fn test_transfer_for_multiple_tokens() { }, ), TransactionOutput::new_p2pk_with_data( - crate::tokens::Mlt(300).to_munit(), + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(100).to_munit(), H256::from(karl_pub_key), OutputData::TokenIssuanceV1 { token_ticker: "TKN3".as_bytes().to_vec(), @@ -1780,7 +1831,7 @@ fn test_transfer_for_multiple_tokens() { }, ), TransactionOutput::new_p2pk_with_data( - 0, + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(100).to_munit(), H256::from(alice_pub_key), OutputData::TokenTransferV1 { token_id: tkn3_token_id.clone(), diff --git a/pallets/utxo/src/tokens.rs 
b/pallets/utxo/src/tokens.rs index 5b6fa0a..4e62f43 100644 --- a/pallets/utxo/src/tokens.rs +++ b/pallets/utxo/src/tokens.rs @@ -1,6 +1,6 @@ #![cfg_attr(not(feature = "std"), no_std)] -use crate::TransactionInput; +use crate::{TransactionInput, MLT_UNIT}; use base58_nostd::{FromBase58, FromBase58Error, ToBase58, TOKEN_ID_PREFIX}; use codec::{Decode, Encode}; use frame_support::ensure; @@ -19,7 +19,7 @@ pub type Value = u128; pub struct Mlt(pub Value); impl Mlt { pub fn to_munit(&self) -> Value { - self.0 * 1_000 * 100_000_000 + self.0 * MLT_UNIT } } From 0ca12d934bcada00bd8f28e5064d689949105596 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 8 Nov 2021 19:15:25 +0300 Subject: [PATCH 49/53] Fixed Carla's functional tests Signed-off-by: sinitcin --- test/functional/feature_staking_diff_addresses.py | 12 ++++++------ test/functional/feature_staking_extra.py | 4 ++-- .../feature_staking_extra_not_validator.py | 4 ++-- .../feature_staking_extra_wrong_controller.py | 4 ++-- test/functional/feature_staking_first_time.py | 12 ++++++------ test/functional/feature_staking_less_than_minimum.py | 12 ++++++------ 6 files changed, 24 insertions(+), 24 deletions(-) diff --git a/test/functional/feature_staking_diff_addresses.py b/test/functional/feature_staking_diff_addresses.py index 036beef..7b6faac 100755 --- a/test/functional/feature_staking_diff_addresses.py +++ b/test/functional/feature_staking_diff_addresses.py @@ -82,18 +82,18 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0x7e0dd8c53a47b22451dc3a73b29d72a2ce1405a4191f3c31ff927fea7b0514182f81ffc984364cc85499595eaefc509a06710c5277dcd22ebd7464917dfd9230') + destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0x7e0dd8c53a47b22451dc3a73b29d72a2ce1405a4191f3c31ff927fea7b0514182f81ffc984364cc85499595eaefc509a06710c5277dcd22ebd7464917dfd9230'), + data=None ), utxo.Output( value=40001 * COIN, - header=0, - destination=utxo.DestLockForStaking(dave_stash.public_key, dave.public_key,'0x0699553a3c5bfa89e41d94a45ceb9103ae9f87089b4a70de4c2a3eb922e1b9362fe0d8868ae4c9d5a9fba98d29b45d2c2630f4936077999f9334da1cca2e37e9') + destination=utxo.DestLockForStaking(dave_stash.public_key, dave.public_key,'0x0699553a3c5bfa89e41d94a45ceb9103ae9f87089b4a70de4c2a3eb922e1b9362fe0d8868ae4c9d5a9fba98d29b45d2c2630f4936077999f9334da1cca2e37e9'), + data=None ), utxo.Output( value=39999919999 * COIN, - header=0, - destination=utxo.DestPubkey(charlie.public_key) + destination=utxo.DestPubkey(charlie.public_key), + data=None ) ] ).sign(alice, [utxos[0][1]]) diff --git a/test/functional/feature_staking_extra.py b/test/functional/feature_staking_extra.py index 899b759..ef393d8 100755 --- a/test/functional/feature_staking_extra.py +++ b/test/functional/feature_staking_extra.py @@ -80,8 +80,8 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - destination=utxo.DestLockExtraForStaking(alice_stash.public_key, alice.public_key) + destination=utxo.DestLockExtraForStaking(alice_stash.public_key, alice.public_key), + data=None ), ] ).sign(alice_stash, [utxos[0][1]]) diff --git a/test/functional/feature_staking_extra_not_validator.py b/test/functional/feature_staking_extra_not_validator.py index 6b24aa3..7aa0b9c 100755 --- a/test/functional/feature_staking_extra_not_validator.py +++ b/test/functional/feature_staking_extra_not_validator.py @@ -85,8 +85,8 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - 
header=0, - destination=utxo.DestLockExtraForStaking(charlie_stash.public_key, charlie.public_key) + destination=utxo.DestLockExtraForStaking(charlie_stash.public_key, charlie.public_key), + data=None ), ] ).sign(charlie_stash, [utxos[0][1]]) diff --git a/test/functional/feature_staking_extra_wrong_controller.py b/test/functional/feature_staking_extra_wrong_controller.py index 67dcf1e..b46d52e 100755 --- a/test/functional/feature_staking_extra_wrong_controller.py +++ b/test/functional/feature_staking_extra_wrong_controller.py @@ -80,8 +80,8 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - destination=utxo.DestLockExtraForStaking(alice_stash.public_key, bob.public_key) + destination=utxo.DestLockExtraForStaking(alice_stash.public_key, bob.public_key), + data=None ), ] ).sign(alice_stash, [utxos[0][1]]) diff --git a/test/functional/feature_staking_first_time.py b/test/functional/feature_staking_first_time.py index 9adf2db..60780ac 100755 --- a/test/functional/feature_staking_first_time.py +++ b/test/functional/feature_staking_first_time.py @@ -77,8 +77,8 @@ def run_test(self): outputs=[ utxo.Output( value=50000 * COIN, - header=0, - destination=utxo.DestPubkey(charlie_stash.public_key) + destination=utxo.DestPubkey(charlie_stash.public_key), + data=None ), ] ).sign(alice, [utxos[0][1]]) @@ -92,13 +92,13 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18') + destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18'), + data=None ), utxo.Output( value=9999 * COIN, - header=0, - destination=utxo.DestPubkey(charlie_stash.public_key) + destination=utxo.DestPubkey(charlie_stash.public_key), + data=None ), ] ).sign(charlie_stash, tx1.outputs) diff --git a/test/functional/feature_staking_less_than_minimum.py b/test/functional/feature_staking_less_than_minimum.py index a0e6ca7..e23d78f 100755 --- a/test/functional/feature_staking_less_than_minimum.py +++ b/test/functional/feature_staking_less_than_minimum.py @@ -77,8 +77,8 @@ def run_test(self): outputs=[ utxo.Output( value=50000 * COIN, - header=0, - destination=utxo.DestPubkey(charlie_stash.public_key) + destination=utxo.DestPubkey(charlie_stash.public_key), + data=None ), ] ).sign(alice, [utxos[0][1]]) @@ -92,13 +92,13 @@ def run_test(self): outputs=[ utxo.Output( value=4000 * COIN, - header=0, - destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18') + destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18'), + data=None ), utxo.Output( value=45999 * COIN, - header=0, - destination=utxo.DestPubkey(charlie_stash.public_key) + destination=utxo.DestPubkey(charlie_stash.public_key), + data=None ), ] ).sign(charlie_stash, tx1.outputs) From bd41c317a010205145d945e4d00bddc2ab763646 Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 8 Nov 2021 20:14:14 +0300 Subject: [PATCH 50/53] Fixed a few more functional tests 
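
The extra outputs below keep the implicit fee, which becomes the block
reward, under the `u64::MAX` limit introduced in PATCH 47. A sketch of the
constraint the tests must now satisfy, using the enlarged genesis balance
from this series:

```rust
// The mock genesis balance used in this series (4e21 munits).
const ALICE_GENESIS_BALANCE: u128 = 4_000_000_000_000_000_000_000;

// Mirrors `if reward >= u64::MAX.into() { fail }` from PATCH 47:
// the reward is the sum of inputs minus the sum of outputs.
fn reward_fits(input_sum: u128, output_sum: u128) -> bool {
    match input_sum.checked_sub(output_sum) {
        Some(reward) => reward < u64::MAX as u128,
        None => false, // outputs exceed inputs: the tx is invalid anyway
    }
}

fn main() {
    // Spending the genesis UTXO into small outputs alone leaves a ~4e21 reward.
    assert!(!reward_fits(ALICE_GENESIS_BALANCE, 50_000));
    // A change output returning the remainder keeps the reward in range.
    assert!(reward_fits(ALICE_GENESIS_BALANCE, ALICE_GENESIS_BALANCE - 90));
}
```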
Signed-off-by: sinitcin --- test/functional/example_test.py | 6 ++++++ test/functional/feature_smart_contract_test.py | 12 ++++++++++++ test/functional/feature_staking_extra.py | 6 ++++++ test/functional/feature_staking_first_time.py | 8 +++++++- 4 files changed, 31 insertions(+), 1 deletion(-) diff --git a/test/functional/example_test.py b/test/functional/example_test.py index 3c92f19..bb29f69 100755 --- a/test/functional/example_test.py +++ b/test/functional/example_test.py @@ -121,6 +121,12 @@ def run_test(self): destination=utxo.DestPubkey(alice.public_key), data=None ), + # This output prevent reward overflow + utxo.Output( + value=3981553255926290448385, # genesis amount - u64::MAX + destination=utxo.DestPubkey(alice.public_key), + data=None + ) ] ).sign(alice, [utxos[0][1]]) diff --git a/test/functional/feature_smart_contract_test.py b/test/functional/feature_smart_contract_test.py index fb714e5..0093e92 100755 --- a/test/functional/feature_smart_contract_test.py +++ b/test/functional/feature_smart_contract_test.py @@ -88,6 +88,12 @@ def run_test(self): ), data=None ), + # This output prevent reward overflow + utxo.Output( + value=3981553255926290448385, # genesis amount - u64::MAX + destination=utxo.DestPubkey(alice.public_key), + data=None + ) ] ).sign(alice, [initial_utxo[1]]) @@ -134,6 +140,12 @@ def run_test(self): ), data=None ), + # This output prevent reward overflow + utxo.Output( + value=3981553255926290448385, # genesis amount - u64::MAX + destination=utxo.DestPubkey(alice.public_key), + data=None + ) ] ).sign(alice, [tx0.outputs[0]], [0]) (ext_hash, blk_hash,_) = client.submit(alice, tx1) diff --git a/test/functional/feature_staking_extra.py b/test/functional/feature_staking_extra.py index ef393d8..ad45f0d 100755 --- a/test/functional/feature_staking_extra.py +++ b/test/functional/feature_staking_extra.py @@ -83,6 +83,12 @@ def run_test(self): destination=utxo.DestLockExtraForStaking(alice_stash.public_key, alice.public_key), data=None ), + # This output prevent reward overflow + utxo.Output( + value=3981553255926290448385, # genesis amount - u64::MAX + destination=utxo.DestPubkey(alice.public_key), + data=None + ) ] ).sign(alice_stash, [utxos[0][1]]) (_,_,events) = client.submit(alice_stash, tx1) diff --git a/test/functional/feature_staking_first_time.py b/test/functional/feature_staking_first_time.py index 60780ac..4fb62f1 100755 --- a/test/functional/feature_staking_first_time.py +++ b/test/functional/feature_staking_first_time.py @@ -80,6 +80,12 @@ def run_test(self): destination=utxo.DestPubkey(charlie_stash.public_key), data=None ), + # This output prevent reward overflow + utxo.Output( + value=3981553255926290448385 - 50000 * COIN, # genesis amount - u64::MAX + destination=utxo.DestPubkey(alice.public_key), + data=None + ) ] ).sign(alice, [utxos[0][1]]) client.submit(alice, tx1) @@ -101,7 +107,7 @@ def run_test(self): data=None ), ] - ).sign(charlie_stash, tx1.outputs) + ).sign(charlie_stash, [tx1.outputs[0]]) (_,_,events) = client.submit(charlie_stash, tx2) # there should already be 3 staking, adding Charlie in the list. 
From 4985d7bba922614566144c359c7b524cde7c20cf Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 8 Nov 2021 20:35:51 +0300 Subject: [PATCH 51/53] Fixed one more functional test Signed-off-by: sinitcin --- test/functional/feature_smart_contract_test.py | 8 +------- test/functional/feature_staking_first_time.py | 11 +++-------- 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/test/functional/feature_smart_contract_test.py b/test/functional/feature_smart_contract_test.py index 0093e92..d155788 100755 --- a/test/functional/feature_smart_contract_test.py +++ b/test/functional/feature_smart_contract_test.py @@ -90,7 +90,7 @@ def run_test(self): ), # This output prevent reward overflow utxo.Output( - value=3981553255926290448385, # genesis amount - u64::MAX + value=3981553255926290448385, # = genesis amount - u64::MAX destination=utxo.DestPubkey(alice.public_key), data=None ) @@ -140,12 +140,6 @@ def run_test(self): ), data=None ), - # This output prevent reward overflow - utxo.Output( - value=3981553255926290448385, # genesis amount - u64::MAX - destination=utxo.DestPubkey(alice.public_key), - data=None - ) ] ).sign(alice, [tx0.outputs[0]], [0]) (ext_hash, blk_hash,_) = client.submit(alice, tx1) diff --git a/test/functional/feature_staking_first_time.py b/test/functional/feature_staking_first_time.py index 4fb62f1..1fba839 100755 --- a/test/functional/feature_staking_first_time.py +++ b/test/functional/feature_staking_first_time.py @@ -76,16 +76,11 @@ def run_test(self): ], outputs=[ utxo.Output( - value=50000 * COIN, + #value=50000 * COIN, + value=3981553255926290448385, destination=utxo.DestPubkey(charlie_stash.public_key), data=None ), - # This output prevent reward overflow - utxo.Output( - value=3981553255926290448385 - 50000 * COIN, # genesis amount - u64::MAX - destination=utxo.DestPubkey(alice.public_key), - data=None - ) ] ).sign(alice, [utxos[0][1]]) client.submit(alice, tx1) @@ -107,7 +102,7 @@ def run_test(self): data=None ), ] - ).sign(charlie_stash, [tx1.outputs[0]]) + ).sign(charlie_stash, tx1.outputs) (_,_,events) = client.submit(charlie_stash, tx2) # there should already be 3 staking, adding Charlie in the list. 
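The failure chased in the next two patches comes down to which previous
outputs get signed: `sign` takes the list of UTXOs being spent, one per
input, matched by index. A rough sketch of the convention (names follow the
unit tests in this series), spending only output 0 of `tx1`:

```rust
// Pass exactly the outputs being spent, in input order; passing all of
// tx1.outputs when only output 0 is consumed signs over the wrong set.
let prev = tx1.outputs[0].clone();
let tx2 = Transaction {
    inputs: vec![TransactionInput::new_empty(tx1.outpoint(0))],
    outputs: vec![TransactionOutput::new_pubkey(50, H256::from(karl_pub_key))],
    time_lock: Default::default(),
}
.sign(&[prev], 0, &karl_pub_key)
.expect("karl's pub key not found");
```
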
From d7e2e7a3ebf3b0f2719f3a1157fcd2bc32bd2f4b Mon Sep 17 00:00:00 2001 From: sinitcin Date: Mon, 8 Nov 2021 21:36:54 +0300 Subject: [PATCH 52/53] Stuck on broken test feature_staking_first_time.py Signed-off-by: sinitcin --- pallets/utxo/src/staking_tests.rs | 63 ++++++++++++++++++- test/functional/feature_staking_first_time.py | 6 +- 2 files changed, 64 insertions(+), 5 deletions(-) diff --git a/pallets/utxo/src/staking_tests.rs b/pallets/utxo/src/staking_tests.rs index 9c7ffff..d05355f 100644 --- a/pallets/utxo/src/staking_tests.rs +++ b/pallets/utxo/src/staking_tests.rs @@ -17,12 +17,68 @@ use crate::{ mock::*, Destination, Error, LockedUtxos, StakingCount, Transaction, TransactionInput, - TransactionOutput, UtxoStore, + TransactionOutput, UtxoStore, MLT_UNIT, }; use codec::Encode; use frame_support::{assert_err, assert_ok, sp_io::crypto}; use sp_core::{sp_std::vec, testing::SR25519, H256}; +// JUST FOR SEEKING BUG IN FUNCTIONAL TEST +// todo: Remove this +#[test] +fn staking_first_time() { + let (mut test_ext, keys_and_hashes) = multiple_keys_test_ext(); + test_ext.execute_with(|| { + let (karl_pub_key, karl_genesis) = keys_and_hashes[1]; + let (alice_pub_key, _) = keys_and_hashes[0]; + let (greg_pub_key, _) = keys_and_hashes[2]; + + let utxo = UtxoStore::::get(karl_genesis).expect("tom's utxo does not exist"); + let tx1 = Transaction { + inputs: vec![TransactionInput::new_empty(karl_genesis)], + outputs: vec![TransactionOutput::new_pubkey(100, H256::from(alice_pub_key))], + time_lock: Default::default(), + } + .sign(&[utxo], 0, &karl_pub_key) + .expect("karl's pub key not found"); + let utxo = &tx1.outputs[0]; + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx1.clone())); + + let tx2 = Transaction { + inputs: vec![TransactionInput::new_empty(tx1.outpoint(0))], + outputs: vec![ + // KARL (index 1) wants to be a validator. He will use GREG (index 2) as the controller account. 
+                // minimum value to stake is 10
+                TransactionOutput::new_lock_for_staking(
+                    90, // 40000 * MLT_UNIT,
+                    H256::from(greg_pub_key),
+                    H256::from(greg_pub_key),
+                    vec![2, 1],
+                ),
+                TransactionOutput::new_pubkey(
+                    10, /*9999 * MLT_UNIT*/
+                    H256::from(karl_pub_key),
+                ),
+            ],
+            time_lock: Default::default(),
+        }
+        .sign(&[utxo.clone()], 0, &alice_pub_key)
+        .expect("Alice's pub key not found");
+        let new_utxo_hash = tx2.outpoint(1);
+
+        assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx2));
+        assert!(UtxoStore::<Test>::contains_key(new_utxo_hash));
+        assert!(StakingCount::<Test>::contains_key(H256::from(greg_pub_key)));
+        assert!(StakingCount::<Test>::contains_key(H256::from(
+            alice_pub_key
+        )));
+        assert_eq!(
+            StakingCount::<Test>::get(H256::from(greg_pub_key)),
+            Some((1, 90))
+        );
+    })
+}
+
 #[test]
 fn simple_staking() {
     let (mut test_ext, keys_and_hashes) = multiple_keys_test_ext();
@@ -132,8 +188,9 @@ fn non_mlt_staking() {
             TransactionOutput::new_pubkey(80, H256::from(karl_pub_key)),
         ],
         time_lock: Default::default(),
-    }.sign(&[utxo],0,&karl_pub_key)
-        .expect("karl's pub key not found");
+    }
+    .sign(&[utxo], 0, &karl_pub_key)
+    .expect("karl's pub key not found");
 
     assert_err!(
         Utxo::spend(Origin::signed(H256::zero()), tx),
diff --git a/test/functional/feature_staking_first_time.py b/test/functional/feature_staking_first_time.py
index 1fba839..0e39693 100755
--- a/test/functional/feature_staking_first_time.py
+++ b/test/functional/feature_staking_first_time.py
@@ -92,12 +92,14 @@ def run_test(self):
             ],
             outputs=[
                 utxo.Output(
-                    value=40000 * COIN,
+                    # value=40000 * COIN,
+                    value=1,
                     destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18'),
                     data=None
                 ),
                 utxo.Output(
-                    value=9999 * COIN,
+                    # value=9999 * COIN,
+                    value=1,
                     destination=utxo.DestPubkey(charlie_stash.public_key),
                     data=None
                 ),

From 4d258e657586cb17aa11b43efd39d1efb4c51d7e Mon Sep 17 00:00:00 2001
From: b-yap <2826165+b-yap@users.noreply.github.com>
Date: Tue, 9 Nov 2021 14:27:07 +0800
Subject: [PATCH 53/53] rewards_staking: fix the issue of exceeding u64 value
 of transaction fee

---
 test/functional/feature_staking_first_time.py | 19 +++++++++++--------
 1 file changed, 11 insertions(+), 8 deletions(-)

diff --git a/test/functional/feature_staking_first_time.py b/test/functional/feature_staking_first_time.py
index 0e39693..48aa722 100755
--- a/test/functional/feature_staking_first_time.py
+++ b/test/functional/feature_staking_first_time.py
@@ -76,11 +76,16 @@ def run_test(self):
             ],
             outputs=[
                 utxo.Output(
-                    #value=50000 * COIN,
-                    value=3981553255926290448385,
+                    value=50000 * COIN,
                     destination=utxo.DestPubkey(charlie_stash.public_key),
                     data=None
                 ),
+                utxo.Output(
+                    value=39999949950 * COIN,
+                    destination=utxo.DestPubkey(alice.public_key),
+                    data=None
+                ),
+
             ]
         ).sign(alice, [utxos[0][1]])
         client.submit(alice, tx1)
@@ -92,20 +97,18 @@ def run_test(self):
             ],
             outputs=[
                 utxo.Output(
-                    # value=40000 * COIN,
-                    value=1,
+                    value=40000 * COIN,
                     destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18'),
                     data=None
                 ),
                 utxo.Output(
-                    # value=9999 * COIN,
-                    value=1,
+                    value=9999 * COIN,
                     destination=utxo.DestPubkey(charlie_stash.public_key),
                     data=None
                 ),
             ]
-        ).sign(charlie_stash, tx1.outputs)
-        (_,_,events) = client.submit(charlie_stash, tx2)
+        ).sign(charlie_stash, [tx1.outputs[0]])
+        client.submit(charlie_stash, tx2)
 
         # there should now be 3 staking entries, with Charlie added to the list.
         assert_equal( len(list(client.staking_count())), 3 )
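The fix in PATCH 53 works because the new change output to alice shrinks the implicit fee of tx1 to something a u64 can hold. A rough check of the numbers, assuming COIN = 10**11 and a genesis amount of 4 * 10**21 (both inferred from the constants used across these tests, not quoted from the framework):

    # Sketch of the fee arithmetic behind PATCH 53.
    # Assumptions: COIN = 10**11, genesis amount = 4 * 10**21 (inferred values).
    COIN = 10**11
    U64_MAX = 2**64 - 1
    GENESIS_AMOUNT = 4 * 10**21

    # tx1 outputs: 50000 * COIN to charlie_stash plus the change to alice.
    outputs_total = 50000 * COIN + 39999949950 * COIN
    fee = GENESIS_AMOUNT - outputs_total     # value left as the implicit fee

    assert fee == 50 * COIN                  # 5_000_000_000_000
    assert fee <= U64_MAX                    # the fee now fits in a u64

    # Without the change output, the fee would have been almost the whole
    # genesis amount, far above what a u64 can represent.
    assert GENESIS_AMOUNT - 50000 * COIN > U64_MAX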