diff --git a/.gitignore b/.gitignore index 3fd92f9..678d120 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ # Intellij IDEA .idea/ *.iml +/customSpec.json # VSCode .vscode/ @@ -27,3 +28,6 @@ test/config.ini # direnv cache .direnv + +# Python compiled files +*.pyc diff --git a/Cargo.lock b/Cargo.lock index d227942..e84a3ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -396,6 +396,14 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83" +[[package]] +name = "base58_nostd" +version = "0.1.0" +dependencies = [ + "frame-support", + "sp-std", +] + [[package]] name = "base64" version = "0.12.3" @@ -3400,7 +3408,6 @@ dependencies = [ "pallet-transaction-payment-rpc", "pallet-utxo-rpc", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "sc-authority-discovery", "sc-basic-authorship", "sc-cli", @@ -3683,7 +3690,6 @@ dependencies = [ "pallet-transaction-payment-rpc-runtime-api", "pallet-utxo", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "parity-scale-codec", "sp-api", "sp-block-builder", @@ -4170,6 +4176,7 @@ dependencies = [ name = "pallet-utxo" version = "0.1.0" dependencies = [ + "base58_nostd", "bech32", "chainscript", "frame-benchmarking", @@ -4179,10 +4186,10 @@ dependencies = [ "log", "pallet-authorship", "pallet-timestamp", - "pallet-utxo-tokens", "parity-scale-codec", "pp-api", "proptest", + "rand 0.4.6", "serde", "sp-core", "sp-keystore", @@ -4201,11 +4208,11 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "parity-scale-codec", "serde", "sp-api", "sp-blockchain", + "sp-core", "sp-runtime", ] @@ -4214,25 +4221,14 @@ name = "pallet-utxo-rpc-runtime-api" version = "0.1.0" dependencies = [ "frame-support", - "pallet-utxo-tokens", "parity-scale-codec", "serde", "serde_json", "sp-api", + "sp-core", "sp-runtime", ] -[[package]] -name = "pallet-utxo-tokens" -version = "0.1.0" 
-dependencies = [ - "frame-support", - "hex-literal 0.2.1", - "log", - "parity-scale-codec", - "serde", -] - [[package]] name = "parity-db" version = "0.3.1" diff --git a/libs/base58_nostd/Cargo.toml b/libs/base58_nostd/Cargo.toml new file mode 100644 index 0000000..07dc268 --- /dev/null +++ b/libs/base58_nostd/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "base58_nostd" +version = "0.1.0" +edition = "2018" +authors = ["Anton Sinitsyn "] +description = "Encodes and decodes the Base58 format" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies.frame-support] +default-features = false +git = 'https://github.com/paritytech/substrate.git' +version = '4.0.0-dev' +branch = "master" + +[dependencies.sp-std] +default-features = false +git = 'https://github.com/paritytech/substrate.git' +version = '4.0.0-dev' +branch = "master" + +[features] +default = ['std'] +testcontext = [] +std = [ + "sp-std/std", + "frame-support/std", +] diff --git a/libs/base58_nostd/src/lib.rs b/libs/base58_nostd/src/lib.rs new file mode 100644 index 0000000..d889d32 --- /dev/null +++ b/libs/base58_nostd/src/lib.rs @@ -0,0 +1,664 @@ +// Copyright (c) 2021 RBB S.r.l +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software.
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// Based on https://github.com/trezor/trezor-crypto/blob/master/base58.c +// commit hash: c6e7d37 +// license: MIT +// works only up to 128 bytes + +#![cfg_attr(not(feature = "std"), no_std)] + +use frame_support::sp_io::hashing::sha2_256; +use sp_std::vec; +use sp_std::vec::Vec; + +pub const TOKEN_ID_PREFIX: u8 = b"M"[0]; + +const BASE58_ALPHABET: &'static [u8] = + b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; + +const B58_BITCOIN_DIGITS_MAP: &'static [i8] = &[ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, -1, -1, -1, -1, -1, -1, -1, 9, 10, 11, 12, 13, 14, 15, 16, -1, + 17, 18, 19, 20, 21, -1, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, -1, -1, -1, -1, -1, -1, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, +]; + +/// Errors that can occur when decoding base58 encoded string. +#[derive(Debug, PartialEq)] +pub enum FromBase58Error { + /// The input contained a character which is not a part of the base58 format. + InvalidBase58Character(char, usize), + /// The input had invalid length. + InvalidBase58Length, + /// Base58 string contains invalid checksum + InvalidChecksum, + /// The input has invalid prefix. + InvalidPrefix, +} + +/// A trait for converting a value to base58 encoded string. +pub trait ToBase58 { + /// Converts a value of `self` to a base58 value, returning the owned string. + fn to_base58(&self) -> Vec; + /// Converts a value of `self` to a base58 value with checksum applied, returning the owned string. + fn to_mls_b58check(&self, prefix: Option>) -> Vec; +} + +/// A trait for converting base58 encoded values. +pub trait FromBase58 { + /// Convert a value of `self`, interpreted as base58 encoded data, into an owned vector of bytes, returning a vector. + fn from_base58(&self) -> Result, FromBase58Error>; + /// Converts a value of `self`, interpreted as base58 encoded data with checksum applied, into an owned vector of bytes, + /// returning a vector. 
+ fn from_mls_b58check(&self, prefix: Option>) -> Result, FromBase58Error>; +} + +fn checksum(payload: &[u8]) -> Vec { + let sha256 = sha2_256(payload); + let doubled_sha256 = sha2_256(&sha256); + // Return the first 4 bytes of sha256(sha256(payload)) + Vec::from(&doubled_sha256[..4]) +} + +fn encode_to_base58(payload: &[u8]) -> Vec { + let zcount = payload.iter().take_while(|x| **x == 0).count(); + let size = (payload.len() - zcount) * 138 / 100 + 1; + let mut buffer = vec![0u8; size]; + let mut i = zcount; + let mut high = size - 1; + while i < payload.len() { + let mut carry = payload[i] as u32; + let mut j = size - 1; + + while j > high || carry != 0 { + carry += 256 * buffer[j] as u32; + buffer[j] = (carry % 58) as u8; + carry /= 58; + if j > 0 { + j -= 1; + } + } + i += 1; + high = j; + } + let mut j = buffer.iter().take_while(|x| **x == 0).count(); + let mut result = Vec::new(); + for _ in 0..zcount { + result.push(b'1'); + } + while j < size { + result.push(BASE58_ALPHABET[buffer[j] as usize]); + j += 1; + } + result +} + +fn decode_from_base58(payload: &str) -> Result, FromBase58Error> { + let mut bin = [0u8; 132]; + let mut out = [0u32; (132 + 3) / 4]; + let bytesleft = (bin.len() % 4) as u8; + let zeromask = match bytesleft { + 0 => 0u32, + _ => 0xffffffff << (bytesleft * 8), + }; + + let zcount = payload.chars().take_while(|x| *x == '1').count(); + let mut i = zcount; + let b58: Vec = payload.bytes().collect(); + + while i < payload.len() { + if (b58[i] & 0x80) != 0 { + // High-bit set on invalid digit + return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); + } + + if B58_BITCOIN_DIGITS_MAP[b58[i] as usize] == -1 { + // // Invalid base58 digit + return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); + } + + let mut c = B58_BITCOIN_DIGITS_MAP[b58[i] as usize] as u64; + let mut j = out.len(); + while j != 0 { + j -= 1; + let t = out[j] as u64 * 58 + c; + c = (t & 0x3f00000000) >> 32; + out[j] = (t & 0xffffffff) as u32; + 
} + + if c != 0 { + // Output number too big (carry to the next int32) + return Err(FromBase58Error::InvalidBase58Length); + } + + if (out[0] & zeromask) != 0 { + // Output number too big (last int32 filled too far) + return Err(FromBase58Error::InvalidBase58Length); + } + + i += 1; + } + + let mut i = 1; + let mut j = 0; + + bin[0] = match bytesleft { + 3 => ((out[0] & 0xff0000) >> 16) as u8, + 2 => ((out[0] & 0xff00) >> 8) as u8, + 1 => { + j = 1; + (out[0] & 0xff) as u8 + } + _ => { + i = 0; + bin[0] + } + }; + + while j < out.len() { + bin[i] = ((out[j] >> 0x18) & 0xff) as u8; + bin[i + 1] = ((out[j] >> 0x10) & 0xff) as u8; + bin[i + 2] = ((out[j] >> 8) & 0xff) as u8; + bin[i + 3] = ((out[j] >> 0) & 0xff) as u8; + i += 4; + j += 1; + } + + let leading_zeros = bin.iter().take_while(|x| **x == 0).count(); + Ok(bin[leading_zeros - zcount..].to_vec()) +} + +impl FromBase58 for str { + fn from_base58(&self) -> Result, FromBase58Error> { + decode_from_base58(self) + } + + fn from_mls_b58check(&self, prefix: Option>) -> Result, FromBase58Error> { + let mut payload: Vec = self.from_base58()?; + if payload.len() < 5 { + return Err(FromBase58Error::InvalidChecksum); + } + let checksum_index = payload.len() - 4; + let provided_checksum = payload.split_off(checksum_index); + let checksum = checksum(&payload).to_vec(); + if checksum != provided_checksum { + return Err(FromBase58Error::InvalidChecksum); + } + if let Some(ref prefix) = prefix { + let payload_prefix = payload[..prefix.len()].to_vec(); + // Let's check is it right prefix or not + if &payload_prefix != prefix { + Err(FromBase58Error::InvalidPrefix)?; + } + } + match prefix { + Some(prefix) => Ok(payload[prefix.len()..].to_vec()), + None => Ok(payload), + } + } +} + +impl ToBase58 for [u8] { + fn to_base58(&self) -> Vec { + encode_to_base58(self) + } + + fn to_mls_b58check(&self, prefix: Option>) -> Vec { + let mut payload = match prefix { + Some(prefix) => prefix.clone(), + None => vec![], + }; + // let mut 
payload = vec![prefix]; + payload.extend(self); + payload.extend(checksum(payload.as_slice())); + encode_to_base58(payload.as_slice()) + } +} + +#[cfg(test)] +mod tests { + use super::{FromBase58, FromBase58Error, ToBase58, TOKEN_ID_PREFIX}; + + #[test] + fn test_from_base58_basic() { + assert_eq!("".from_base58().unwrap(), b""); + assert_eq!("Z".from_base58().unwrap(), &[32]); + assert_eq!("n".from_base58().unwrap(), &[45]); + assert_eq!("q".from_base58().unwrap(), &[48]); + assert_eq!("r".from_base58().unwrap(), &[49]); + assert_eq!("z".from_base58().unwrap(), &[57]); + assert_eq!("4SU".from_base58().unwrap(), &[45, 49]); + assert_eq!("4k8".from_base58().unwrap(), &[49, 49]); + assert_eq!("ZiCa".from_base58().unwrap(), &[97, 98, 99]); + assert_eq!("3mJr7AoUXx2Wqd".from_base58().unwrap(), b"1234598760"); + assert_eq!( + "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".from_base58().unwrap(), + b"abcdefghijklmnopqrstuvwxyz" + ); + } + + #[test] + fn test_from_base58_invalid_char() { + assert!("0".from_base58().is_err()); + assert!("O".from_base58().is_err()); + assert!("I".from_base58().is_err()); + assert!("l".from_base58().is_err()); + assert!("3mJr0".from_base58().is_err()); + assert!("O3yxU".from_base58().is_err()); + assert!("3sNI".from_base58().is_err()); + assert!("4kl8".from_base58().is_err()); + assert!("s!5<".from_base58().is_err()); + assert!("t$@mX<*".from_base58().is_err()); + } + + #[test] + fn test_from_base58_initial_zeros() { + assert_eq!("1ZiCa".from_base58().unwrap(), b"\0abc"); + assert_eq!("11ZiCa".from_base58().unwrap(), b"\0\0abc"); + assert_eq!("111ZiCa".from_base58().unwrap(), b"\0\0\0abc"); + assert_eq!("1111ZiCa".from_base58().unwrap(), b"\0\0\0\0abc"); + } + + #[test] + fn test_to_base58_basic() { + assert_eq!(b"".to_base58(), "".as_bytes()); + assert_eq!(&[32].to_base58(), "Z".as_bytes()); + assert_eq!(&[45].to_base58(), "n".as_bytes()); + assert_eq!(&[48].to_base58(), "q".as_bytes()); + assert_eq!(&[49].to_base58(), "r".as_bytes()); + 
assert_eq!(&[57].to_base58(), "z".as_bytes()); + assert_eq!(&[45, 49].to_base58(), "4SU".as_bytes()); + assert_eq!(&[49, 49].to_base58(), "4k8".as_bytes()); + assert_eq!(b"abc".to_base58(), "ZiCa".as_bytes()); + assert_eq!(b"1234598760".to_base58(), "3mJr7AoUXx2Wqd".as_bytes()); + assert_eq!( + b"abcdefghijklmnopqrstuvwxyz".to_base58(), + "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".as_bytes() + ); + } + + #[test] + fn test_to_base58_initial_zeros() { + assert_eq!(b"\0abc".to_base58(), "1ZiCa".as_bytes()); + assert_eq!(b"\0\0abc".to_base58(), "11ZiCa".as_bytes()); + assert_eq!(b"\0\0\0abc".to_base58(), "111ZiCa".as_bytes()); + assert_eq!(b"\0\0\0\0abc".to_base58(), "1111ZiCa".as_bytes()); + } + + #[test] + fn test_from_base58_compatible_functional_tests() { + // The data was being prepared in python script + + assert_eq!( + "2QjRKB7mHaXRjhUmgcQGAbDHPre2Uvq9ev4YiiFgLoUPrQdB52MuHoRwmB" + .from_base58() + .unwrap(), + b"To be, or not to be, that is the question:" + ); + + assert_eq!( + "LApxNT84PpjfwjYZyDdhQTNAuEp28SssymbKcj68fEc7wLh2qpkpXAuf" + .from_base58() + .unwrap(), + b"Whether 'tis nobler in the mind to suffer" + ); + + assert_eq!( + "USm3fpdSjgtutT9UNHZgsaR4UBcHmgYfxcaVubFjhj9Tio5Nfq9XNV5puD7H" + .from_base58() + .unwrap(), + b"The slings and arrows of outrageous fortune," + ); + + assert_eq!( + "JRYvHV9zVEFpwLXQLjTs8VhnP1nPiBZUFdHA5into6ntyEPsLwpnR8Vp" + .from_base58() + .unwrap(), + b"Or to take arms against a sea of troubles" + ); + + assert_eq!( + "26LXuFRSRgp2fUf8QhNjeEHjniK599smzB7pJsqf1XpLS9bkgd4d7gM9UX" + .from_base58() + .unwrap(), + b"And by opposing end them. 
To die-to sleep," + ); + + assert_eq!( + "3dU1LpdBTnUsha3T3cGiEUZPTtzRfLhCA83k22CMvbzKV9oMb87".from_base58().unwrap(), + b"No more; and by a sleep to say we end" + ); + + assert_eq!( + "ADeyMxyacx916HoiijiCJRMqdjtWULxSE2eSz1t11rQbLSvVbhv6cCiwqKFAQav" + .from_base58() + .unwrap(), + b"The heart-ache and the thousand natural shocks" + ); + + assert_eq!( + "2QhwWNuP7oGHaHRjydcvqxLC31wKkZ12MWFBoXpe1wLJ15z6vSRuqUdNYd" + .from_base58() + .unwrap(), + b"That flesh is heir to: 'tis a consummation" + ); + + assert_eq!( + "4Q7Mny7G48TgtAU6u3eqhT7FDqALB7LZ466AThn4G9jv7BBhx9pXbJz".from_base58().unwrap(), + b"Devoutly to be wish'd. To die, to sleep;" + ); + + assert_eq!( + "Efu1HHgBffNXqXSgamBAvVNBN28JgEtp2QBqZsTRvbn44DQFEL2YfVYnFrPAdBcEz25" + .from_base58() + .unwrap(), + b"To sleep, perchance to dream-ay, there's the rub:" + ); + } + + #[test] + fn test_to_base58_compatible_functional_tests() { + // The data was being prepared in python script + + assert_eq!( + b"To be, or not to be, that is the question:".to_base58(), + "2QjRKB7mHaXRjhUmgcQGAbDHPre2Uvq9ev4YiiFgLoUPrQdB52MuHoRwmB".as_bytes() + ); + + assert_eq!( + b"Whether 'tis nobler in the mind to suffer".to_base58(), + "LApxNT84PpjfwjYZyDdhQTNAuEp28SssymbKcj68fEc7wLh2qpkpXAuf".as_bytes() + ); + + assert_eq!( + b"The slings and arrows of outrageous fortune,".to_base58(), + "USm3fpdSjgtutT9UNHZgsaR4UBcHmgYfxcaVubFjhj9Tio5Nfq9XNV5puD7H".as_bytes() + ); + + assert_eq!( + b"Or to take arms against a sea of troubles".to_base58(), + "JRYvHV9zVEFpwLXQLjTs8VhnP1nPiBZUFdHA5into6ntyEPsLwpnR8Vp".as_bytes() + ); + + assert_eq!( + b"And by opposing end them. 
To die-to sleep,".to_base58(), + "26LXuFRSRgp2fUf8QhNjeEHjniK599smzB7pJsqf1XpLS9bkgd4d7gM9UX".as_bytes() + ); + + assert_eq!( + b"No more; and by a sleep to say we end".to_base58(), + "3dU1LpdBTnUsha3T3cGiEUZPTtzRfLhCA83k22CMvbzKV9oMb87".as_bytes() + ); + + assert_eq!( + b"The heart-ache and the thousand natural shocks".to_base58(), + "ADeyMxyacx916HoiijiCJRMqdjtWULxSE2eSz1t11rQbLSvVbhv6cCiwqKFAQav".as_bytes() + ); + + assert_eq!( + b"That flesh is heir to: 'tis a consummation".to_base58(), + "2QhwWNuP7oGHaHRjydcvqxLC31wKkZ12MWFBoXpe1wLJ15z6vSRuqUdNYd".as_bytes() + ); + + assert_eq!( + b"Devoutly to be wish'd. To die, to sleep;".to_base58(), + "4Q7Mny7G48TgtAU6u3eqhT7FDqALB7LZ466AThn4G9jv7BBhx9pXbJz".as_bytes() + ); + + assert_eq!( + b"To sleep, perchance to dream-ay, there's the rub:".to_base58(), + "Efu1HHgBffNXqXSgamBAvVNBN28JgEtp2QBqZsTRvbn44DQFEL2YfVYnFrPAdBcEz25".as_bytes() + ); + } + + #[test] + fn to_base58check() { + assert_eq!( + b"SOME_TOKEN_ID".to_mls_b58check(Some(vec![TOKEN_ID_PREFIX])), + "4D27mSFWbKGNea2eGBpjuCbEy".as_bytes() + ); + + // Took from js library: + // https://github.com/wzbg/base58check/blob/master/test.js + + assert_eq!( + [ + 0xf5, 0xf2, 0xd6, 0x24, 0xcf, 0xb5, 0xc3, 0xf6, 0x6d, 0x06, 0x12, 0x3d, 0x08, 0x29, + 0xd1, 0xc9, 0xce, 0xbf, 0x77, 0x0e + ] + .to_mls_b58check(Some(vec![0])), + "1PRTTaJesdNovgne6Ehcdu1fpEdX7913CK".as_bytes() + ); + + assert_eq!( + [ + 0x1E, 0x99, 0x42, 0x3A, 0x4E, 0xD2, 0x76, 0x08, 0xA1, 0x5A, 0x26, 0x16, 0xA2, 0xB0, + 0xE9, 0xE5, 0x2C, 0xED, 0x33, 0x0A, 0xC5, 0x30, 0xED, 0xCC, 0x32, 0xC8, 0xFF, 0xC6, + 0xA5, 0x26, 0xAE, 0xDD, + ] + .to_mls_b58check(Some(vec![0x80])), + "5J3mBbAH58CpQ3Y5RNJpUKPE62SQ5tfcvU2JpbnkeyhfsYB1Jcn".as_bytes() + ); + + assert_eq!( + [ + 0x27, 0xb5, 0x89, 0x1b, 0x01, 0xda, 0x2d, 0xb7, 0x4c, 0xde, 0x16, 0x89, 0xa9, 0x7a, + 0x2a, 0xcb, 0xe2, 0x3d, 0x5f, 0xb1 + ] + .to_mls_b58check(Some(vec![0])), + "14cxpo3MBCYYWCgF74SWTdcmxipnGUsPw3".as_bytes() + ); + + assert_eq!( + [ + 0x3a, 0xba, 0x41, 
0x62, 0xc7, 0x25, 0x1c, 0x89, 0x12, 0x07, 0xb7, 0x47, 0x84, 0x05, + 0x51, 0xa7, 0x19, 0x39, 0xb0, 0xde, 0x08, 0x1f, 0x85, 0xc4, 0xe4, 0x4c, 0xf7, 0xc1, + 0x3e, 0x41, 0xda, 0xa6 + ] + .to_mls_b58check(Some(vec![0x80])), + "5JG9hT3beGTJuUAmCQEmNaxAuMacCTfXuw1R3FCXig23RQHMr4K".as_bytes() + ); + + assert_eq!( + [ + 0x08, 0x6e, 0xaa, 0x67, 0x78, 0x95, 0xf9, 0x2d, 0x4a, 0x6c, 0x5e, 0xf7, 0x40, 0xc1, + 0x68, 0x93, 0x2b, 0x5e, 0x3f, 0x44 + ] + .to_mls_b58check(Some(vec![0])), + "1mayif3H2JDC62S4N3rLNtBNRAiUUP99k".as_bytes() + ); + + assert_eq!( + [ + 0xed, 0xdb, 0xdc, 0x11, 0x68, 0xf1, 0xda, 0xea, 0xdb, 0xd3, 0xe4, 0x4c, 0x1e, 0x3f, + 0x8f, 0x5a, 0x28, 0x4c, 0x20, 0x29, 0xf7, 0x8a, 0xd2, 0x6a, 0xf9, 0x85, 0x83, 0xa4, + 0x99, 0xde, 0x5b, 0x19 + ] + .to_mls_b58check(Some(vec![0x80])), + "5Kd3NBUAdUnhyzenEwVLy9pBKxSwXvE9FMPyR4UKZvpe6E3AgLr".as_bytes() + ); + } + + #[test] + fn from_base58check() { + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oh".from_mls_b58check(None).unwrap(), + b"hello world".to_vec() + ); + + // Took from js library: + // https://github.com/wzbg/base58check/blob/master/test.js + + assert_eq!( + "1PRTTaJesdNovgne6Ehcdu1fpEdX7913CK".from_mls_b58check(Some(vec![0])).unwrap(), + vec![ + 0xf5, 0xf2, 0xd6, 0x24, 0xcf, 0xb5, 0xc3, 0xf6, 0x6d, 0x06, 0x12, 0x3d, 0x08, 0x29, + 0xd1, 0xc9, 0xce, 0xbf, 0x77, 0x0e + ] + ); + + assert_eq!( + "5J3mBbAH58CpQ3Y5RNJpUKPE62SQ5tfcvU2JpbnkeyhfsYB1Jcn" + .from_mls_b58check(Some(vec![0x80])) + .unwrap(), + vec![ + 0x1E, 0x99, 0x42, 0x3A, 0x4E, 0xD2, 0x76, 0x08, 0xA1, 0x5A, 0x26, 0x16, 0xA2, 0xB0, + 0xE9, 0xE5, 0x2C, 0xED, 0x33, 0x0A, 0xC5, 0x30, 0xED, 0xCC, 0x32, 0xC8, 0xFF, 0xC6, + 0xA5, 0x26, 0xAE, 0xDD, + ] + ); + + assert_eq!( + "14cxpo3MBCYYWCgF74SWTdcmxipnGUsPw3".from_mls_b58check(Some(vec![0])).unwrap(), + vec![ + 0x27, 0xb5, 0x89, 0x1b, 0x01, 0xda, 0x2d, 0xb7, 0x4c, 0xde, 0x16, 0x89, 0xa9, 0x7a, + 0x2a, 0xcb, 0xe2, 0x3d, 0x5f, 0xb1 + ] + ); + + assert_eq!( + "5JG9hT3beGTJuUAmCQEmNaxAuMacCTfXuw1R3FCXig23RQHMr4K" + 
.from_mls_b58check(Some(vec![0x80])) + .unwrap(), + vec![ + 0x3a, 0xba, 0x41, 0x62, 0xc7, 0x25, 0x1c, 0x89, 0x12, 0x07, 0xb7, 0x47, 0x84, 0x05, + 0x51, 0xa7, 0x19, 0x39, 0xb0, 0xde, 0x08, 0x1f, 0x85, 0xc4, 0xe4, 0x4c, 0xf7, 0xc1, + 0x3e, 0x41, 0xda, 0xa6 + ] + ); + + assert_eq!( + "1mayif3H2JDC62S4N3rLNtBNRAiUUP99k".from_mls_b58check(Some(vec![0])).unwrap(), + vec![ + 0x08, 0x6e, 0xaa, 0x67, 0x78, 0x95, 0xf9, 0x2d, 0x4a, 0x6c, 0x5e, 0xf7, 0x40, 0xc1, + 0x68, 0x93, 0x2b, 0x5e, 0x3f, 0x44 + ] + ); + + assert_eq!( + "5Kd3NBUAdUnhyzenEwVLy9pBKxSwXvE9FMPyR4UKZvpe6E3AgLr" + .from_mls_b58check(Some(vec![0x80])) + .unwrap(), + vec![ + 0xed, 0xdb, 0xdc, 0x11, 0x68, 0xf1, 0xda, 0xea, 0xdb, 0xd3, 0xe4, 0x4c, 0x1e, 0x3f, + 0x8f, 0x5a, 0x28, 0x4c, 0x20, 0x29, 0xf7, 0x8a, 0xd2, 0x6a, 0xf9, 0x85, 0x83, 0xa4, + 0x99, 0xde, 0x5b, 0x19 + ] + ); + } + + #[test] + fn from_base58check_with_invalid_checksum() { + assert_eq!( + "j8YiVRUK8wrJ2wzLH7W6221".from_mls_b58check(Some(vec![TOKEN_ID_PREFIX])), + Err(FromBase58Error::InvalidChecksum) + ); + + assert_eq!( + "1PRTTaJesdNovgne6Ehcdu1fpEdX7913C1".from_mls_b58check(Some(vec![0])), + Err(FromBase58Error::InvalidChecksum) + ); + + assert_eq!( + "5J3mBbAH58CpQ3Y5RNJpUKPE62SQ5tfcvU2JpbnkeyhfsYB1Jc9" + .from_mls_b58check(Some(vec![0x80])), + Err(FromBase58Error::InvalidChecksum) + ); + + assert_eq!( + "14cxpo3MBCYYWCgF74SWTdcmxipnGUs153".from_mls_b58check(Some(vec![0])), + Err(FromBase58Error::InvalidChecksum) + ); + assert_eq!( + "5JG9hT3beGTJuUAmCQEmNaxAuMacCTfXuw1R3FCXig23RQH1234" + .from_mls_b58check(Some(vec![0x80])), + Err(FromBase58Error::InvalidChecksum) + ); + + assert_eq!( + "1mayif3H2JDC62S4N3rLNtBNRAiUUchek".from_mls_b58check(Some(vec![0])), + Err(FromBase58Error::InvalidChecksum) + ); + assert_eq!( + "5Kd3NBUAdUnhyzenEwVLy9pBKxSwXvE9FMPyR4UKZvpe6E3kehc" + .from_mls_b58check(Some(vec![0x80])), + Err(FromBase58Error::InvalidChecksum) + ); + } + + #[test] + #[should_panic] + fn from_base58check_with_invalid_length() { + 
"Wh4bh".from_mls_b58check(Some(vec![TOKEN_ID_PREFIX])).unwrap(); + } + + #[test] + fn base58check_loop() { + // Using encoding and decoding for 5 times because during these operations the buffer is growing. + // If we want to have more loops we have to check is it working with more than 128 bytes or not. + + let text = "To be, or not to be"; + + let mut buffer = text; + let mut enc; + // encode + for _ in 0..5 { + enc = buffer.as_bytes().to_mls_b58check(Some(vec![TOKEN_ID_PREFIX])); + buffer = sp_std::str::from_utf8(enc.as_slice()).unwrap(); + } + // decode back + let mut dec; + for _ in 0..5 { + dec = buffer.from_mls_b58check(Some(vec![TOKEN_ID_PREFIX])).unwrap(); + buffer = sp_std::str::from_utf8(dec.as_slice()).unwrap(); + } + assert_eq!(buffer, text); + } + + #[test] + fn base58check_bitcoin_test() { + // Took from bitcoin: + // https://github.com/bitcoin/bitcoin/blob/master/src/test/base58_tests.cpp + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oh".from_mls_b58check(None).unwrap(), + b"hello world".to_vec() + ); + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oi".from_mls_b58check(None), + Err(FromBase58Error::InvalidChecksum) + ); + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oh0IOl".from_mls_b58check(None), + Err(FromBase58Error::InvalidBase58Character('0', 21)) + ); + assert_eq!( + "3vQB7B6MrGQZaxCuFg4oh\0".from_mls_b58check(None), + Err(FromBase58Error::InvalidBase58Character('\0', 21)) + ); + } +} diff --git a/libs/chainscript/Cargo.toml b/libs/chainscript/Cargo.toml index c49c2e5..7dd8c78 100644 --- a/libs/chainscript/Cargo.toml +++ b/libs/chainscript/Cargo.toml @@ -1,5 +1,5 @@ [package] -authors = ['Lukas Kuklinek '] +authors = ['Lukas Kuklinek '] description = 'An interpreter for bitcoin script and its dialects' edition = '2018' name = 'chainscript' diff --git a/node/Cargo.toml b/node/Cargo.toml index 977008b..0988d79 100644 --- a/node/Cargo.toml +++ b/node/Cargo.toml @@ -24,7 +24,6 @@ structopt = '0.3.8' node-template-runtime = {version = '3.0.0', path = '../runtime'} 
pallet-utxo-rpc = { path = "../pallets/utxo/rpc" } pallet-utxo-rpc-runtime-api = { path = "../pallets/utxo/rpc/runtime-api" } -pallet-utxo-tokens = { path = "../pallets/utxo/tokens" } log = "0.4.8" ureq = "2.2.0" diff --git a/node/src/chain_spec.rs b/node/src/chain_spec.rs index afdffd2..857f7ba 100644 --- a/node/src/chain_spec.rs +++ b/node/src/chain_spec.rs @@ -25,7 +25,7 @@ pub struct MltKeysInfo { pub sr25519_public_controller: sr25519::Public, pub sr25519_public_stash: sr25519::Public, pub ed25519_public: sp_core::ed25519::Public, - pub mlt_tokens: pallet_utxo::Value, + pub mlt_tokens: pallet_utxo::tokens::Value, } impl MltKeysInfo { diff --git a/node/src/command.rs b/node/src/command.rs index 542af4c..76d2c0b 100644 --- a/node/src/command.rs +++ b/node/src/command.rs @@ -70,7 +70,7 @@ struct MltKeysFromFile { } impl MltKeysFromFile { - fn into_mlt_keys_info(self, mlt_tokens: pallet_utxo::Value) -> MltKeysInfo { + fn into_mlt_keys_info(self, mlt_tokens: pallet_utxo::tokens::Value) -> MltKeysInfo { MltKeysInfo { name: self.name, sr25519_public_controller: sr25519::Public::from_h256(self.sr25519_public_controller), @@ -99,7 +99,7 @@ pub fn fetch_keys(auth_keys_url: &'static str) -> Result, Strin let users = users["users"].as_array().ok_or("invalid json to extract user list")?; let share_per_user = TEST_NET_MLT_ORIG_SUPPLY - .checked_div(users.len() as pallet_utxo::Value) + .checked_div(users.len() as pallet_utxo::tokens::Value) .ok_or("unable to share mlt orig supply evenly.")?; for user in users { diff --git a/pallets/utxo/Cargo.toml b/pallets/utxo/Cargo.toml index 8a0a0a8..6b1d6b6 100644 --- a/pallets/utxo/Cargo.toml +++ b/pallets/utxo/Cargo.toml @@ -22,14 +22,21 @@ std = [ hex-literal = "0.2.1" log = "0.4.8" serde = '1.0.119' -pallet-utxo-tokens = { path = "./tokens" } variant_count = '1.1' +[dev-dependencies] +rand = "0.4" + [dependencies.bech32] default-features = false path = '../../libs/bech32' version = '0.8.1' +[dependencies.base58_nostd] 
+default-features = false +path = '../../libs/base58_nostd' +version = '0.1.0' + [dependencies.chainscript] default-features = false path = '../../libs/chainscript' @@ -37,7 +44,7 @@ version = '0.1.0' [dependencies.codec] default-features = false -features = ['derive'] +features = ["derive", "chain-error"] package = 'parity-scale-codec' version = '2.0.0' diff --git a/pallets/utxo/README.md b/pallets/utxo/README.md index e679b3e..b98bce9 100644 --- a/pallets/utxo/README.md +++ b/pallets/utxo/README.md @@ -10,15 +10,15 @@ To run the test cases, just run command `cargo test`. 1. After running the core, declare the custom datatypes. GO to **Settings** > **Developer** tab and paste in the ff. JSON and then save: ```json { - "Value": "u128", - "Destination": { - "_enum": { - "Pubkey": "Pubkey", - "CreatePP": "DestinationCreatePP", - "CallPP": "DestinationCallPP", - "ScriptHash": "H256", - "LockForStaking": "DestinationStake", - "LockExtraForStaking": "DestinationStakeExtra" + "Value": "u128", + "Destination": { + "_enum": { + "Pubkey": "Pubkey", + "CreatePP": "DestinationCreatePP", + "CallPP": "DestinationCallPP", + "ScriptHash": "H256", + "LockForStaking": "DestinationStake", + "LockExtraForStaking": "DestinationStakeExtra" } }, "DestinationStake": { @@ -30,41 +30,60 @@ To run the test cases, just run command `cargo test`. 
"stash_account": "AccountId", "controller_account": "AccountId" }, - "DestinationCreatePP": { - "code": "Vec", - "data": "Vec" - }, - "DestinationCallPP": { - "dest_account": "AccountId", - "input_data": "Vec" - }, - "TransactionInput": { - "outpoint": "Hash", - "lock": "Vec", - "witness": "Vec" - }, - "TransactionOutput": { - "value": "Value", - "header": "TXOutputHeader", - "destination": "Destination" - }, - "TransactionOutputFor": "TransactionOutput", - "Transaction": { - "inputs": "Vec", - "outputs": "Vec", - "time_lock": "Compact" - }, - "TransactionFor": "Transaction", - "Address": "MultiAddress", - "LookupSource": "MultiAddress", - "TXOutputHeader": "u128", - "Difficulty": "U256", - "DifficultyAndTimestamp": { - "difficulty": "Difficulty", - "timestamp": "Moment" - }, - "Pubkey": "H256", - "Public": "H256" + "DestinationCreatePP": { + "code": "Vec", + "data": "Vec" + }, + "DestinationCallPP": { + "dest_account": "AccountId", + "input_data": "Vec" + }, + "TransactionInput": { + "outpoint": "Hash", + "lock": "Vec", + "witness": "Vec" + }, + "TokenId": { + "inner": "H160" + }, + "TokenTransferV1": { + "token_id": "TokenId", + "amount": "Value" + }, + "TokenIssuanceV1": { + "token_ticker": "String", + "amount_to_issue": "Value", + "number_of_decimals": "u8", + "metadata_uri": "String" + }, + "OutputData": { + "_enum": { + "TokenTransferV1": "TokenTransferV1", + "TokenIssuanceV1": "TokenIssuanceV1" + } + }, + "TransactionOutput": { + "value": "Value", + "destination": "Destination", + "data": "Option" + }, + "TransactionOutputFor": "TransactionOutput", + "Transaction": { + "inputs": "Vec", + "outputs": "Vec", + "time_lock": "Compact" + }, + "TransactionFor": "Transaction", + "Address": "MultiAddress", + "LookupSource": "MultiAddress", + "Difficulty": "U256", + "DifficultyAndTimestamp": { + "difficulty": "Difficulty", + "timestamp": "Moment" + }, + "Pubkey": "H256", + "Public": "H256", + "String": "Vec" } ``` 2. 
2. To confirm that Alice already has UTXO at genesis, go to **Developer** > **Chain state** > **Storage**. @@ -78,7 +97,7 @@ Click the **+** button on the right. It should show: header: 0 } ``` -3. Let's spend 50 of Alice's utxo to Bob. Go to **Developer** > **Extrinsics**. +3. Let's spend 50 of Alice's utxo to Bob. Go to **Developer** > **Extrinsics**. Choose `utxo` for _submit the following extrinsic_ dropdown. Input the following parameters (and then submit transaction): * outpoint: `0xe9ea4ce6bf71396302db8d08e7924b5be6a5b0913798bd38741c6c6e9811e864` diff --git a/pallets/utxo/rpc/Cargo.toml b/pallets/utxo/rpc/Cargo.toml index d609b1e..21bc854 100644 --- a/pallets/utxo/rpc/Cargo.toml +++ b/pallets/utxo/rpc/Cargo.toml @@ -6,7 +6,6 @@ [dependencies] pallet-utxo-rpc-runtime-api = { path = "./runtime-api" } -pallet-utxo-tokens = {path = "../tokens"} jsonrpc-core = "18.0.0" jsonrpc-core-client = "18.0.0" jsonrpc-derive = "18.0.0" @@ -44,3 +43,9 @@ default-features = false git = 'https://github.com/paritytech/substrate.git' version = '4.0.0-dev' branch = "master" + +[dependencies.sp-core] +default-features = false +git = 'https://github.com/paritytech/substrate.git' +version = '4.0.0-dev' +branch = "master" \ No newline at end of file diff --git a/pallets/utxo/rpc/runtime-api/Cargo.toml b/pallets/utxo/rpc/runtime-api/Cargo.toml index b7c739e..02587b7 100644 --- a/pallets/utxo/rpc/runtime-api/Cargo.toml +++ b/pallets/utxo/rpc/runtime-api/Cargo.toml @@ -4,9 +4,6 @@ version = "0.1.0" authors = ["RBB Lab"] edition = "2018" -[dependencies] -pallet-utxo-tokens = { path = "../../tokens" } - [dependencies.serde] version = "1.0.104" optional = true @@ -36,6 +33,12 @@ git = 'https://github.com/paritytech/substrate.git' version = '4.0.0-dev' branch = "master" +[dependencies.sp-core] +default-features = false +git = 'https://github.com/paritytech/substrate.git' +version = '4.0.0-dev' +branch = "master" + [dev-dependencies] serde_json = "1.0.48" diff --git
a/pallets/utxo/rpc/runtime-api/src/lib.rs b/pallets/utxo/rpc/runtime-api/src/lib.rs index ebaa307..40b77ab 100644 --- a/pallets/utxo/rpc/runtime-api/src/lib.rs +++ b/pallets/utxo/rpc/runtime-api/src/lib.rs @@ -16,15 +16,8 @@ // Author(s): A. Altonen, Anton Sinitsyn #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::inherent::Vec; - sp_api::decl_runtime_apis! { pub trait UtxoApi { fn send() -> u32; - // What means Vec<(u64, Vec)> ? - // At the moment we have some problems with use serde in RPC, we can serialize and deserialize - // only simple types. This approach allow us to return Vec<(TokenId, TokenName)> instead of - // pallet_utxo_tokens::TokenListData - fn tokens_list() -> Vec<(u64, Vec)>; } } diff --git a/pallets/utxo/rpc/src/lib.rs b/pallets/utxo/rpc/src/lib.rs index 6945bad..5bb25cb 100644 --- a/pallets/utxo/rpc/src/lib.rs +++ b/pallets/utxo/rpc/src/lib.rs @@ -27,10 +27,6 @@ use std::sync::Arc; pub trait UtxoApi { #[rpc(name = "utxo_send")] fn send(&self, at: Option) -> Result; - - // What means Vec<(u64, Vec)> ? Have a look at utxo/rpc/runtime-api/src/lib.rs - #[rpc(name = "tokens_list")] - fn tokens_list(&self, at: Option) -> Result)>>; } /// A struct that implements the [`UtxoApi`]. @@ -79,18 +75,4 @@ where data: Some(format!("{:?}", e).into()), }) } - - fn tokens_list(&self, at: Option<::Hash>) -> Result)>> { - let api = self.client.runtime_api(); - let at = BlockId::hash(at.unwrap_or_else(|| - // If the block hash is not supplied assume the best block. 
- self.client.info().best_hash)); - - let runtime_api_result = api.tokens_list(&at); - runtime_api_result.map_err(|e| RpcError { - code: ErrorCode::ServerError(Error::StorageError as i64), - message: "Something wrong".into(), - data: Some(format!("{:?}", e).into()), - }) - } } diff --git a/pallets/utxo/src/header.rs b/pallets/utxo/src/header.rs deleted file mode 100644 index 6a71482..0000000 --- a/pallets/utxo/src/header.rs +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright (c) 2021 RBB S.r.l -// opensource@mintlayer.org -// SPDX-License-Identifier: MIT -// Licensed under the MIT License; -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://spdx.org/licenses/MIT -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// Author(s): C. 
Yap, Anton Sinitsyn - -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_core::sp_std::convert::TryFrom; - -use codec::{Decode, Encode}; - -pub type TXOutputHeader = u128; -pub type TokenID = u64; - -// Check one bit in a number -#[inline(always)] -fn check_bit(number: u128, pos: u32) -> bool { - (number & (1u128.overflowing_shl(pos).0)) != 0 -} - -#[inline(always)] -fn set_bit(number: u128, pos: u32) -> u128 { - number | (1u128.overflowing_shl(pos).0) -} - -// Copy number to bits field -fn fit_in_bits(number: u128, pos: u32, length: u32) -> u128 { - let mut result = 0u128; - for i in pos..pos + length { - if check_bit(number, i) { - result = set_bit(result, i - pos); - } - } - result -} - -fn move_bits(from: u128, f_offset: u32, f_length: u32, to_offset: u32) -> u128 { - let mut result = 0u128; - for i in f_offset..f_offset + f_length { - if check_bit(from, i) { - result = set_bit(result, i - f_offset + to_offset); - } - } - result -} - -#[derive(Debug)] -struct BitsField { - length: u32, - offset: u32, - pub data: u128, -} - -// Size of bit fields, total 72 bits -const TOKEN_TYPE_SIZE: u32 = 3; -const TOKEN_ID_SIZE: u32 = 64; -const VERSION_SIZE: u32 = 5; - -#[derive(Debug)] -pub struct OutputHeaderData { - token_type: BitsField, - token_id: BitsField, - version: BitsField, - reserve: BitsField, -} - -impl OutputHeaderData { - pub fn new(header: u128) -> OutputHeaderData { - let mut offset = 0; - - // Signature method - let token_type = BitsField { - length: TOKEN_TYPE_SIZE, - offset, - data: fit_in_bits(header, offset, TOKEN_TYPE_SIZE), - }; - offset += TOKEN_TYPE_SIZE; - - // Token ID - let token_id = BitsField { - length: TOKEN_ID_SIZE, - offset, - data: fit_in_bits(header, offset, TOKEN_ID_SIZE), - }; - offset += TOKEN_ID_SIZE; - - // Version number - let version = BitsField { - length: VERSION_SIZE, - offset, - data: fit_in_bits(header, offset, VERSION_SIZE), - }; - offset += VERSION_SIZE; - - // You can add another field here. 
Just do not forget to add offset - OutputHeaderData { - token_type, - token_id, - version, - reserve: BitsField { - length: u128::BITS - offset, - offset, - data: fit_in_bits(header, offset, u128::BITS - offset), - }, - } - } - - pub fn as_u128(&self) -> u128 { - // Easy one because these bits have a concrete place - let mut result = 0u128; - let mut offset = 0; - result += move_bits(self.token_type.data, 0, TOKEN_TYPE_SIZE, offset); - offset += TOKEN_TYPE_SIZE; - result += move_bits(self.token_id.data, 0, TOKEN_ID_SIZE, offset); - offset += TOKEN_ID_SIZE; - result += move_bits(self.version.data, 0, VERSION_SIZE, offset); - - result - } - - pub fn token_type(&self) -> Option { - TryFrom::try_from(self.token_type.data).ok() - } - - pub fn set_token_type(&mut self, token_id: TokenType) { - self.token_type.data = token_id as u128; - } - - pub fn token_id(&self) -> TokenID { - self.token_id.data as u64 - } - - pub fn set_token_id(&mut self, token_id: TokenID) { - self.token_id.data = token_id as u128; - } - - pub fn version(&self) -> u128 { - self.version.data - } - - pub fn set_version(&mut self, version: u64) { - self.version.data = version as u128; - } - - pub fn validate(&self) -> bool { - self.token_type().is_some() - } -} - -pub trait OutputHeaderHelper { - fn as_tx_output_header(&self) -> OutputHeaderData; -} - -impl OutputHeaderHelper for TXOutputHeader { - fn as_tx_output_header(&self) -> OutputHeaderData { - OutputHeaderData::new(*self) - } -} - -// https://stackoverflow.com/posts/57578431/revisions from Shepmaster -// whenever a new type/variant is supported, we don't have to code a lot of 'matches' boilerplate. -macro_rules! 
u128_to_enum { - ($(#[$meta:meta])* $vis:vis enum $name:ident { - $($(#[$vmeta:meta])* $vname:ident $(= $val:expr)?,)* - }) => { - $(#[$meta])* - $vis enum $name { - $($(#[$vmeta])* $vname $(= $val)?,)* - } - - impl TryFrom for $name { - type Error = &'static str; - - fn try_from(v: u128) -> Result { - match v { - $(x if x == $name::$vname as u128 => Ok($name::$vname),)* - _ => { - Err(stringify!(unsupported $name)) - }, - } - } - } - } -} - -u128_to_enum! { -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, Hash, Debug)] -pub enum TokenType { - MLT = 0, - Normal = 1, - CT = 2, - NFT = 3, - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn validate() { - // improper sig meth - assert_eq!(OutputHeaderData::new(0b11111_111u128).validate(), false); - // improper token type - assert_eq!(OutputHeaderData::new(0b11000_100u128).validate(), false); - - // Proper header - assert!(OutputHeaderData::new( - 0b10_0000000000000000000000000000000000000000000000000000000000000000_010u128 - ) - .validate()); - assert!(OutputHeaderData::new( - 0b01_0000000000000000000000000000000000000000000000000000000000000001_000u128 - ) - .validate()); - assert!(OutputHeaderData::new(0u128).validate()); - } - - #[test] - fn token_types() { - let x = 0b11011_000u128; // last 3 bits are 000, so token_type should be 0 or MLT. 
- let header = OutputHeaderData::new(x); - assert!(header.token_type().is_some()); - assert_eq!(header.token_type().unwrap(), TokenType::MLT); - - let x = 0b0000100_001; // last 3 bits are 001, so token_type should be Normal - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::Normal - ); - - let x = 0b111110_010; // last 3 bits are 010, so token_type should be CT - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::CT - ); - - let x = 0b111110_011; // last 3 bits are 011, so token_type should be NFT - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::NFT - ); - - let x = 0b10_111; // last 3 bits is are, and it's not yet supported. - assert_eq!(OutputHeaderData::new(x).token_type(), None); - - // last 3 bits are 001. Convert to 000 for MLT. - let mut header = OutputHeaderData::new(185u128); - header.set_token_type(TokenType::MLT); - assert_eq!(header.as_u128(), 184); - - // last 3 bits of header are 000. Convert to 010 for CT. - header.set_token_type(TokenType::CT); - assert_eq!(header.as_u128(), 186); - } - - #[allow(dead_code)] - fn print_bits(number: u128) { - let mut space = 0; - for i in 0..128 { - if check_bit(number, 127 - i) { - print!("1"); - } else { - print!("0"); - } - space += 1; - if space == 4 { - space = 0; - print!("_"); - } - } - println!(""); - } - - #[test] - fn token_ids() { - const TOKENID_TEST_0: u64 = 0; - const TOKENID_TEST_1: u64 = 1; - const TOKENID_TEST_2: u64 = 2; - - // the middle 64 bits are 000000, so type is TOKENID_TEST_0. - let header = OutputHeaderData::new( - 0b1010_0000000000000000000000000000000000000000000000000000000000000000_110, - ); - assert_eq!(header.token_id(), TOKENID_TEST_0); - - // the middle 64 bits are 000001, so type is TOKENID_TEST_1. 
- let header = OutputHeaderData::new( - 0b1010_0000000000000000000000000000000000000000000000000000000000000001_110, - ); - assert_eq!(header.token_id(), TOKENID_TEST_1); - - // the first 64 bits are 000010, so type is TOKENID_TEST_1. - assert_eq!( - OutputHeaderData::new(0b000001_101).token_id(), - TOKENID_TEST_1 - ); - assert_eq!(OutputHeaderData::new(3u128).token_id(), TOKENID_TEST_0); - - let mut improper_header = OutputHeaderData::new(u128::MAX); - improper_header.set_token_id(TOKENID_TEST_2); - assert_eq!(improper_header.token_id(), TOKENID_TEST_2); - } -} diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index c927b83..559c699 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -17,26 +17,21 @@ #![cfg_attr(not(feature = "std"), no_std)] -pub use header::*; pub use pallet::*; -#[cfg(test)] -mod mock; - -#[cfg(test)] -mod tests; - -#[cfg(test)] -mod staking_tests; - #[cfg(feature = "runtime-benchmarks")] mod benchmarking; - -mod header; +#[cfg(test)] +mod mock; mod rewards; mod script; mod sign; pub mod staking; +#[cfg(test)] +mod staking_tests; +#[cfg(test)] +mod tests; +pub mod tokens; pub mod weights; use chainscript::Builder; @@ -55,13 +50,14 @@ pub mod pallet { use crate::rewards::reward_block_author; pub use crate::script::{BlockTime, RawBlockTime}; use crate::sign::{self, Scheme}; + // todo: This part isn't fully tested, left for the next PR + // use crate::tokens::{NftDataHash}; use crate::staking::{self, StakingHelper}; - use crate::{OutputHeaderData, OutputHeaderHelper, TXOutputHeader, TokenID, TokenType}; + use crate::tokens::{OutputData, TokenId, Value}; use bech32; use chainscript::Script; use codec::{Decode, Encode}; use core::marker::PhantomData; - use frame_support::weights::PostDispatchInfo; use frame_support::{ dispatch::{DispatchResultWithPostInfo, Vec}, pallet_prelude::*, @@ -72,7 +68,6 @@ pub mod pallet { }; use frame_system::pallet_prelude::*; use hex_literal::hex; - use pallet_utxo_tokens::TokenListData; 
use pp_api::ProgrammablePoolApi; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -83,20 +78,8 @@ pub mod pallet { testing::SR25519, H256, H512, }; - use sp_runtime::traits::{AtLeast32Bit, Zero}; - use sp_runtime::DispatchErrorWithPostInfo; - - pub type Value = u128; - pub type String = Vec; pub const MLT_UNIT: Value = 1_000 * 100_000_000; - pub struct Mlt(Value); - impl Mlt { - pub fn to_munit(&self) -> Value { - &self.0 * MLT_UNIT - } - } - #[pallet::error] pub enum Error { /// Account balance must be greater than or equal to the transfer amount. @@ -123,6 +106,8 @@ pub mod pallet { Unapproved, /// The source account would not survive the transfer and it needs to stay alive. WouldDie, + /// Thrown when there is an attempt to mint a duplicate collection. + NftCollectionExists, /// When occurs during LockExtraForStaking, use Destination::LockForStaking for first time staking. /// When occurs during unstaking, it means there's no coordination with pallet-staking @@ -161,8 +146,6 @@ pub mod pallet { pub trait Config: frame_system::Config + pallet_timestamp::Config { type Event: From> + IsType<::Event>; - type AssetId: Parameter + AtLeast32Bit + Default + Copy; - /// The overarching call type. type Call: Dispatchable + From> + IsSubType> + Clone; @@ -325,21 +308,20 @@ pub mod pallet { #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] pub struct TransactionOutput { pub(crate) value: Value, - pub(crate) header: TXOutputHeader, pub(crate) destination: Destination, + pub(crate) data: Option, } impl TransactionOutput { - /// By default the header is 0: + /// By default the data is None: /// token type for both the value and fee is MLT, /// and the signature method is BLS. - /// functions are available in TXOutputHeaderImpls to update the header. 
pub fn new_pubkey(value: Value, pubkey: H256) -> Self { let pubkey = sp_core::sr25519::Public::from_h256(pubkey); Self { value, - header: 0, destination: Destination::Pubkey(pubkey.into()), + data: None, } } @@ -360,12 +342,12 @@ pub mod pallet { ) -> Self { Self { value, - header: 0, destination: Destination::LockForStaking { stash_account, controller_account, session_key, }, + data: None, } } @@ -377,11 +359,11 @@ pub mod pallet { ) -> Self { Self { value, - header: 0, destination: Destination::LockExtraForStaking { stash_account, controller_account, }, + data: None, } } @@ -389,8 +371,8 @@ pub mod pallet { pub fn new_create_pp(value: Value, code: Vec, data: Vec) -> Self { Self { value, - header: 0, destination: Destination::CreatePP(code, data), + data: None, } } @@ -403,20 +385,8 @@ pub mod pallet { ) -> Self { Self { value, - header: 0, destination: Destination::CallPP(dest_account, fund, input), - } - } - - pub fn new_token(token_id: TokenID, value: Value, pub_key: H256) -> Self { - let pub_key = sp_core::sr25519::Public::from_h256(pub_key); - let mut header = OutputHeaderData::new(0); - header.set_token_id(token_id); - let header = header.as_u128(); - Self { - value, - header, - destination: Destination::Pubkey(pub_key.into()), + data: None, } } @@ -424,20 +394,19 @@ pub mod pallet { pub fn new_script_hash(value: Value, hash: H256) -> Self { Self { value, - header: 0, destination: Destination::ScriptHash(hash), + data: None, } } - } - impl TransactionOutput { - fn validate_header(&self) -> Result<(), &'static str> { - // Check signature and token id - self.header - .as_tx_output_header() - .validate() - .then(|| ()) - .ok_or("Incorrect header") + /// Create a new output with the data field. This is going to be paid to a public key. 
+ pub fn new_p2pk_with_data(value: Value, pubkey: H256, data: OutputData) -> Self { + let pubkey = sp_core::sr25519::Public::from_h256(pubkey); + Self { + value, + destination: Destination::Pubkey(pubkey.into()), + data: Some(data), + } } } @@ -503,15 +472,6 @@ pub mod pallet { #[allow(type_alias_bounds)] pub type TransactionFor = Transaction; - #[pallet::storage] - #[pallet::getter(fn token_list)] - pub(super) type TokenList = StorageValue<_, TokenListData, ValueQuery>; - - #[pallet::storage] - #[pallet::getter(fn tokens_higher_id)] - pub(super) type TokensHigherID = StorageValue<_, TokenID, ValueQuery>; - - //TODO: For rename, to differentiate from rewarding a block author. #[pallet::storage] #[pallet::getter(fn reward_total)] pub(super) type RewardTotal = StorageValue<_, Value, ValueQuery>; @@ -524,6 +484,24 @@ pub mod pallet { #[pallet::getter(fn utxo_store)] pub(super) type UtxoStore = StorageMap<_, Identity, H256, TransactionOutputFor>; + #[pallet::storage] + #[pallet::getter(fn token_issuance_transactions)] + pub(super) type TokenIssuanceTransactions = + StorageMap<_, Identity, TokenId, TransactionFor, OptionQuery>; + + // When someone wants to issue a token we should calculate token_id and use it when the owner + // in other transactions will transfer the token. + #[pallet::storage] + #[pallet::getter(fn token_id_issuance)] + pub(super) type TokenIssuanceId = + StorageMap<_, Identity, /* outpoint */ H256, TokenId, OptionQuery>; + + // todo: This part isn't fully tested, left for the next PR + // #[pallet::storage] + // #[pallet::getter(fn nft_unique_data_hash)] + // pub(super) type NftUniqueDataHash = + // StorageMap<_, Identity, NftDataHash, /* UTXO */ H256, OptionQuery>; + /// Represents the validators' stakes. When a validator chooses to stop validating, /// the utxo here is transferred back to `UtxoStore`. 
#[pallet::storage] @@ -541,7 +519,6 @@ pub mod pallet { #[pallet::generate_deposit(pub(super) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { - TokenCreated(u64, T::AccountId), TransactionSuccess(TransactionFor), /// The block author has been rewarded with MLT Coins. @@ -564,6 +541,14 @@ pub mod pallet { } } + // todo: This part isn't fully tested, left for the next PR + // pub(crate) fn get_output_by_token_id( + // token_id: TokenId, + // ) -> Option> { + // let utxo_id = TokenIssuanceTransactions::::get(token_id)?; + // UtxoStore::::get(utxo_id) + // } + // Strips a transaction of its Signature fields by replacing value with ZERO-initialized fixed hash. pub fn get_simple_transaction( tx: &Transaction, @@ -658,7 +643,6 @@ pub mod pallet { tx.check_time_lock::(), "Time lock restrictions not satisfied" ); - // In order to avoid race condition in network we maintain a list of required utxos for a tx // Example of race condition: // Assume both alice and bob have 10 coins each and bob owes charlie 20 coins @@ -692,38 +676,243 @@ pub mod pallet { missing.is_empty().then(|| resolved).ok_or(missing) }; - let full_inputs: Vec<(crate::TokenID, TransactionOutputFor)> = tx + let full_inputs: Vec<(TokenId, TransactionOutputFor)> = tx .inputs .iter() - .filter_map(|input| >::get(&input.outpoint)) - .map(|output| (OutputHeaderData::new(output.header).token_id(), output)) + .filter_map(|input| Some((input.outpoint, >::get(&input.outpoint)?))) + .filter_map(|(outpoint, output)| match output.data { + Some(ref data) => match data { + OutputData::TokenTransferV1 { token_id, .. } => { + Some((token_id.clone(), output)) + } + OutputData::TokenIssuanceV1 { .. } => { + let token_id = >::get(outpoint)?; + Some((token_id, output)) + } + // todo: This part isn't fully tested, left for the next PR + // | OutputData::NftMintV1 { token_id, .. } + // OutputData::TokenBurnV1 { .. 
} => None, + }, + None => { + // We do not calculate MLT here + None + } + }) .collect(); - let input_vec: Vec<(crate::TokenID, Value)> = - full_inputs.iter().map(|output| (output.0, output.1.value)).collect(); + // + let mut total_value_of_input_tokens: BTreeMap = BTreeMap::new(); + let mut mlt_amount_in_inputs: Value = 0; + for input in &tx.inputs { + let output = >::get(&input.outpoint).ok_or("missing inputs")?; + match &output.data { + Some(OutputData::TokenIssuanceV1 { + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + }) => { + // We have to check is this token already issued? + let token_id = TokenIssuanceId::::get(input.outpoint) + .ok_or("token has never been issued")?; + ensure!( + token_ticker.is_ascii(), + "token ticker has none ascii characters" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + ensure!(token_ticker.len() <= 5, "token ticker is too long"); + ensure!(!token_ticker.is_empty(), "token ticker can't be empty"); + ensure!(metadata_uri.len() <= 100, "token metadata uri is too long"); + ensure!(amount_to_issue > &0u128, "output value must be nonzero"); + ensure!(number_of_decimals <= &18, "too long decimals"); + // If token has just created we can't meet another amount here. + ensure!( + !total_value_of_input_tokens.contains_key(&token_id), + "this id can't be used for a token" + ); + total_value_of_input_tokens.insert(token_id.clone(), *amount_to_issue); + // But probably in this input we have a fee + mlt_amount_in_inputs = mlt_amount_in_inputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenTransferV1 { + ref token_id, + amount, + .. 
+ }) => { + ensure!( + TokenIssuanceTransactions::::contains_key(token_id), + "token has never been issued" + ); + total_value_of_input_tokens.insert( + token_id.clone(), + total_value_of_input_tokens + .get(token_id) + .unwrap_or(&0) + .checked_add(*amount) + .ok_or("input value overflow")?, + ); + // But probably in this input we have a fee + mlt_amount_in_inputs = mlt_amount_in_inputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } - let out_vec: Vec<(crate::TokenID, Value)> = tx - .outputs - .iter() - .map(|output| { - ( - OutputHeaderData::new(output.header).token_id(), - output.value, - ) - }) - .collect(); + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::TokenBurnV1 { .. }) => { + // // Nothing to do here because tokens no longer exist. + // } + // Some(OutputData::NftMintV1 { + // ref token_id, + // data_hash, + // metadata_uri, + // }) => { + // // We have to check is this token already issued? + // ensure!( + // TokenIssuanceTransactions::::contains_key(token_id), + // "unable to use an input where NFT has not minted yet" + // ); + // // Check is this digital data unique? + // ensure!( + // NftUniqueDataHash::::contains_key(data_hash), + // "unable to use an input where NFT digital data was changed" + // ); + // ensure!( + // metadata_uri.is_ascii(), + // "metadata uri has none ascii characters" + // ); + // // If NFT has just created we can't meet another NFT part here. 
+ // total_value_of_input_tokens.insert(token_id.clone(), 1); + // } + None => { + mlt_amount_in_inputs = mlt_amount_in_inputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + } + } + + let mut total_value_of_output_tokens: BTreeMap = BTreeMap::new(); + let mut mlt_amount_in_outputs: Value = 0; + for output in &tx.outputs { + match &output.data { + Some(OutputData::TokenIssuanceV1 { + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + }) => { + // We have to check is this token already issued? + let token_id = TokenId::new(&tx.inputs[0]); + + ensure!( + !TokenIssuanceTransactions::::contains_key(&token_id), + "token has already been issued" + ); + ensure!( + token_ticker.is_ascii(), + "token ticker has none ascii characters" + ); + ensure!( + metadata_uri.is_ascii(), + "metadata uri has none ascii characters" + ); + ensure!(token_ticker.len() <= 5, "token ticker is too long"); + ensure!(!token_ticker.is_empty(), "token ticker can't be empty"); + ensure!(metadata_uri.len() <= 100, "token metadata uri is too long"); + ensure!(amount_to_issue > &0u128, "output value must be nonzero"); + ensure!(number_of_decimals <= &18, "too long decimals"); + + // If token has just created we can't meet another amount here. + ensure!( + !total_value_of_output_tokens.contains_key(&token_id), + "this id can't be used for a new token" + ); + total_value_of_output_tokens.insert(token_id.clone(), *amount_to_issue); + // But probably in this input we have a fee + mlt_amount_in_outputs = mlt_amount_in_outputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + Some(OutputData::TokenTransferV1 { + ref token_id, + amount, + .. 
+ }) => { + ensure!( + TokenIssuanceTransactions::::contains_key(token_id), + "input for the token not found" + ); + total_value_of_output_tokens.insert( + token_id.clone(), + total_value_of_output_tokens + .get(token_id) + .unwrap_or(&0) + .checked_add(*amount) + .ok_or("output value overflow")?, + ); + // But probably in this input we have a fee + mlt_amount_in_outputs = mlt_amount_in_outputs + .checked_add(output.value) + .ok_or("input value overflow")?; + } + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::TokenBurnV1 { .. }) => { + // // Nothing to do here because tokens no longer exist. + // } + // Some(OutputData::NftMintV1 { + // ref token_id, + // data_hash, + // metadata_uri, + // }) => { + // // We have to check is this token already issued? + // ensure!( + // !TokenIssuanceTransactions::::contains_key(token_id), + // "token has already been issued" + // ); + // + // // Check is this digital data unique? + // ensure!( + // !>::contains_key(data_hash), + // "digital data has already been minted" + // ); + // ensure!( + // metadata_uri.is_ascii(), + // "metadata uri has none ascii characters" + // ); + // // If NFT has just created we can't meet another NFT part here. + // total_value_of_output_tokens.insert(token_id.clone(), 1); + // } + None => { + mlt_amount_in_outputs = mlt_amount_in_outputs + .checked_add(output.value) + .ok_or("output value overflow")?; + } + } + } // Check for token creation - let tokens_list = >::get(); for output in tx.outputs.iter() { - let tid = OutputHeaderData::new(output.header).token_id(); + let tid = match output.data { + Some(OutputData::TokenTransferV1 { ref token_id, .. }) => token_id.clone(), + Some(OutputData::TokenIssuanceV1 { .. }) => TokenId::new(&tx.inputs[0]), + None => continue, + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::NftMintV1 { .. }) + // | Some(OutputData::TokenBurnV1 { .. 
}) + // | None => continue, + }; // If we have input and output for the same token it's not a problem if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() { continue; } else { - // But when we don't have an input for token but token id exist in TokenList + // But when we don't have an input for token but token id exist ensure!( - tokens_list.iter().find(|&x| x.id == tid).is_none(), + !>::contains_key(tid), "no inputs for the token id" ); } @@ -734,17 +923,26 @@ pub mod pallet { // Check that outputs are valid for (output_index, output) in tx.outputs.iter().enumerate() { - // Check the header is valid - let res = output.validate_header(); - if let Err(e) = res { - log::error!("Header error: {}", e); + match output.data { + Some(OutputData::TokenIssuanceV1 { + amount_to_issue, .. + }) => ensure!(amount_to_issue > 0, "output value must be nonzero"), + Some(OutputData::TokenTransferV1 { amount, .. }) => { + ensure!(amount > 0, "output value must be nonzero") + } + None => ensure!(output.value > 0, "output value must be nonzero"), + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::TokenBurnV1 { amount_to_burn, .. }) => { + // ensure!(amount_to_burn > 0, "output value must be nonzero") + // } + // Some(OutputData::NftMintV1 { .. }) => { + // // Nothing to check + // } } - ensure!(res.is_ok(), "header error. Please check the logs."); - ensure!(output.value > 0, "output value must be nonzero"); let hash = tx.outpoint(output_index as u64); new_utxos.push(hash.as_fixed_bytes().to_vec()); - match &output.destination { + match output.destination { Destination::CreatePP(_, _) => { ensure!(!>::contains_key(hash), "output already exists"); log::info!("TODO validate CreatePP as output"); @@ -765,39 +963,62 @@ pub mod pallet { // if all spent UTXOs are available, check the math and signatures if let Ok(input_utxos) = &input_utxos { // We have to check sum of input tokens is less or equal to output tokens. 
- let mut inputs_sum: BTreeMap = BTreeMap::new(); - let mut outputs_sum: BTreeMap = BTreeMap::new(); - - for x in input_vec { - let value = - x.1.checked_add(*inputs_sum.get(&x.0).unwrap_or(&0)) - .ok_or("input value overflow")?; - inputs_sum.insert(x.0, value); - } - for x in out_vec { - let value = - x.1.checked_add(*outputs_sum.get(&x.0).unwrap_or(&0)) - .ok_or("output value overflow")?; - outputs_sum.insert(x.0, value); - } + ensure!( + mlt_amount_in_outputs <= mlt_amount_in_inputs, + "output value must not exceed input value" + ); - let mut new_token_exist = false; - for output_token in &outputs_sum { - match inputs_sum.get(&output_token.0) { - Some(input_value) => ensure!( - input_value >= &output_token.1, - "output value must not exceed input value" - ), + let mut issuance_counter = 0; + for (token_id, token_value) in &total_value_of_output_tokens { + match total_value_of_input_tokens.get(&token_id) { + Some(input_value) => { + ensure!( + input_value == token_value, + "output value must not exceed input value" + ) + } + // We have an output, but we have not an input None => { - // If the transaction has one an output with a new token ID - if new_token_exist { - frame_support::fail!("input for the token not found") - } else { - new_token_exist = true; + // find TransactionOutput for this token_id + let output = &tx.outputs.iter().find(|x| match x.data { + Some(ref output_data) => { + output_data.id(&tx.inputs[0]).as_ref() == Some(token_id) + } + None => false, + }); + + match output { + Some(output) => match output.data { + // todo: This part isn't fully tested, left for the next PR + // | Some(OutputData::TokenBurnV1 { .. }) => + // Some(OutputData::NftMintV1 { .. }) + Some(OutputData::TokenIssuanceV1 { .. }) => { + // If we make a new token then okay, this is not a problem + issuance_counter += 1; + continue; + } + None | Some(OutputData::TokenTransferV1 { .. 
}) => { + // But we can't send a token without input + frame_support::fail!("input for the token not found2") + } + }, + // This situation should never happen, but let's cover it + None => frame_support::fail!("corrupted output data"), } } } } + ensure!( + issuance_counter <= 1, + "too many issuance in one transaction" + ); + if issuance_counter == 1 { + // The sender should pay not less than 100 MLT for issuance + ensure!( + mlt_amount_in_inputs >= crate::tokens::Mlt(100).to_munit(), + "insufficient fee" + ); + } for (index, (input, input_utxo)) in tx.inputs.iter().zip(input_utxos).enumerate() { match &input_utxo.destination { @@ -816,7 +1037,7 @@ pub mod pallet { ensure!(ok, "signature must be valid"); } Destination::CreatePP(_, _) => { - log::info!("TODO validate spending of CreatePP"); + log::info!("TODO validate spending of OP_CREATE"); } Destination::CallPP(_, _, _) => { let spend = @@ -841,14 +1062,11 @@ pub mod pallet { } // Reward at the moment only in MLT - reward = if inputs_sum.contains_key(&(TokenType::MLT as TokenID)) - && outputs_sum.contains_key(&(TokenType::MLT as TokenID)) - { - inputs_sum[&(TokenType::MLT as TokenID)] - .checked_sub(outputs_sum[&(TokenType::MLT as TokenID)]) - .ok_or("reward underflow")? - } else { - *inputs_sum.get(&(TokenType::MLT as TokenID)).ok_or("fee doesn't exist")? + reward = mlt_amount_in_inputs + .checked_sub(mlt_amount_in_outputs) + .ok_or("reward underflow")?; + if reward >= u64::MAX.into() { + frame_support::fail!("reward exceed allowed amount"); } } @@ -884,6 +1102,40 @@ pub mod pallet { let hash = tx.outpoint(index as u64); match &output.destination { + Destination::Pubkey(_) | Destination::ScriptHash(_) => { + let hash = tx.outpoint(index as u64); + log::debug!("inserting to UtxoStore {:?} as key {:?}", output, hash); + >::insert(hash, output); + match &output.data { + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::NftMintV1 { + // token_id, + // data_hash, + // .. 
+ // }) => { + // // We have to control that digital data of NFT is unique. + // // Otherwise, anybody else might make a new NFT with exactly the same hash. + // >::insert(data_hash, hash); + // // Also, we should provide possibility of find an output that by token_id. + // // This output is a place where token was created. It allow us to check that a token or + // // a NFT have not created yet. + // >::insert(token_id, hash); + // } + Some(OutputData::TokenIssuanceV1 { .. }) => { + let token_id = TokenId::new(&tx.inputs[0]); + // Link output hash + >::insert(hash, &token_id); + // For MLS-01 we save a relation between token_id and the tx where + // token was created. + >::insert(&token_id, &tx); + } + // For the security reason we are implementing all cases + // todo: This part isn't fully tested, left for the next PR + // Some(OutputData::TokenBurnV1 { .. }) + // | + Some(OutputData::TokenTransferV1 { .. }) | None => continue, + } + } Destination::CreatePP(script, data) => { log::debug!("inserting to UtxoStore {:?} as key {:?}", output, hash); >::insert(hash, output); @@ -894,10 +1146,6 @@ pub mod pallet { >::insert(hash, output); call::(caller, acct_id, hash, output.value, *fund, data); } - Destination::Pubkey(_) | Destination::ScriptHash(_) => { - log::debug!("inserting to UtxoStore {:?} as key {:?}", output, hash); - >::insert(hash, output); - } Destination::LockForStaking { .. 
} => { staking::lock_for_staking::(hash, output)?; } @@ -920,63 +1168,7 @@ pub mod pallet { Ok(().into()) } - pub fn token_create( - caller: &T::AccountId, - public: H256, - input_for_fee: TransactionInput, - token_name: String, - token_ticker: String, - supply: Value, - ) -> Result> { - ensure!(token_name.len() <= 25, Error::::Unapproved); - ensure!(token_ticker.len() <= 5, Error::::Unapproved); - ensure!(!supply.is_zero(), Error::::MinBalanceZero); - - // Take a free TokenID - let token_id = - >::get().checked_add(1).ok_or("All tokens IDs has taken")?; - - // Input with MLT FEE - let fee = UtxoStore::::get(input_for_fee.outpoint).ok_or(Error::::Unapproved)?.value; - ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); - - // Save in UTXO - let instance = crate::TokenInstance::new(token_id, token_name, token_ticker, supply); - let mut tx = Transaction { - inputs: crate::vec![ - // Fee an input equal 100 MLT - input_for_fee, - ], - outputs: crate::vec![ - // Output a new tokens - TransactionOutput::new_token(token_id, supply, public), - ], - time_lock: Default::default(), - }; - - // We shall make an output to return odd funds - if fee > Mlt(100).to_munit() { - tx.outputs.push(TransactionOutput::new_pubkey( - fee - Mlt(100).to_munit(), - public, - )); - } - - // Save in Store - >::mutate(|x| { - if x.iter().find(|&x| x.id == token_id).is_none() { - x.push(instance.clone()) - } else { - panic!("the token has already existed with the same id") - } - }); - - // Success - spend::(caller, &tx)?; - Ok(token_id) - } - - /// Pick the UTXOs of `caller` from UtxoStore that satify request `value` + /// Pick the UTXOs of `caller` from UtxoStore that satisfy request `value` /// /// Return a list of UTXOs that satisfy the request /// Return empty vector if caller doesn't have enough UTXO @@ -1024,28 +1216,6 @@ pub mod pallet { Ok(().into()) } - #[pallet::weight(::WeightInfo::token_create(768_usize.saturating_add(token_name.len()) as u32))] - pub fn token_create( - origin: 
OriginFor, - public: H256, - input_for_fee: TransactionInput, - token_name: String, - token_ticker: String, - supply: Value, - ) -> DispatchResultWithPostInfo { - let caller = &ensure_signed(origin)?; - let token_id = token_create::( - caller, - public, - input_for_fee, - token_name, - token_ticker, - supply, - )?; - Self::deposit_event(Event::::TokenCreated(token_id, caller.clone())); - Ok(().into()) - } - #[pallet::weight(::WeightInfo::send_to_address(16_u32.saturating_add(address.len() as u32)))] pub fn send_to_address( origin: OriginFor, @@ -1096,7 +1266,8 @@ pub mod pallet { TransactionOutput { value, destination: dest, - header: Default::default(), + // todo: We need to check what kind of token over here + data: None, }, TransactionOutput::new_pubkey(total - value, H256::from(pubkey_raw)), ], @@ -1176,16 +1347,28 @@ pub mod pallet { } } -use pallet_utxo_tokens::{TokenInstance, TokenListData}; - impl crate::Pallet { pub fn send() -> u32 { 1337 } - pub fn tokens_list() -> TokenListData { - >::get() - } + // todo: This part isn't fully tested, left for the next PR + // pub fn nft_read( + // nft_id: &core::primitive::str, + // ) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { + // match crate::pallet::get_output_by_token_id::( + // crate::tokens::TokenId::from_string(&nft_id).ok()?, + // )? + // .data + // { + // Some(crate::tokens::OutputData::NftMintV1 { + // data_hash, + // metadata_uri, + // .. 
+ // }) => Some((metadata_uri, data_hash.encode())), + // _ => None, + // } + // } } fn coin_picker(outpoints: &Vec) -> Result, DispatchError> { diff --git a/pallets/utxo/src/mock.rs b/pallets/utxo/src/mock.rs index d2b80f8..bcedc41 100644 --- a/pallets/utxo/src/mock.rs +++ b/pallets/utxo/src/mock.rs @@ -19,6 +19,8 @@ use pallet_utxo::staking::StakingHelper; use pallet_utxo::TransactionOutput; use pp_api::ProgrammablePoolApi; +use crate::tokens::Value; +use crate::MLT_UNIT; use frame_support::dispatch::DispatchResultWithPostInfo; use frame_support::{dispatch::Vec, weights::Weight}; use frame_support::{ @@ -63,10 +65,14 @@ thread_local! { pub const ALICE_PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; +// 1 / 10 of TEST_NET_MLT_ORIG_SUPPLY +pub const ALICE_GENESIS_BALANCE: Value = MLT_UNIT * 400_000_000_00; + pub fn genesis_utxo() -> (TransactionOutput, H256) { let keystore = KeyStore::new(); let alice_pub_key = create_pub_key(&keystore, ALICE_PHRASE); - let output = TransactionOutput::::new_pubkey(100, H256::from(alice_pub_key)); + let output = + TransactionOutput::::new_pubkey(ALICE_GENESIS_BALANCE, H256::from(alice_pub_key)); let hash = BlakeTwo256::hash_of(&(&output, 0u64, "genesis")); (output, hash) } @@ -339,7 +345,6 @@ impl pallet_utxo::Config for Test { type Call = Call; type WeightInfo = crate::weights::WeightInfo; type ProgrammablePool = MockPool; - type AssetId = u64; type RewardReductionFraction = RewardReductionFraction; type RewardReductionPeriod = RewardReductionPeriod; @@ -368,7 +373,10 @@ pub fn alice_test_ext() -> TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_utxo::GenesisConfig:: { - genesis_utxos: vec![TransactionOutput::new_pubkey(100, H256::from(alice_pub_key))], + genesis_utxos: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE, + H256::from(alice_pub_key), + )], locked_utxos: vec![], } .assimilate_storage(&mut t) @@ -390,7 
+398,10 @@ pub fn alice_test_ext_and_keys() -> (TestExternalities, Public, Public) { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_utxo::GenesisConfig:: { - genesis_utxos: vec![TransactionOutput::new_pubkey(100, H256::from(alice_pub_key))], + genesis_utxos: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE, + H256::from(alice_pub_key), + )], locked_utxos: vec![], } .assimilate_storage(&mut t) diff --git a/pallets/utxo/src/rewards.rs b/pallets/utxo/src/rewards.rs index ce78223..e6e915a 100644 --- a/pallets/utxo/src/rewards.rs +++ b/pallets/utxo/src/rewards.rs @@ -16,8 +16,8 @@ // Author(s): C. Yap use crate::{ - convert_to_h256, BlockAuthor, Config, Event, Pallet, RewardTotal, TransactionOutput, UtxoStore, - Value, + convert_to_h256, tokens::Value, BlockAuthor, Config, Event, Pallet, RewardTotal, + TransactionOutput, UtxoStore, }; use frame_support::traits::Get; diff --git a/pallets/utxo/src/staking.rs b/pallets/utxo/src/staking.rs index e466366..06b8900 100644 --- a/pallets/utxo/src/staking.rs +++ b/pallets/utxo/src/staking.rs @@ -16,8 +16,8 @@ // Author(s): C. 
Yap use crate::{ - convert_to_h256, Config, Destination, Error, Event, LockedUtxos, Pallet, RewardTotal, - StakingCount, TransactionOutput, UtxoStore, Value, + convert_to_h256, tokens::Value, Config, Destination, Error, Event, LockedUtxos, Pallet, + RewardTotal, StakingCount, TransactionOutput, UtxoStore, }; use frame_support::{ dispatch::{DispatchResultWithPostInfo, Vec}, @@ -164,7 +164,7 @@ pub(crate) fn withdraw(stash_account: T::AccountId) -> DispatchResult pub mod validation { use super::*; use crate::staking::utils::get_all_locked_utxo_outpoints; - use crate::{OutputHeaderHelper, TokenType, TransactionOutputFor}; + use crate::TransactionOutputFor; /// to validate `LockForStaking` and `LockExtraForStaking` pub fn validate_staking_ops( @@ -177,10 +177,7 @@ pub mod validation { ); ensure!( - matches!( - tx.header.as_tx_output_header().token_type(), - Some(TokenType::MLT) - ), + tx.data.is_none(), "only MLT tokens are supported for staking" ); diff --git a/pallets/utxo/src/staking_tests.rs b/pallets/utxo/src/staking_tests.rs index fcf20e1..d05355f 100644 --- a/pallets/utxo/src/staking_tests.rs +++ b/pallets/utxo/src/staking_tests.rs @@ -17,12 +17,68 @@ use crate::{ mock::*, Destination, Error, LockedUtxos, StakingCount, Transaction, TransactionInput, - TransactionOutput, UtxoStore, + TransactionOutput, UtxoStore, MLT_UNIT, }; use codec::Encode; use frame_support::{assert_err, assert_ok, sp_io::crypto}; use sp_core::{sp_std::vec, testing::SR25519, H256}; +// JUST FOR SEEKING BUG IN FUNCTIONAL TEST +// todo: Remove this +#[test] +fn staking_first_time() { + let (mut test_ext, keys_and_hashes) = multiple_keys_test_ext(); + test_ext.execute_with(|| { + let (karl_pub_key, karl_genesis) = keys_and_hashes[1]; + let (alice_pub_key, _) = keys_and_hashes[0]; + let (greg_pub_key, _) = keys_and_hashes[2]; + + let utxo = UtxoStore::::get(karl_genesis).expect("tom's utxo does not exist"); + let tx1 = Transaction { + inputs: vec![TransactionInput::new_empty(karl_genesis)], + 
outputs: vec![TransactionOutput::new_pubkey(100, H256::from(alice_pub_key))], + time_lock: Default::default(), + } + .sign(&[utxo], 0, &karl_pub_key) + .expect("karl's pub key not found"); + let utxo = &tx1.outputs[0]; + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx1.clone())); + + let tx2 = Transaction { + inputs: vec![TransactionInput::new_empty(tx1.outpoint(0))], + outputs: vec![ + // KARL (index 1) wants to be a validator. He will use GREG (index 2) as the controller account. + // minimum value to stake is 10, + TransactionOutput::new_lock_for_staking( + 90, // 40000 * MLT_UNIT, + H256::from(greg_pub_key), + H256::from(greg_pub_key), + vec![2, 1], + ), + TransactionOutput::new_pubkey( + 10, /*9999 * MLT_UNIT*/ + H256::from(karl_pub_key), + ), + ], + time_lock: Default::default(), + } + .sign(&[utxo.clone()], 0, &alice_pub_key) + .expect("Alice's pub key not found"); + let new_utxo_hash = tx2.outpoint(1); + + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx2)); + assert!(UtxoStore::::contains_key(new_utxo_hash)); + assert!(StakingCount::::contains_key(H256::from(greg_pub_key))); + assert!(StakingCount::::contains_key(H256::from( + alice_pub_key + ))); + assert_eq!( + StakingCount::::get(H256::from(greg_pub_key)), + Some((1, 90)) + ); + }) +} + #[test] fn simple_staking() { let (mut test_ext, keys_and_hashes) = multiple_keys_test_ext(); @@ -100,30 +156,41 @@ fn less_than_minimum_stake() { #[test] fn non_mlt_staking() { + use crate::tokens::OutputData; + let (mut test_ext, keys_and_hashes) = multiple_keys_test_ext(); test_ext.execute_with(|| { let (karl_pub_key, karl_genesis) = keys_and_hashes[1]; let (greg_pub_key, _) = keys_and_hashes[2]; - let mut tx = Transaction { + + let utxo = UtxoStore::::get(karl_genesis).expect("kar's utxo does not exist"); + + let tx = Transaction { inputs: vec![TransactionInput::new_empty(karl_genesis)], outputs: vec![ // KARL (index 1) wants to be a validator. He will use GREG (index 2) as the controller account. 
// minimum value to stake is 10, but KARL only staked 5. TransactionOutput { - value: 5, - header: 1, // not an MLT token + value: 10, destination: Destination::LockForStaking { stash_account: H256::from(karl_pub_key), controller_account: H256::from(greg_pub_key), session_key: vec![2, 1], }, + data: Some(OutputData::TokenIssuanceV1 { + token_ticker: "Token".as_bytes().to_vec(), + amount_to_issue: 5_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 12, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }), }, - TransactionOutput::new_pubkey(90, H256::from(karl_pub_key)), + TransactionOutput::new_pubkey(80, H256::from(karl_pub_key)), ], time_lock: Default::default(), - }; - let karl_sig = crypto::sr25519_sign(SR25519, &karl_pub_key, &tx.encode()).unwrap(); - tx.inputs[0].witness = karl_sig.0.to_vec(); + } + .sign(&[utxo], 0, &karl_pub_key) + .expect("karl's pub key not found"); assert_err!( Utxo::spend(Origin::signed(H256::zero()), tx), @@ -319,7 +386,6 @@ fn non_validator_withdrawing() { fn withdrawing_before_expected_period() { let (mut test_ext, keys_and_hashes) = multiple_keys_test_ext(); test_ext.execute_with(|| { - // ALICE (index 0) wants to stop validating. let (alice_pub_key, _) = keys_and_hashes[0]; diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 0785b5e..9cd3445 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -16,8 +16,8 @@ // Author(s): C. 
Yap use crate::{ - mock::*, BlockTime, Destination, RawBlockTime, RewardTotal, TokenList, Transaction, - TransactionInput, TransactionOutput, UtxoStore, Value, + mock::*, tokens::Value, BlockTime, Destination, RawBlockTime, RewardTotal, Transaction, + TransactionInput, TransactionOutput, UtxoStore, }; use chainscript::{opcodes::all as opc, Builder}; use codec::Encode; @@ -26,9 +26,10 @@ use frame_support::{ sp_io::crypto, sp_runtime::traits::{BlakeTwo256, Hash}, }; -use pallet_utxo_tokens::TokenInstance; -use proptest::prelude::*; + use crate::script::test::gen_block_time_real; +use crate::tokens::OutputData; +use proptest::prelude::*; use sp_core::{sp_std::vec, sr25519::Public, testing::SR25519, H256, H512}; fn tx_input_gen_no_signature() -> (TransactionOutput, TransactionInput) { @@ -81,14 +82,20 @@ fn test_script_preimage() { let (utxo0, input0) = tx_input_gen_no_signature(); let tx1 = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_script_hash(50, script_hash)], + outputs: vec![TransactionOutput::new_script_hash( + ALICE_GENESIS_BALANCE - 50, + script_hash, + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); let tx2 = Transaction { inputs: vec![TransactionInput::new_script(tx1.outpoint(0), script, witness_script)], - outputs: vec![TransactionOutput::new_script_hash(20, H256::zero())], + outputs: vec![TransactionOutput::new_script_hash( + ALICE_GENESIS_BALANCE - 120, + H256::zero(), + )], time_lock: Default::default(), }; @@ -105,8 +112,14 @@ fn test_unchecked_2nd_output() { let tx1 = Transaction { inputs: vec![input0], outputs: vec![ - TransactionOutput::new_pubkey(30, H256::from(alice_pub_key)), - TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 30, + H256::from(alice_pub_key), + ), + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 50, + H256::from(alice_pub_key), + ), ], time_lock: Default::default(), } @@ -131,7 +144,10 
@@ fn test_simple_tx() { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 50, + H256::from(alice_pub_key), + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -143,7 +159,10 @@ fn test_simple_tx() { assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); assert!(UtxoStore::::contains_key(new_utxo_hash)); - assert_eq!(50, UtxoStore::::get(new_utxo_hash).unwrap().value); + assert_eq!( + ALICE_GENESIS_BALANCE - 50, + UtxoStore::::get(new_utxo_hash).unwrap().value + ); }) } @@ -171,6 +190,8 @@ fn attack_with_sending_to_own_account() { #[test] fn attack_with_empty_transactions() { alice_test_ext().execute_with(|| { + // We should use the real input because. Otherwise, appears another error + let (_, input) = tx_input_gen_no_signature(); assert_err!( Utxo::spend(Origin::signed(H256::zero()), Transaction::default()), // empty tx "no inputs" @@ -180,7 +201,7 @@ fn attack_with_empty_transactions() { Utxo::spend( Origin::signed(H256::zero()), Transaction { - inputs: vec![TransactionInput::default()], // an empty tx + inputs: vec![input], // an empty tx outputs: vec![], time_lock: Default::default() } @@ -277,7 +298,7 @@ fn attack_by_overspending() { let tx = Transaction { inputs: vec![input0], outputs: vec![ - TransactionOutput::new_pubkey(100, H256::from(alice_pub_key)), + TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE, H256::from(alice_pub_key)), // Creates 2 new utxo out of thin air TransactionOutput::new_pubkey(2, H256::from(alice_pub_key)), ], @@ -304,7 +325,10 @@ fn tx_from_alice_to_karl() { inputs: vec![input0], outputs: vec![ TransactionOutput::new_pubkey(10, H256::from(karl_pub_key)), - TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), + 
TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + ), ], time_lock: Default::default(), } @@ -317,7 +341,10 @@ fn tx_from_alice_to_karl() { // then send rest of the tokens to karl (proving that the first tx was successful) let tx = Transaction { inputs: vec![TransactionInput::new_empty(new_utxo_hash)], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(karl_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(karl_pub_key), + )], time_lock: Default::default(), } .sign_unchecked(&[new_utxo], 0, &alice_pub_key); @@ -331,22 +358,64 @@ fn tx_from_alice_to_karl() { fn test_reward() { execute_with_alice(|alice_pub_key| { let (utxo0, input0) = tx_input_gen_no_signature(); + + // Check the default parameters + let utxos = UtxoStore::::get(input0.outpoint).unwrap(); + assert_eq!(utxos.value, ALICE_GENESIS_BALANCE); + let reward = RewardTotal::::get(); + assert_eq!(reward, 0); + + // Make a new transaction let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let utxo_hash = tx.outpoint(0); // if the previous spend succeeded, there should be one utxo - // that has a value of 90 and a reward that has a value of 10 - let utxos = UtxoStore::::iter_values().next().unwrap(); + // that has a value of ALICE_GENESIS_BALANCE - 90 and a reward that has a value of 90 + let utxos = UtxoStore::::get(utxo_hash).unwrap(); + let reward = RewardTotal::::get(); + assert_eq!(utxos.value, ALICE_GENESIS_BALANCE - 90); + assert_eq!(reward, 90); + }) +} + +#[test] +fn test_reward_overflow() { + execute_with_alice(|alice_pub_key| { + let (utxo0, input0) = 
tx_input_gen_no_signature(); + + // Check the default parameters + let utxos = UtxoStore::::get(input0.outpoint).unwrap(); + assert_eq!(utxos.value, ALICE_GENESIS_BALANCE); let reward = RewardTotal::::get(); + assert_eq!(reward, 0); - assert_eq!(utxos.value, 90); - assert_eq!(reward, 10); + // Make a new transaction where + // Input balance: 4_000_000_000_000_000_000_000 + // u64::MAX: 18_446_744_073_709_551_615 + // the difference: 3_981_553_255_926_290_448_385 + let tx = Transaction { + inputs: vec![input0], + outputs: vec![TransactionOutput::new_pubkey( + 3_981_553_255_926_290_448_385, + H256::from(alice_pub_key), + )], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + assert_err!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "reward exceed allowed amount" + ); }) } @@ -356,7 +425,10 @@ fn test_script() { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -371,7 +443,10 @@ fn test_time_lock_tx() { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(90, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + )], time_lock: BlockTime::Blocks(10).as_raw().unwrap(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -390,7 +465,10 @@ fn test_time_lock_script_fail() { let script_hash: H256 = BlakeTwo256::hash(script.as_ref()); let tx1 = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_script_hash(90, script_hash)], + outputs: vec![TransactionOutput::new_script_hash( + ALICE_GENESIS_BALANCE - 90, + script_hash, + )], 
time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -401,7 +479,10 @@ fn test_time_lock_script_fail() { // the time lock restrictions imposed by the scripting system. let tx2 = Transaction { inputs: vec![TransactionInput::new_script(outpoint, script, Default::default())], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice_pub_key))], + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 150, + H256::from(alice_pub_key), + )], time_lock: Default::default(), }; assert_err!( @@ -411,67 +492,6 @@ fn test_time_lock_script_fail() { }) } -#[test] -fn test_tokens() { - use crate::TokensHigherID; - - let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys(); - test_ext.execute_with(|| { - // Let's create a new test token - let token_id = >::get() - .checked_add(1) - .ok_or("All tokens IDs has taken") - .unwrap(); - // Let's make a tx for a new token: - // * We need at least one input for the fee and one output for a new token. - // * TokenID for a new token has to be unique. 
- let instance = - TokenInstance::new(token_id, b"New token test".to_vec(), b"NTT".to_vec(), 1000); - let (utxo0, input0) = tx_input_gen_no_signature(); - let first_tx = Transaction { - // 100 MLT - inputs: vec![input0], - outputs: vec![ - // 100 a new tokens - TransactionOutput::new_token(token_id, instance.supply, H256::from(alice_pub_key)), - // 20 MLT to be paid as a fee, 80 MLT returning - TransactionOutput::new_pubkey(80, H256::from(alice_pub_key)), - ], - time_lock: Default::default(), - } - .sign_unchecked(&[utxo0], 0, &alice_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), first_tx.clone())); - - // Store a new TokenInstance to the Storage - >::mutate(|x| { - if x.iter().find(|&x| x.id == token_id).is_none() { - x.push(instance.clone()) - } else { - panic!("the token has already existed with the same id") - } - }); - dbg!(&>::get()); - - // alice sends 1000 tokens to karl and the rest back to herself 10 tokens - let utxo_hash_mlt = first_tx.outpoint(1); - let utxo_hash_token = first_tx.outpoint(0); - let prev_utxos = [first_tx.outputs[1].clone(), first_tx.outputs[0].clone()]; - - let tx = Transaction { - inputs: vec![ - TransactionInput::new_empty(utxo_hash_mlt), - TransactionInput::new_empty(utxo_hash_token), - ], - outputs: vec![TransactionOutput::new_token(token_id, 10, H256::from(karl_pub_key))], - time_lock: Default::default(), - } - .sign_unchecked(&prev_utxos, 0, &alice_pub_key) - .sign_unchecked(&prev_utxos, 1, &alice_pub_key); - - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - }); -} - #[test] fn attack_double_spend_by_tweaking_input() { execute_with_alice(|alice_pub_key| { @@ -481,7 +501,10 @@ fn attack_double_spend_by_tweaking_input() { let drop_script_hash = BlakeTwo256::hash(drop_script.as_ref()); let tx0 = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_script_hash(50, drop_script_hash)], + outputs: vec![TransactionOutput::new_script_hash( + ALICE_GENESIS_BALANCE - 50, + 
drop_script_hash, + )], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice_pub_key); @@ -496,8 +519,11 @@ fn attack_double_spend_by_tweaking_input() { }) .collect(); let tx1 = Transaction { - inputs: inputs, - outputs: vec![TransactionOutput::new_pubkey(500, H256::from(alice_pub_key))], + inputs, + outputs: vec![TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 500, + H256::from(alice_pub_key), + )], time_lock: Default::default(), }; assert_err!( @@ -526,7 +552,7 @@ fn test_send_to_address() { assert_err!( Utxo::send_to_address( Origin::signed(H256::from(alice_pub_key)), - 10_000_000, + ALICE_GENESIS_BALANCE * 10, addr.as_bytes().to_vec(), ), "Caller doesn't have enough UTXOs", @@ -629,7 +655,7 @@ proptest! { let script_hash: H256 = BlakeTwo256::hash(script.as_ref()); let tx1 = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_script_hash(90, script_hash)], + outputs: vec![TransactionOutput::new_script_hash(ALICE_GENESIS_BALANCE - 90, script_hash)], time_lock: Default::default(), } .sign_unchecked(&[utxo0], 0, &alice); @@ -638,7 +664,7 @@ proptest! { let tx2 = Transaction { inputs: vec![TransactionInput::new_script(outpoint, script, Default::default())], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice))], + outputs: vec![TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE - u32::MAX as Value, H256::from(alice))], time_lock: tx_lock_time, }; Utxo::spend(Origin::signed(H256::zero()), tx2) @@ -662,12 +688,11 @@ proptest! 
{ std::cmp::min_by_key(time0, time1, RawBlockTime::as_u64), std::cmp::max_by_key(time0, time1, RawBlockTime::as_u64), ); - let (res0, res1) = execute_with_alice(|alice| { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice))], + outputs: vec![TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE - 50, H256::from(alice))], time_lock: tx_lock_time, } .sign_unchecked(&[utxo0], 0, &alice); @@ -700,7 +725,7 @@ proptest! { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice))], + outputs: vec![TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE - 50, H256::from(alice))], time_lock: now, } .sign_unchecked(&[utxo0], 0, &alice); @@ -730,7 +755,7 @@ proptest! { let (utxo0, input0) = tx_input_gen_no_signature(); let tx = Transaction { inputs: vec![input0], - outputs: vec![TransactionOutput::new_pubkey(50, H256::from(alice))], + outputs: vec![TransactionOutput::new_pubkey(ALICE_GENESIS_BALANCE - 50, H256::from(alice))], time_lock: time, } .sign_unchecked(&[utxo0], 0, &alice); @@ -740,3 +765,1145 @@ proptest! 
{ }); } } + +// Testing token creation: +// use crate::tokens::{NftDataHash, TokenId}; +use crate::tokens::TokenId; +use rand::Rng; + +fn build_random_vec(len: usize) -> Vec { + let mut rng = rand::thread_rng(); + let mut vec = Vec::with_capacity(len); + for _ in 0..len { + vec.push(rng.gen::()); + } + vec +} + +#[test] +// Simple creation of tokens +fn test_token_issuance() { + execute_with_alice(|alice_pub_key| { + let (utxo0, input0) = tx_input_gen_no_signature(); + let output_new = TransactionOutput { + value: ALICE_GENESIS_BALANCE, + destination: Destination::Pubkey(alice_pub_key), + data: Some(OutputData::TokenIssuanceV1 { + //token_id: TokenId::new_asset(first_input_hash), + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }), + }; + let tx = Transaction { + inputs: vec![input0], + outputs: vec![output_new], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + let new_utxo_hash = tx.outpoint(0); + let (_, init_utxo) = genesis_utxo(); + // submit tx - in the test it makes a new UTXO. Checks before that this UTXO has not created yet. + // After calling `Utxo::spend`, we should check that Storages successfully changed. + // If it successfully wrote a new UTXO in the Storage, tx goes through all verifications correctly. 
+ assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); + // Checking a new UTXO + assert!(UtxoStore::::contains_key(new_utxo_hash)); + + match UtxoStore::::get(new_utxo_hash).expect("The new output not found").data { + Some(OutputData::TokenIssuanceV1 { + //token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + }) => { + //assert_eq!(TokenId::new_asset(first_input_hash), token_id); + assert_eq!(1_000_000_000, amount_to_issue); + assert_eq!("BensT".as_bytes().to_vec(), token_ticker); + assert_eq!(2, number_of_decimals); + assert_eq!("mintlayer.org".as_bytes().to_vec(), metadata_uri); + } + _ => panic!("Transaction data is corrupted"), + } + }); +} + +// todo: This part isn't fully tested, left for the next PR +// #[test] +// // Simple creation of NFT +// fn test_nft_mint() { +// execute_with_alice(|alice_pub_key| { +// let (utxo0, input0) = tx_input_gen_no_signature(); +// let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); +// let data_hash = NftDataHash::Raw(vec![1, 2, 3, 4, 5]); +// let output = TransactionOutput { +// value: 0, +// destination: Destination::Pubkey(alice_pub_key), +// data: Some(OutputData::NftMintV1 { +// token_id: TokenId::new_asset(first_input_hash), +// data_hash: data_hash.clone(), +// metadata_uri: "mintlayer.org".as_bytes().to_vec(), +// }), +// }; +// let tx = Transaction { +// inputs: vec![input0], +// outputs: vec![output], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[utxo0], 0, &alice_pub_key); +// let new_utxo_hash = tx.outpoint(0); +// let (_, init_utxo) = genesis_utxo(); +// assert!(UtxoStore::::contains_key(H256::from(init_utxo))); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); +// assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); +// assert!(UtxoStore::::contains_key(new_utxo_hash)); +// assert_eq!( +// data_hash, 
+// UtxoStore::::get(new_utxo_hash) +// .unwrap() +// .data +// .map(|x| match x { +// OutputData::NftMintV1 { data_hash, .. } => data_hash, +// _ => NftDataHash::Raw(Vec::new()), +// }) +// .unwrap_or(NftDataHash::Raw(Vec::new())) +// ); +// }) +// } +// +// #[test] +// // NFT might be only unique, we can't create a few nft for one item +// fn test_nft_unique() { +// execute_with_alice(|alice_pub_key| { +// let (utxo0, input0) = tx_input_gen_no_signature(); +// let first_input_hash = BlakeTwo256::hash(&input0.outpoint.as_ref()); +// +// let mut nft_data = OutputData::NftMintV1 { +// token_id: TokenId::new_asset(first_input_hash), +// data_hash: NftDataHash::Hash32([255; 32]), +// metadata_uri: "mintlayer.org".as_bytes().to_vec(), +// }; +// let tx = Transaction { +// inputs: vec![input0.clone()], +// outputs: vec![ +// TransactionOutput { +// value: 0, +// destination: Destination::Pubkey(alice_pub_key), +// data: Some(nft_data.clone()), +// }, +// TransactionOutput::new_pubkey(50, H256::from(alice_pub_key)), +// ], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); +// let new_utxo_hash = tx.outpoint(1); +// let (_, init_utxo) = genesis_utxo(); +// // Submit +// assert!(UtxoStore::::contains_key(H256::from(init_utxo))); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); +// assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); +// // Checking a new UTXO +// assert!(UtxoStore::::contains_key(new_utxo_hash)); +// let new_utxo = tx.outputs[1].clone(); +// +// if let OutputData::NftMintV1 { +// ref mut token_id, .. 
+// } = nft_data +// { +// *token_id = TokenId::new_asset(H256::random()); +// } +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(new_utxo_hash.clone())], +// outputs: vec![TransactionOutput { +// value: 0, +// destination: Destination::Pubkey(alice_pub_key), +// data: Some(nft_data.clone()), +// }], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[new_utxo], 0, &alice_pub_key); +// // Submit +// assert!(UtxoStore::::contains_key(H256::from(new_utxo_hash))); +// frame_support::assert_err_ignore_postinfo!( +// Utxo::spend(Origin::signed(H256::zero()), tx), +// "digital data has already been minted" +// ); +// }); +// } + +// This macro using for the fast creation and sending a tx +macro_rules! test_tx { + ($data: ident, $checking: tt, $err: expr) => { + execute_with_alice(|alice_pub_key| { + let (utxo0, input0) = tx_input_gen_no_signature(); + let output_new = TransactionOutput { + value: ALICE_GENESIS_BALANCE - 1, + destination: Destination::Pubkey(alice_pub_key), + data: Some($data.clone()), + }; + let tx = Transaction { + inputs: vec![input0], + outputs: vec![output_new], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + let new_utxo_hash = tx.outpoint(0); + let (_, init_utxo) = genesis_utxo(); + // Send + assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + // We can check what error we are expecting + if stringify!($checking) == "Err" { + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + $err + ); + assert!(UtxoStore::::contains_key(H256::from(init_utxo))); + assert!(!UtxoStore::::contains_key(new_utxo_hash)); + } else if stringify!($checking) == "Ok" { + // We can check is that success + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx)); + assert!(!UtxoStore::::contains_key(H256::from(init_utxo))); + assert!(UtxoStore::::contains_key(new_utxo_hash)); + } + }); + }; +} + +#[test] +fn test_tokens_issuance_empty_ticker() { + // 
Ticker empty + let data = OutputData::TokenIssuanceV1 { + token_ticker: vec![], + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }; + test_tx!(data, Err, "token ticker can't be empty"); +} + +#[test] +fn test_tokens_issuance_too_big_ticker() { + // Ticker too long + let data = OutputData::TokenIssuanceV1 { + token_ticker: Vec::from([b"A"[0]; 10_000]), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }; + test_tx!(data, Err, "token ticker is too long"); +} + +#[test] +fn test_tokens_issuance_amount_zero() { + // Amount to issue is zero + let data = OutputData::TokenIssuanceV1 { + token_ticker: b"BensT".to_vec(), + amount_to_issue: 0, + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }; + test_tx!(data, Err, "output value must be nonzero"); +} + +#[test] +fn test_tokens_issuance_too_big_decimals() { + // Number of decimals more than 18 numbers + let data = OutputData::TokenIssuanceV1 { + token_ticker: b"BensT".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 19, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }; + test_tx!(data, Err, "too long decimals"); +} + +#[test] +fn test_tokens_issuance_empty_metadata() { + // metadata_uri empty + let data = OutputData::TokenIssuanceV1 { + token_ticker: b"BensT".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 18, + metadata_uri: vec![], + }; + test_tx!(data, Ok, ""); +} + +#[test] +fn test_tokens_issuance_too_long_metadata() { + // metadata_uri too long + let data = OutputData::TokenIssuanceV1 { + token_ticker: b"BensT".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 18, + metadata_uri: Vec::from([0u8; 10_000]), + }; + test_tx!(data, Err, "token metadata uri is too long"); +} + +#[test] +fn test_tokens_issuance_with_junk_data() { + // The data field of the maximum allowed length filled with random garbage + let 
mut rng = rand::thread_rng(); + let garbage = build_random_vec(100); + let data = OutputData::TokenIssuanceV1 { + token_ticker: vec![0, 255, 254, 2, 1], + amount_to_issue: rng.gen::() as u128, + number_of_decimals: 18, + metadata_uri: garbage.clone(), + }; + test_tx!(data, Err, "token ticker has none ascii characters"); +} + +#[test] +fn test_tokens_issuance_with_corrupted_uri() { + let mut rng = rand::thread_rng(); + let garbage = build_random_vec(100); + // garbage uri + let data = OutputData::TokenIssuanceV1 { + token_ticker: b"BensT".to_vec(), + amount_to_issue: rng.gen::() as u128, + number_of_decimals: 18, + metadata_uri: garbage, + }; + test_tx!(data, Err, "metadata uri has none ascii characters"); +} + +#[test] +fn test_two_token_creation_in_one_tx() { + execute_with_alice(|alice_pub_key| { + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: b"Enric".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: b"Ben".to_vec(), + amount_to_issue: 2_000_000_000, + number_of_decimals: 3, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0], 0, &alice_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "this id can't be used for a new token" + ); + }); +} + +// Let's wrap common acts +fn test_tx_issuance_for_transfer(expecting_err_msg: &'static str, test_func: F) +where + F: Fn(TokenId, Public, Public, H256, TransactionOutput) -> Transaction, +{ + let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys(); + test_ext.execute_with(|| { + 
// Alice issue 1_000_000_000 MLS-01, and send them to Karl + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0.clone()], + outputs: vec![ + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + ), + TransactionOutput::new_p2pk_with_data( + 10, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + + let token_utxo_hash = tx.outpoint(1); + let token_utxo: TransactionOutput = tx.outputs[1].clone(); + // Call a test func + let tx = test_func( + token_id, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo, + ); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + expecting_err_msg + ); + }); +} + +#[test] +fn test_token_transfer_with_wrong_token_id() { + let test_fun = Box::new( + move |_token_id, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo: TransactionOutput| { + let input = TransactionInput::new_empty(token_utxo_hash); + Transaction { + inputs: vec![input.clone()], + outputs: vec![TransactionOutput::new_p2pk_with_data( + ALICE_GENESIS_BALANCE - u64::MAX as Value, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: TokenId::new(&input), + amount: 100_000_000, + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key) + }, + ); + test_tx_issuance_for_transfer("input for the token not found", test_fun); +} + +#[test] +fn test_token_transfer_exceed_amount_tokens() { + let test_fun = Box::new( + move |token_id, + 
alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo: TransactionOutput| { + Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id, + amount: 1_000_000_001, + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key) + }, + ); + test_tx_issuance_for_transfer("output value must not exceed input value", test_fun); +} + +#[test] +fn test_token_transfer_exceed_amount_mlt() { + let test_fun = Box::new( + move |token_id: TokenId, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo: TransactionOutput| { + Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![TransactionOutput::new_p2pk_with_data( + 1_000_000_000, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 1_000_000_000, + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key) + }, + ); + test_tx_issuance_for_transfer("output value must not exceed input value", test_fun); +} + +#[test] +fn test_token_transfer_send_part_others_burn() { + let test_fun = Box::new( + move |token_id: TokenId, + alice_pub_key, + karl_pub_key, + token_utxo_hash, + token_utxo: TransactionOutput| { + Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![ + // Send only 30%, let's forget about another 70% of tokens + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 300_000_000, + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key) + }, + ); + test_tx_issuance_for_transfer("output value must not exceed input value", test_fun); +} + +#[test] +fn test_token_transfer() { + let (mut test_ext, 
alice_pub_key, karl_pub_key) = alice_test_ext_and_keys(); + test_ext.execute_with(|| { + // Alice issue 1_000_000_000 MLS-01, and send them to Karl + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - 90, + H256::from(alice_pub_key), + ), + TransactionOutput::new_p2pk_with_data( + 90, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let token_utxo_hash = tx.outpoint(1); + let token_utxo = tx.outputs[1].clone(); + + // Let's send 300_000_000 and rest back + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 300_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 700_000_000, + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let alice_tokens_utxo_hash = tx.outpoint(0); + let karl_tokens_utxo_hash = tx.outpoint(1); + let karl_tokens_utxo = tx.outputs[1].clone(); + assert!(!UtxoStore::::contains_key(H256::from( + token_utxo_hash + ))); + assert!(UtxoStore::::contains_key(alice_tokens_utxo_hash)); + assert!(UtxoStore::::contains_key(karl_tokens_utxo_hash)); + + // should be success + let 
tx = Transaction { + inputs: vec![TransactionInput::new_empty(karl_tokens_utxo_hash)], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 400_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: token_id.clone(), + amount: 300_000_000, + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[karl_tokens_utxo], 0, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + assert_eq!( + 300_000_000, + UtxoStore::::get(alice_tokens_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { amount, .. } => amount, + _ => 0, + }) + .unwrap_or(0) + ); + + let new_alice_tokens_utxo_hash = tx.outpoint(0); + assert!(UtxoStore::::contains_key(new_alice_tokens_utxo_hash)); + assert_eq!( + 400_000_000, + UtxoStore::::get(new_alice_tokens_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { amount, .. 
} => amount, + _ => 0, + }) + .unwrap_or(0) + ); + }); +} + +// todo: This part isn't fully tested, left for the next PR +// #[test] +// fn test_nft_transferring() { +// let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); +// test_ext.execute_with(|| { +// let token_id = TokenId::new_asset(H256::random()); +// // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself +// let (utxo0, input0) = tx_input_gen_no_signature(); +// let data_hash = NftDataHash::Raw(build_random_vec(32)); +// let tx = Transaction { +// inputs: vec![input0], +// outputs: vec![ +// TransactionOutput::new_pubkey(90, H256::from(alice_pub_key)), +// TransactionOutput::new_p2pk_with_data( +// 10, +// H256::from(karl_pub_key), +// OutputData::NftMintV1 { +// token_id: token_id.clone(), +// data_hash: data_hash.clone(), +// metadata_uri: "mintlayer.org".as_bytes().to_vec(), +// }, +// ), +// ], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); +// let token_utxo_hash = tx.outpoint(1); +// let token_utxo = tx.outputs[1].clone(); +// +// // Let's fail on wrong token id +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(token_utxo_hash)], +// outputs: vec![TransactionOutput::new_p2pk_with_data( +// 0, +// H256::from(alice_pub_key), +// OutputData::TokenTransferV1 { +// token_id: TokenId::new_asset(H256::random()), +// amount: 1_00_000_000, +// }, +// )], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); +// frame_support::assert_err_ignore_postinfo!( +// Utxo::spend(Origin::signed(H256::zero()), tx), +// "input for the token not found" +// ); +// // Let's fail on exceed token amount +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(token_utxo_hash)], +// outputs: vec![TransactionOutput::new_p2pk_with_data( +// 0, +// H256::from(alice_pub_key), 
+// OutputData::TokenTransferV1 { +// token_id: token_id.clone(), +// amount: 1_000_000_001, +// }, +// )], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); +// frame_support::assert_err_ignore_postinfo!( +// Utxo::spend(Origin::signed(H256::zero()), tx), +// "output value must not exceed input value" +// ); +// +// // Let's send a big amount of MLT with the correct tokens +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(token_utxo_hash)], +// outputs: vec![TransactionOutput::new_p2pk_with_data( +// 1_000_000_000, +// H256::from(alice_pub_key), +// OutputData::TokenTransferV1 { +// token_id: token_id.clone(), +// amount: 1_000_000_000, +// }, +// )], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); +// frame_support::assert_err_ignore_postinfo!( +// Utxo::spend(Origin::signed(H256::zero()), tx), +// "output value must not exceed input value" +// ); +// +// // should be success +// let tx = Transaction { +// inputs: vec![TransactionInput::new_empty(token_utxo_hash)], +// outputs: vec![TransactionOutput::new_p2pk_with_data( +// 0, +// H256::from(alice_pub_key), +// OutputData::TokenTransferV1 { +// token_id: token_id.clone(), +// amount: 1, +// }, +// )], +// time_lock: Default::default(), +// } +// .sign_unchecked(&[token_utxo], 0, &karl_pub_key); +// assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); +// let nft_utxo_hash = tx.outpoint(0); +// assert!(!UtxoStore::::contains_key(H256::from( +// token_utxo_hash +// ))); +// assert!(UtxoStore::::contains_key(nft_utxo_hash)); +// assert_eq!( +// data_hash, +// crate::get_output_by_token_id::(token_id.clone()) +// .unwrap() +// .data +// .map(|x| match x { +// OutputData::NftMintV1 { data_hash, .. 
} => data_hash, +// _ => NftDataHash::Raw(Vec::new()), +// }) +// .unwrap_or(NftDataHash::Raw(Vec::new())) +// ); +// }); +// } + +#[test] +// Test tx where Input with token and without MLT, output has token (without MLT) +fn test_token_creation_with_insufficient_fee() { + let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys(); + test_ext.execute_with(|| { + // Alice issue 1000 MLS-01, and send them to Karl and the rest back to herself + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - u64::MAX as Value, + H256::from(karl_pub_key), + ), + TransactionOutput::new_p2pk_with_data( + crate::tokens::Mlt(99).to_munit(), + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let token_utxo_hash = tx.outpoint(1); + let token_utxo = tx.outputs[1].clone(); + let tx = Transaction { + inputs: vec![ + // Use here token issuance for example + TransactionInput::new_empty(token_utxo_hash), + ], + outputs: vec![TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: b"Enric".to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo], 0, &karl_pub_key); + frame_support::assert_err_ignore_postinfo!( + Utxo::spend(Origin::signed(H256::zero()), tx), + "insufficient fee" + ); + }); +} + +#[test] +fn test_transfer_and_issuance_in_one_tx() { + let (mut test_ext, alice_pub_key, karl_pub_key) = 
alice_test_ext_and_keys(); + test_ext.execute_with(|| { + // Alice issue 1_000_000_000 MLS-01, and send them to Karl + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![ + TransactionOutput::new_pubkey( + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(1000).to_munit(), + H256::from(alice_pub_key), + ), + TransactionOutput::new_p2pk_with_data( + crate::tokens::Mlt(1000).to_munit(), + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "BensT".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let first_issuance_token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let token_utxo_hash = tx.outpoint(1); + let token_utxo = tx.outputs[1].clone(); + + // Let's send 300_000_000 and rest back and create another token + let tx = Transaction { + inputs: vec![TransactionInput::new_empty(token_utxo_hash)], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: first_issuance_token_id.clone(), + amount: 300_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: first_issuance_token_id.clone(), + amount: 700_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "Token".as_bytes().to_vec(), + amount_to_issue: 5_000_000_000, + // Should be not more than 18 numbers + number_of_decimals: 12, + metadata_uri: "mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[token_utxo.clone()], 0, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), 
tx.clone())); + let alice_transfer_utxo_hash = tx.outpoint(0); + let karl_transfer_utxo_hash = tx.outpoint(1); + let karl_issuance_utxo_hash = tx.outpoint(2); + assert!(!UtxoStore::::contains_key(H256::from( + token_utxo_hash + ))); + assert!(UtxoStore::::contains_key(alice_transfer_utxo_hash)); + assert!(UtxoStore::::contains_key(karl_transfer_utxo_hash)); + assert!(UtxoStore::::contains_key(karl_issuance_utxo_hash)); + + // Let's check token transfer + UtxoStore::::get(alice_transfer_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, first_issuance_token_id); + assert_eq!(amount, 300_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + + UtxoStore::::get(karl_transfer_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, first_issuance_token_id); + assert_eq!(amount, 700_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + + // Let's check token issuance + UtxoStore::::get(karl_issuance_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenIssuanceV1 { + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_uri, + } => { + assert_eq!(token_ticker, "Token".as_bytes().to_vec()); + assert_eq!(amount_to_issue, 5_000_000_000); + assert_eq!(number_of_decimals, 12); + assert_eq!(metadata_uri, "mintlayer.org".as_bytes().to_vec()); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + }); +} + +#[test] +fn test_transfer_for_multiple_tokens() { + let (mut test_ext, alice_pub_key, karl_pub_key) = alice_test_ext_and_keys(); + test_ext.execute_with(|| { + // + // Issue token 1 and send all tokens to Karl + // + let (utxo0, input0) = tx_input_gen_no_signature(); + let tx = Transaction { + inputs: vec![input0], + outputs: vec![TransactionOutput::new_p2pk_with_data( + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(100).to_munit(), + 
H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "TKN1".as_bytes().to_vec(), + amount_to_issue: 1_000_000_000, + number_of_decimals: 2, + metadata_uri: "tkn1.mintlayer.org".as_bytes().to_vec(), + }, + )], + time_lock: Default::default(), + } + .sign_unchecked(&[utxo0.clone()], 0, &alice_pub_key); + let tkn1_token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let tkn1_utxo_hash = tx.outpoint(0); + let tkn1_utxo = tx.outputs[0].clone(); + // + // Issue token 2 and send all tokens to Alice + // + let input1 = TransactionInput::new_empty(tkn1_utxo_hash); + let tx = Transaction { + inputs: vec![input1], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn1_token_id.clone(), + amount: 1_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(100).to_munit(), + H256::from(alice_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "TKN2".as_bytes().to_vec(), + amount_to_issue: 2_000_000_000, + number_of_decimals: 4, + metadata_uri: "tkn2.mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&[tkn1_utxo.clone()], 0, &karl_pub_key); + let tkn2_token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let tkn1_utxo_hash = tx.outpoint(0); + let tkn2_utxo_hash = tx.outpoint(1); + // + // Issue token 3 and send all tokens to Karl + // + let input1 = TransactionInput::new_empty(tkn1_utxo_hash); + let input2 = TransactionInput::new_empty(tkn2_utxo_hash); + let prev_utxos = [tx.outputs[0].clone(), tx.outputs[1].clone()]; + let tx = Transaction { + inputs: vec![input1, input2], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn1_token_id.clone(), + amount: 
1_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(karl_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn2_token_id.clone(), + amount: 2_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(100).to_munit(), + H256::from(karl_pub_key), + OutputData::TokenIssuanceV1 { + token_ticker: "TKN3".as_bytes().to_vec(), + amount_to_issue: 3_000_000_000, + number_of_decimals: 6, + metadata_uri: "tkn3.mintlayer.org".as_bytes().to_vec(), + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&prev_utxos, 0, &alice_pub_key) + .sign_unchecked(&prev_utxos, 1, &alice_pub_key); + let tkn3_token_id = TokenId::new(&tx.inputs[0]); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let tkn1_utxo_hash = tx.outpoint(0); + let tkn2_utxo_hash = tx.outpoint(1); + let tkn3_utxo_hash = tx.outpoint(2); + + // + // Transfer 3 kinds of tokens to Alice and check them all + // + let input1 = TransactionInput::new_empty(tkn1_utxo_hash); + let input2 = TransactionInput::new_empty(tkn2_utxo_hash); + let input3 = TransactionInput::new_empty(tkn3_utxo_hash); + let prev_utxos = [tx.outputs[0].clone(), tx.outputs[1].clone(), tx.outputs[2].clone()]; + let tx = Transaction { + inputs: vec![input1, input2, input3], + outputs: vec![ + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn1_token_id.clone(), + amount: 1_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + 0, + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn2_token_id.clone(), + amount: 2_000_000_000, + }, + ), + TransactionOutput::new_p2pk_with_data( + ALICE_GENESIS_BALANCE - crate::tokens::Mlt(100).to_munit(), + H256::from(alice_pub_key), + OutputData::TokenTransferV1 { + token_id: tkn3_token_id.clone(), + amount: 3_000_000_000, + }, + ), + ], + time_lock: Default::default(), + } + .sign_unchecked(&prev_utxos, 
0, &karl_pub_key) + .sign_unchecked(&prev_utxos, 1, &karl_pub_key) + .sign_unchecked(&prev_utxos, 2, &karl_pub_key); + assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + let tkn1_utxo_hash = tx.outpoint(0); + let tkn2_utxo_hash = tx.outpoint(1); + let tkn3_utxo_hash = tx.outpoint(2); + // Check tkn1 + UtxoStore::::get(tkn1_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, tkn1_token_id); + assert_eq!(amount, 1_000_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + // Check tkn2 + UtxoStore::::get(tkn2_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, tkn2_token_id); + assert_eq!(amount, 2_000_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + // Check tkn3 + UtxoStore::::get(tkn3_utxo_hash) + .unwrap() + .data + .map(|x| match x { + OutputData::TokenTransferV1 { token_id, amount } => { + assert_eq!(token_id, tkn3_token_id); + assert_eq!(amount, 3_000_000_000); + } + _ => { + panic!("corrupted data"); + } + }) + .unwrap(); + }); +} + +#[test] +fn test_immutable_tx_format() { + // todo: Testing the compatibility of the old version with the new one - not done yet +} + +#[test] +fn test_burn_tokens() { + // todo: Burn tokens has not tested yet +} + +#[test] +fn test_token_id() { + // todo: Testing token id - not done yet +} diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs new file mode 100644 index 0000000..4e62f43 --- /dev/null +++ b/pallets/utxo/src/tokens.rs @@ -0,0 +1,121 @@ +#![cfg_attr(not(feature = "std"), no_std)] + +use crate::{TransactionInput, MLT_UNIT}; +use base58_nostd::{FromBase58, FromBase58Error, ToBase58, TOKEN_ID_PREFIX}; +use codec::{Decode, Encode}; +use frame_support::ensure; +use frame_support::{dispatch::Vec, RuntimeDebug}; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use 
sp_core::Hasher; +use sp_core::H160; +use sp_runtime::traits::BlakeTwo256; +use sp_std::vec; + +const LENGTH_BYTES_TO_REPRESENT_ID: usize = 20; + +pub type Value = u128; + +pub struct Mlt(pub Value); +impl Mlt { + pub fn to_munit(&self) -> Value { + self.0 * MLT_UNIT + } +} + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +pub struct TokenId { + inner: H160, +} + +impl TokenId { + // Token id depends on signed or unsigned the same input + pub fn new(first_input: &TransactionInput) -> TokenId { + let first_input_hash = BlakeTwo256::hash(first_input.encode().as_slice()); + TokenId { + // We are loosing the first bytes of H256 over here and using 20 the last bytes + inner: H160::from(first_input_hash), + } + } + + pub fn to_string(&self) -> Vec { + self.inner.as_bytes().to_mls_b58check(Some(vec![TOKEN_ID_PREFIX])).to_vec() + } + + pub fn from_string(data: &str) -> Result { + let data = data.from_mls_b58check(Some(vec![TOKEN_ID_PREFIX])).map_err(|x| match x { + FromBase58Error::InvalidBase58Character { .. } => "Invalid Base58 character", + FromBase58Error::InvalidBase58Length => "Invalid Base58 length", + FromBase58Error::InvalidChecksum => "Invalid checksum", + FromBase58Error::InvalidPrefix => "Invalid token id", + })?; + Ok(TokenId { + inner: TokenId::hash160_from_bytes(data.as_slice())?, + }) + } + + fn hash160_from_bytes(bytes: &[u8]) -> Result { + ensure!( + bytes.len() == LENGTH_BYTES_TO_REPRESENT_ID, + "Unexpected length of the asset ID" + ); + let mut buffer = [0u8; 20]; + buffer.copy_from_slice(bytes); + Ok(H160::from(buffer)) + } +} + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +pub enum OutputData { + // TokenTransfer data to another user. If it is a token, then the token data must also be transferred to the recipient. 
+ #[codec(index = 1)] + TokenTransferV1 { token_id: TokenId, amount: Value }, + // A new token creation + #[codec(index = 2)] + TokenIssuanceV1 { + // token_id: TokenId, + token_ticker: Vec, + amount_to_issue: Value, + // Should be not more than 18 numbers + number_of_decimals: u8, + metadata_uri: Vec, + }, + // todo: This part isn't fully tested, left for the next PR + + // // Burning a token or NFT + // #[codec(index = 3)] + // TokenBurnV1 { + // token_id: TokenId, + // amount_to_burn: Value, + // }, + // // A new NFT creation + // #[codec(index = 4)] + // NftMintV1 { + // token_id: TokenId, + // data_hash: NftDataHash, + // metadata_uri: Vec, + // }, +} + +// todo: This part isn't fully tested, left for the next PR +// #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +// #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +// pub enum NftDataHash { +// #[codec(index = 1)] +// Hash32([u8; 32]), +// #[codec(index = 2)] +// Raw(Vec), +// // Or any type that you want to implement +// } + +impl OutputData { + pub(crate) fn id(&self, first_input: &TransactionInput) -> Option { + match self { + OutputData::TokenTransferV1 { ref token_id, .. } => Some(token_id.clone()), + // OutputData::NftMintV1 { .. } | + OutputData::TokenIssuanceV1 { .. 
} => Some(TokenId::new(first_input)), + } + } +} diff --git a/pallets/utxo/tokens/Cargo.toml b/pallets/utxo/tokens/Cargo.toml deleted file mode 100644 index f78e558..0000000 --- a/pallets/utxo/tokens/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "pallet-utxo-tokens" -version = "0.1.0" -authors = ["RBB Lab"] -edition = "2018" - -[dependencies] -hex-literal = "0.2.1" -log = "0.4.8" - -[dependencies.frame-support] -default-features = false -git = 'https://github.com/paritytech/substrate.git' -version = '4.0.0-dev' -branch = "master" - -[dependencies.serde] -version = "1.0.104" -features = ["derive"] - -[dependencies.codec] -package = "parity-scale-codec" -version = "2.0.0" -default-features = false -features = ["derive"] diff --git a/pallets/utxo/tokens/Readme.md b/pallets/utxo/tokens/Readme.md deleted file mode 100644 index 412d360..0000000 --- a/pallets/utxo/tokens/Readme.md +++ /dev/null @@ -1,24 +0,0 @@ -# Token creation - -Call the extrinsic: -```bash -* Creator - Alice -* Pubkey - 0x2e1e60ac02d5a716b300e83b04bb4ddd48360ea119f5024f0ea7b2b1c1578a52 -* Input - we will take Fee over here -* Token name - any value -* Token ticker - any value -* Supply - any value -``` - -# Request the tokens list - -Call the RPC: - -```bash -curl http://localhost:9933 -H "Content-Type:application/json;charset=utf-8" -d '{ - "jsonrpc":"2.0", - "id":1, - "method":"tokens_list", - "params": [] -}' -``` \ No newline at end of file diff --git a/pallets/utxo/tokens/src/lib.rs b/pallets/utxo/tokens/src/lib.rs deleted file mode 100644 index 8c08575..0000000 --- a/pallets/utxo/tokens/src/lib.rs +++ /dev/null @@ -1,44 +0,0 @@ -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; - -use codec::{Decode, Encode}; -use frame_support::{dispatch::Vec, RuntimeDebug}; - -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)] -pub struct TokenInstance { - pub 
id: u64, - pub name: Vec, - pub ticker: Vec, - pub supply: u128, - // We can add another fields like: - // pub number_format: NumberFormat, - // pub image: UUID, - // pub transaction: XXX, -} - -impl Default for TokenInstance { - fn default() -> Self { - Self { - id: 0, - name: Vec::new(), - ticker: Vec::new(), - supply: 0, - } - } -} - -impl TokenInstance { - pub fn new(id: u64, name: Vec, ticker: Vec, supply: u128) -> Self { - Self { - id, - name, - ticker, - supply, - } - } -} - -pub type TokenListData = Vec; diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 2f61151..1c0c972 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -17,7 +17,6 @@ branch = "master" [dependencies] log = "0.4.14" -pallet-utxo-tokens = { path = "../pallets/utxo/tokens" } [dependencies.codec] default-features = false diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 81bf088..7456285 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -326,7 +326,6 @@ impl pallet_utxo::Config for Runtime { type Call = Call; type WeightInfo = pallet_utxo::weights::WeightInfo; type ProgrammablePool = pallet_pp::Pallet; - type AssetId = u64; type RewardReductionFraction = RewardReductionFraction; type RewardReductionPeriod = RewardReductionPeriod; @@ -703,12 +702,6 @@ impl_runtime_apis! { fn send() -> u32 { Utxo::send() } - - // What means Vec<(u64, Vec)> ? 
Have a look at utxo/rpc/runtime-api/src/lib.rs - fn tokens_list() -> Vec<(u64, Vec)> { - let list = Utxo::tokens_list(); - list.into_iter().map(|x| (x.id, x.name)).collect() - } } impl pallet_contracts_rpc_runtime_api::ContractsApi< diff --git a/runtime/src/staking.rs b/runtime/src/staking.rs index deffd1f..2abe1a1 100644 --- a/runtime/src/staking.rs +++ b/runtime/src/staking.rs @@ -51,7 +51,7 @@ where fn bond( controller_account: StakeAccountId, stash_account: StakeAccountId, - value: pallet_utxo::Value, + value: pallet_utxo::tokens::Value, ) -> DispatchResult { let controller_lookup: LookupSourceOf = T::Lookup::unlookup(controller_account.clone()); let reward_destination = pallet_staking::RewardDestination::Staked; diff --git a/test/functional/custom-types.json b/test/functional/custom-types.json index 713b513..0e06aac 100644 --- a/test/functional/custom-types.json +++ b/test/functional/custom-types.json @@ -43,6 +43,59 @@ [ "LockExtraForStaking", "DestinationStakeExtra" ] ] }, + "NftDataHash": { + "type": "struct", + "type_mapping": [ + [ "Hash32", "[u8; 32]" ], + [ "Raw", "Vec" ] + ] + }, + "TokenId": { + "type": "struct", + "type_mapping": [ + [ "inner", "H160" ] + ] + }, + "TokenTransferV1": { + "type": "struct", + "type_mapping": [ + [ "token_id", "TokenId" ], + [ "amount", "Value" ] + ] + }, + "TokenIssuanceV1": { + "type": "struct", + "type_mapping": [ + [ "token_ticker", "Vec" ], + [ "amount_to_issue", "Value" ], + [ "number_of_decimals", "u8" ], + [ "metadata_uri", "Vec" ] + ] + }, + "TokenBurnV1": { + "type": "struct", + "type_mapping": [ + [ "token_id", "TokenId" ], + [ "amount_to_burn", "Value" ] + ] + }, + "NftMintV1": { + "type": "struct", + "type_mapping": [ + [ "token_id", "TokenId" ], + [ "data_hash", "NftDataHash" ], + [ "metadata_uri", "Vec" ] + ] + }, + "OutputData": { + "type": "enum", + "type_mapping": [ + [ "TokenTransfer", "TokenTransferV1" ], + [ "TokenIssuance", "TokenIssuanceV1" ], + [ "TokenBurn", "TokenBurnV1" ], + [ "NftMint", 
"NftMintV1" ] + ] + }, "TransactionInput": { "type": "struct", "type_mapping": [ @@ -55,8 +108,8 @@ "type": "struct", "type_mapping": [ [ "value", "Value" ], - [ "header", "TXOutputHeader"], - [ "destination", "Destination" ] + [ "destination", "Destination" ], + [ "data", "Option"] ] }, "Transaction": { @@ -79,10 +132,8 @@ "Address": "MultiAddress", "LookupSource": "MultiAddress", "Value": "u128", - "TXOutputHeader": "u128", "value": "Value", "pub_key": "H256", - "header": "TXOutputHeader", "Difficulty": "U256", "DifficultyAndTimestamp": { "type": "struct", diff --git a/test/functional/example_test.py b/test/functional/example_test.py index 5792c72..bb29f69 100755 --- a/test/functional/example_test.py +++ b/test/functional/example_test.py @@ -113,14 +113,20 @@ def run_test(self): outputs=[ utxo.Output( value=50, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), utxo.Output( value=100, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), + # This output prevent reward overflow + utxo.Output( + value=3981553255926290448385, # genesis amount - u64::MAX + destination=utxo.DestPubkey(alice.public_key), + data=None + ) ] ).sign(alice, [utxos[0][1]]) @@ -135,8 +141,8 @@ def run_test(self): outputs=[ utxo.Output( value=60, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), ] ).sign(alice, [tx1.outputs[1]]) diff --git a/test/functional/feature_alice_bob_test.py b/test/functional/feature_alice_bob_test.py index 00bf8d9..1de62a7 100755 --- a/test/functional/feature_alice_bob_test.py +++ b/test/functional/feature_alice_bob_test.py @@ -73,8 +73,8 @@ def run_test(self): outputs=[ utxo.Output( value=50, - header=0, - destination=utxo.DestPubkey(bob.public_key) + destination=utxo.DestPubkey(bob.public_key), + data=None ), ] ).sign(alice, [utxos[0][1]]) @@ -88,13 
+88,13 @@ def run_test(self): outputs=[ utxo.Output( value=30, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), utxo.Output( value=20, - header=0, - destination=utxo.DestPubkey(bob.public_key) + destination=utxo.DestPubkey(bob.public_key), + data=None ), ] ).sign(bob, tx1.outputs) diff --git a/test/functional/feature_smart_contract_test.py b/test/functional/feature_smart_contract_test.py index 45b5ce0..d155788 100755 --- a/test/functional/feature_smart_contract_test.py +++ b/test/functional/feature_smart_contract_test.py @@ -77,17 +77,23 @@ def run_test(self): outputs=[ utxo.Output( value=50, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), utxo.Output( value=10, - header=0, destination=utxo.DestCreatePP( code=os.path.join(os.path.dirname(__file__), "code.wasm"), data=[0xed, 0x4b, 0x9d, 0x1b], # default() constructor selector - ) + ), + data=None ), + # This output prevent reward overflow + utxo.Output( + value=3981553255926290448385, # = genesis amount - u64::MAX + destination=utxo.DestPubkey(alice.public_key), + data=None + ) ] ).sign(alice, [initial_utxo[1]]) @@ -122,17 +128,17 @@ def run_test(self): outputs=[ utxo.Output( value=49, - header=0, - destination=utxo.DestPubkey(alice.public_key) + destination=utxo.DestPubkey(alice.public_key), + data=None ), utxo.Output( value=1, - header=0, destination=utxo.DestCallPP( dest_account=acc_id, fund=False, input_data=bytes.fromhex(msg_data.to_hex()[2:]), - ) + ), + data=None ), ] ).sign(alice, [tx0.outputs[0]], [0]) diff --git a/test/functional/feature_staking_diff_addresses.py b/test/functional/feature_staking_diff_addresses.py index 036beef..7b6faac 100755 --- a/test/functional/feature_staking_diff_addresses.py +++ b/test/functional/feature_staking_diff_addresses.py @@ -82,18 +82,18 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - 
destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0x7e0dd8c53a47b22451dc3a73b29d72a2ce1405a4191f3c31ff927fea7b0514182f81ffc984364cc85499595eaefc509a06710c5277dcd22ebd7464917dfd9230') + destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0x7e0dd8c53a47b22451dc3a73b29d72a2ce1405a4191f3c31ff927fea7b0514182f81ffc984364cc85499595eaefc509a06710c5277dcd22ebd7464917dfd9230'), + data=None ), utxo.Output( value=40001 * COIN, - header=0, - destination=utxo.DestLockForStaking(dave_stash.public_key, dave.public_key,'0x0699553a3c5bfa89e41d94a45ceb9103ae9f87089b4a70de4c2a3eb922e1b9362fe0d8868ae4c9d5a9fba98d29b45d2c2630f4936077999f9334da1cca2e37e9') + destination=utxo.DestLockForStaking(dave_stash.public_key, dave.public_key,'0x0699553a3c5bfa89e41d94a45ceb9103ae9f87089b4a70de4c2a3eb922e1b9362fe0d8868ae4c9d5a9fba98d29b45d2c2630f4936077999f9334da1cca2e37e9'), + data=None ), utxo.Output( value=39999919999 * COIN, - header=0, - destination=utxo.DestPubkey(charlie.public_key) + destination=utxo.DestPubkey(charlie.public_key), + data=None ) ] ).sign(alice, [utxos[0][1]]) diff --git a/test/functional/feature_staking_extra.py b/test/functional/feature_staking_extra.py index 899b759..ad45f0d 100755 --- a/test/functional/feature_staking_extra.py +++ b/test/functional/feature_staking_extra.py @@ -80,9 +80,15 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - destination=utxo.DestLockExtraForStaking(alice_stash.public_key, alice.public_key) + destination=utxo.DestLockExtraForStaking(alice_stash.public_key, alice.public_key), + data=None ), + # This output prevent reward overflow + utxo.Output( + value=3981553255926290448385, # genesis amount - u64::MAX + destination=utxo.DestPubkey(alice.public_key), + data=None + ) ] ).sign(alice_stash, [utxos[0][1]]) (_,_,events) = client.submit(alice_stash, tx1) diff --git a/test/functional/feature_staking_extra_not_validator.py 
b/test/functional/feature_staking_extra_not_validator.py index 6b24aa3..7aa0b9c 100755 --- a/test/functional/feature_staking_extra_not_validator.py +++ b/test/functional/feature_staking_extra_not_validator.py @@ -85,8 +85,8 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - destination=utxo.DestLockExtraForStaking(charlie_stash.public_key, charlie.public_key) + destination=utxo.DestLockExtraForStaking(charlie_stash.public_key, charlie.public_key), + data=None ), ] ).sign(charlie_stash, [utxos[0][1]]) diff --git a/test/functional/feature_staking_extra_wrong_controller.py b/test/functional/feature_staking_extra_wrong_controller.py index 67dcf1e..b46d52e 100755 --- a/test/functional/feature_staking_extra_wrong_controller.py +++ b/test/functional/feature_staking_extra_wrong_controller.py @@ -80,8 +80,8 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - destination=utxo.DestLockExtraForStaking(alice_stash.public_key, bob.public_key) + destination=utxo.DestLockExtraForStaking(alice_stash.public_key, bob.public_key), + data=None ), ] ).sign(alice_stash, [utxos[0][1]]) diff --git a/test/functional/feature_staking_first_time.py b/test/functional/feature_staking_first_time.py index 9adf2db..48aa722 100755 --- a/test/functional/feature_staking_first_time.py +++ b/test/functional/feature_staking_first_time.py @@ -77,9 +77,15 @@ def run_test(self): outputs=[ utxo.Output( value=50000 * COIN, - header=0, - destination=utxo.DestPubkey(charlie_stash.public_key) + destination=utxo.DestPubkey(charlie_stash.public_key), + data=None ), + utxo.Output( + value=39999949950 * COIN, + destination=utxo.DestPubkey(alice.public_key), + data=None + ), + ] ).sign(alice, [utxos[0][1]]) client.submit(alice, tx1) @@ -92,17 +98,17 @@ def run_test(self): outputs=[ utxo.Output( value=40000 * COIN, - header=0, - destination=utxo.DestLockForStaking(charlie_stash.public_key, 
charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18') + destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18'), + data=None ), utxo.Output( value=9999 * COIN, - header=0, - destination=utxo.DestPubkey(charlie_stash.public_key) + destination=utxo.DestPubkey(charlie_stash.public_key), + data=None ), ] - ).sign(charlie_stash, tx1.outputs) - (_,_,events) = client.submit(charlie_stash, tx2) + ).sign(charlie_stash, [tx1.outputs[0]]) + client.submit(charlie_stash, tx2) # there should already be 3 staking, adding Charlie in the list. assert_equal( len(list(client.staking_count())), 3 ) diff --git a/test/functional/feature_staking_less_than_minimum.py b/test/functional/feature_staking_less_than_minimum.py index a0e6ca7..e23d78f 100755 --- a/test/functional/feature_staking_less_than_minimum.py +++ b/test/functional/feature_staking_less_than_minimum.py @@ -77,8 +77,8 @@ def run_test(self): outputs=[ utxo.Output( value=50000 * COIN, - header=0, - destination=utxo.DestPubkey(charlie_stash.public_key) + destination=utxo.DestPubkey(charlie_stash.public_key), + data=None ), ] ).sign(alice, [utxos[0][1]]) @@ -92,13 +92,13 @@ def run_test(self): outputs=[ utxo.Output( value=4000 * COIN, - header=0, - destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18') + destination=utxo.DestLockForStaking(charlie_stash.public_key, charlie.public_key,'0xa03bcfaac6ebdc26bb9c256c51b08f9c1c6d4569f48710a42939168d1d7e5b6086b20e145e97158f6a0b5bff2994439d3320543c8ff382d1ab3e5eafffaf1a18'), + data=None ), utxo.Output( value=45999 * COIN, - header=0, - 
destination=utxo.DestPubkey(charlie_stash.public_key) + destination=utxo.DestPubkey(charlie_stash.public_key), + data=None ), ] ).sign(charlie_stash, tx1.outputs) diff --git a/test/functional/test_framework/mintlayer/utxo.py b/test/functional/test_framework/mintlayer/utxo.py index 9cbdc3f..8b40da5 100644 --- a/test/functional/test_framework/mintlayer/utxo.py +++ b/test/functional/test_framework/mintlayer/utxo.py @@ -242,15 +242,15 @@ def get_ss58_address(self): class Output(): - def __init__(self, value, header, destination): + def __init__(self, value, destination, data): self.value = value - self.header = header self.destination = destination + self.data = data @staticmethod def load(obj): dest = Destination.load(obj['destination']) - return Output(obj['value'], obj['header'], dest) + return Output(obj['value'], dest, obj['data']) def type_string(self): return 'TransactionOutput' @@ -258,8 +258,8 @@ def type_string(self): def json(self): return { 'value': self.value, - 'header': self.header, 'destination': self.destination.json(), + 'data': self.data, }