Merged
53 changes: 39 additions & 14 deletions hathorlib/token_creation_tx.py
@@ -13,7 +13,7 @@
See the License for the specific language governing permissions and
limitations under the License.
"""

from enum import IntEnum
from struct import error as StructError, pack
from typing import Tuple

@@ -32,9 +32,13 @@
# Signal bits (B), version (B), inputs len (B), outputs len (B)
_SIGHASH_ALL_FORMAT_STRING = '!BBBB'


# used when (de)serializing token information
# version 1 expects only token name and symbol
TOKEN_INFO_VERSION = 1
# version 1 is the default behavior
class TokenVersion(IntEnum):
NATIVE = 0
DEPOSIT = 1
FEE = 2


class TokenCreationTransaction(Transaction):
@@ -43,11 +43,19 @@ def __init__(self) -> None:
# for this special tx, its own hash is used as the created token uid. We're artificially
# creating the tokens list here
self.tokens = []
self.token_version: TokenVersion = TokenVersion.DEPOSIT
Member:
Shouldn't we create a method to set the token version?

Contributor Author:
The token version is being set in create_from_struct, similar to the other props of the tx.
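For instance, a minimal sketch of that flow (using only names that appear in this diff; `parse_token_creation` is just an illustrative wrapper):

```python
from hathorlib.token_creation_tx import TokenCreationTransaction, TokenVersion


def parse_token_creation(data: bytes) -> TokenCreationTransaction:
    """create_from_struct parses the funds fields, which call
    deserialize_token_info and assign token_name, token_symbol
    and token_version on the instance -- no dedicated setter needed."""
    tx = TokenCreationTransaction.create_from_struct(data)
    assert isinstance(tx.token_version, TokenVersion)
    return tx
```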


def __str__(self) -> str:
return ('TokenCreationTransaction(nonce=%d, timestamp=%s, version=%s, weight=%f, hash=%s, '
'token_name=%s, token_symbol=%s)' % (self.nonce, self.timestamp, int(self.version),
self.weight, self.hash_hex, self.token_name, self.token_symbol))
return (
f'TokenCreationTransaction(nonce={self.nonce}, '
f'timestamp={self.timestamp}, '
f'version={int(self.version)}, '
f'weight={self.weight:.6f}, '
f'hash={self.hash_hex}, '
f'token_name={self.token_name}, '
f'token_symbol={self.token_symbol}, '
f'token_version={self.token_version})'
)

def update_hash(self) -> None:
""" When we update the hash, we also have to update the tokens uid list
@@ -78,7 +90,12 @@ def get_funds_fields_from_struct(self, buf: bytes) -> bytes:
self.outputs.append(txout)

# token name and symbol
self.token_name, self.token_symbol, buf = TokenCreationTransaction.deserialize_token_info(buf)
(
self.token_name,
self.token_symbol,
self.token_version,
buf
) = TokenCreationTransaction.deserialize_token_info(buf)

return buf

@@ -148,31 +165,35 @@ def serialize_token_info(self) -> bytes:
encoded_symbol = self.token_symbol.encode('utf-8')

ret = b''
ret += int_to_bytes(TOKEN_INFO_VERSION, 1)
ret += int_to_bytes(self.token_version, 1)
ret += int_to_bytes(len(encoded_name), 1)
ret += encoded_name
ret += int_to_bytes(len(encoded_symbol), 1)
ret += encoded_symbol

return ret

@classmethod
def deserialize_token_info(cls, buf: bytes) -> Tuple[str, str, bytes]:
""" Gets the token name and symbol from serialized format
def deserialize_token_info(cls, buf: bytes) -> Tuple[str, str, TokenVersion, bytes]:
""" Gets the token name, symbol and version from serialized format
"""
(token_info_version,), buf = unpack('!B', buf)
if token_info_version != TOKEN_INFO_VERSION:
raise ValueError('unknown token info version: {}'.format(token_info_version))
(raw_token_version,), buf = unpack('!B', buf)
try:
token_version = TokenVersion(raw_token_version)
except ValueError:
raise ValueError('unknown token version: {}'.format(raw_token_version))

(name_len,), buf = unpack('!B', buf)
name, buf = unpack_len(name_len, buf)

(symbol_len,), buf = unpack('!B', buf)
symbol, buf = unpack_len(symbol_len, buf)

# Token name and symbol can be only utf-8 valid strings for now
decoded_name = decode_string_utf8(name, 'Token name')
decoded_symbol = decode_string_utf8(symbol, 'Token symbol')

return decoded_name, decoded_symbol, buf
return decoded_name, decoded_symbol, token_version, buf

def verify_token_info(self) -> None:
""" Validates token info
@@ -190,6 +211,10 @@ def verify_token_info(self) -> None:
if clean_token_string(self.token_symbol) == clean_token_string(settings.HATHOR_TOKEN_SYMBOL):
raise TransactionDataError('Invalid token symbol ({})'.format(self.token_symbol))

# Can't create the token with NATIVE version
if self.token_version == TokenVersion.NATIVE:
raise TransactionDataError('Invalid token version ({})'.format(self.token_version))

def is_nft_creation_standard(self) -> bool:
"""Returns True if it's a standard NFT creation transaction"""
# We will check the outputs to validate that we have an NFT standard creation
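For reference outside the diff view: the token-info payload that `serialize_token_info` writes (and `deserialize_token_info` reads) is one version byte, one name-length byte, the UTF-8 name, one symbol-length byte, and the UTF-8 symbol. A standalone sketch of that layout follows; the helper `encode_token_info` is illustrative only and not part of hathorlib, which uses `int_to_bytes`/`unpack_len` internally.

```python
import struct
from enum import IntEnum


class TokenVersion(IntEnum):  # mirrors the enum added in this diff
    NATIVE = 0
    DEPOSIT = 1
    FEE = 2


def encode_token_info(name: str, symbol: str, version: TokenVersion) -> bytes:
    """Lay out token info as serialize_token_info does:
    version (1 byte) | name len (1 byte) | name | symbol len (1 byte) | symbol."""
    encoded_name = name.encode('utf-8')
    encoded_symbol = symbol.encode('utf-8')
    return (
        struct.pack('!B', int(version))
        + struct.pack('!B', len(encoded_name)) + encoded_name
        + struct.pack('!B', len(encoded_symbol)) + encoded_symbol
    )


# Example: the FEE token used in the new test case
blob = encode_token_info('ToTheMoon', '🚀', TokenVersion.FEE)
assert blob[0] == 2 and blob[1] == len('ToTheMoon'.encode('utf-8'))
```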
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -14,7 +14,7 @@

[tool.poetry]
name = "hathorlib"
version = "0.11.0"
version = "0.12.0"
description = "Hathor Network base objects library"
authors = ["Hathor Team <contact@hathor.network>"]
license = "Apache-2.0"
36 changes: 35 additions & 1 deletion tests/test_basic.py
@@ -90,7 +90,7 @@ def test_token_creation_basics(self):
str(tx),
'TokenCreationTransaction(nonce=33518441, timestamp=1578090723, version=2, weight=20.645186, '
'hash=00000828d80dd4cd809c959139f7b4261df41152f4cce65a8777eb1c3a1f9702, '
'token_name=ToTheMoon, token_symbol=🚀)'
'token_name=ToTheMoon, token_symbol=🚀, token_version=1)'
)
self.assertEqual(
repr(tx),
@@ -107,6 +107,40 @@ def test_token_creation_basics(self):
tx.update_hash()
self.assertFalse(tx.verify_pow())

def test_token_creation_with_fee_version(self):
"""Test TokenCreationTransaction with token_version=2 (FEE)"""
from hathorlib.token_creation_tx import TokenVersion

# Using the same structure as test_token_creation_basics but with token_version=2
data = bytes.fromhex('00020104000005551d7740fd7d3c0acc50b5677fdd844f1225985aa431e1712af2a2fd'
'8900006a473045022100a445edb5cd6c79a0a7b5ed837582fd65b8d511ee60b64fd076'
'e07bd8f63f75a202202dca24320bffc4c3ca2a07cdfff38f7c839bde70ed49ef634ac6'
'588972836cab2103bfa995d676e3c0ed7b863c74cfef9683fab3163b42b6f21442326a'
'023fc57fba0000264800001976a9146876f9578221fdb678d4e8376503098a9228b132'
'88ac00004e2001001976a914031761ef85a24603203c97e75af355b83209f08f88ac00'
'00000181001976a9149f091256cb98649c7c35df0aad44d7805710691e88ac00000002'
'81001976a914b1d7a5ee505ad4d3b93ea1a5162ba83d5049ec4e88ac0209546f546865'
'4d6f6f6e04f09f9a804034a52aec6cece75e0fc0e30200001a72272f48339fcc5d5ec5'
'deaf197855964b0eb912e8c6eefe00928b6cf600001055641c20b71871ed2c5c7d4096'
'a34f40888d79c25bce74421646e732dc01ff730d')
tx = TokenCreationTransaction.create_from_struct(data)

# Verify the token version is FEE (2)
self.assertEqual(tx.token_version, TokenVersion.FEE)

# Verify the transaction can be serialized and deserialized correctly
self.assertEqual(data, bytes(tx))

# Verify basic transaction properties
self.assertTrue(tx.is_transaction)
self.assertFalse(tx.is_block)

# Verify the string representation includes token_version=2
str_repr = str(tx)
self.assertIn('token_version=2', str_repr)
self.assertIn('token_name=ToTheMoon', str_repr)
self.assertIn('token_symbol=🚀', str_repr)

def test_script_basics(self):
create_output_script(decode_address('HVZjvL1FJ23kH3buGNuttVRsRKq66WHUVZ'))
