diff --git a/.gitignore b/.gitignore index e69de29b..475f0f40 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1,165 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ + +# Output file +output.txt \ No newline at end of file diff --git a/main.py b/main.py deleted file mode 100644 index 8f3c43ef..00000000 --- a/main.py +++ /dev/null @@ -1,457 +0,0 @@ -import argparse -import ecdsa -import hashlib -import os -import json -import time - -from sha3 import sha3_224 -from Crypto.Hash import RIPEMD160 - -TARGET = "0000ffff00000000000000000000000000000000000000000000000000000000" -TARGET_HEX = 0x0000ffff00000000000000000000000000000000000000000000000000000000 - -class MemPool: - def __init__(self, root_dir): - self.root_dir = root_dir - self.transaction_files = [os.path.join(self.root_dir, file) for file in os.listdir(self.root_dir) if file.endswith('.json')] - self.transactions = [Transaction(file) for file in self.transaction_files] - self.valid_transactions = [transaction.json_transaction for transaction in self.transactions if transaction.is_valid()] - -# scriptpubkey_type can be: p2sh, p2pkh, v0_p2wsh, v1_p2tr, v0_p2wpkh. - -def get_filename_without_extension(file_path): - # Get the base filename from the path - filename = os.path.basename(file_path) - # Remove the extension - filename_without_extension = os.path.splitext(filename)[0] - return filename_without_extension - -def valid_transaction_syntax(json_transaction): - required = ["version", "locktime", "vin", "vout"] - - for field in required: - if field not in json_transaction: - print('Required field is missing') - return False - - if not isinstance(json_transaction["version"], int): - print('Invalid data type') - return False - - if not isinstance(json_transaction["locktime"], int): - print('Invalid data type') - return False - - if not isinstance(json_transaction["vin"], list): - print('Invalid data type') - return False - - if not isinstance(json_transaction["vout"], list): - print('Invalid data type') - return False - - # Check inputs - for input in json_transaction['vin']: - if not isinstance(input, dict): - print('Invalid data type') - return False - - if 'txid' not in input or 'vout' not in input: - print('Invalid data type') - return False - - # Check outputs - - for output in json_transaction['vout']: - if not isinstance(output, dict): - print('Invalid data type') - return False - - if 'scriptpubkey' not in output or 'value' not in output: - print('Invalid data type') - return False - - return True - -def non_empty_vin_vout(vin, vout): - # Make sure neither in or out lists are empty - if not vin: - print("vin is empty") - return False - if not vout: - print("vout is empty") - return False - - return True - -def serialize_input(tx_input, override=None): - serialized_input = [] - serialized_input += [bytes.fromhex(tx_input["txid"])[::-1]] # Reversed txid - serialized_input += [tx_input["vout"].to_bytes(4, byteorder="little")] - - if override is None: - serialized_input += [serialize_script(bytes.fromhex(tx_input["scriptsig"]))] - elif override is True: - serialized_input += [serialize_script(bytes.fromhex(tx_input["prevout"]["scriptpubkey"]))] - elif override is False: - serialized_input += [serialize_script(bytes.fromhex(""))] - - serialized_input += [tx_input["sequence"].to_bytes(4, byteorder="little")] - - return b''.join(serialized_input) - -def encode_int(i, nbytes, encoding='little'): - return i.to_bytes(nbytes, encoding) - -def serialize_script(script): - return b''.join([encode_varint(len(script)), script]) - -def serialize_output(output): - serialized_output = [] - - serialized_output += [output["value"].to_bytes(8, byteorder="little")] - serialized_output += 
[serialize_script(bytes.fromhex(output["scriptpubkey"]))] - - return b''.join(serialized_output) - -def encode_varint(i): - if i < 0xfd: - return bytes([i]) - elif i < 0x10000: - return b'\xfd' + i.to_bytes(2, 'little') - elif i < 0x100000000: - return b'\xfe' + i.to_bytes(4, 'little') - elif i < 0x10000000000000000: - return b'\xff' + i.to_bytes(8, 'little') - else: - raise ValueError("integer too large: %d" % (i, )) - -def serialize_transaction(transaction, index=-1, sighash_type=1, segwit=False): - # for now for p2pkh - message = [] - message += [transaction["version"].to_bytes(4, byteorder="little")] - - if segwit: - message += [b'\x00\x01'] # segwit marker - - # inputs - message += [encode_varint(len(transaction["vin"]))] - - inputs = transaction["vin"] - outputs = transaction["vout"] - - if index == -1: - message += [serialize_input(tx_in) for tx_in in inputs] - else: - message += [serialize_input(tx_in, index == i) for i, tx_in in enumerate(inputs)] - - # outputs - message += [encode_varint(len(transaction["vout"]))] - message += [serialize_output(tx_out) for tx_out in outputs] - - # witness - if segwit: - for tx_in in inputs: - message += [encode_varint(len(tx_in["witness"]))] - - for item in tx_in["witness"]: - item_bytes = bytes.fromhex(item) - message += [encode_varint(len(item_bytes)), item_bytes] - - # encode rest of data - message += [transaction["locktime"].to_bytes(4, byteorder="little")] - hash_type = 1 - message += [hash_type.to_bytes(4, 'little') if index != -1 else b''] # 1 = SIGHASH_ALL - - return b''.join(message) - -def parse_der_signature(der_signature_with_hash_type): - # Remove the hash_type from the DER signature - der_signature = der_signature_with_hash_type[:-2] - - # Parse the DER signature - der_bytes = bytes.fromhex(der_signature) - r_length = der_bytes[3] - r = int.from_bytes(der_bytes[4:4 + r_length], 'big') - s_length_index = 4 + r_length + 1 - s_length = der_bytes[s_length_index] - s = int.from_bytes(der_bytes[s_length_index + 1:s_length_index + 1 + s_length], 'big') - hash_type = der_bytes[-1] - - return r, s, hash_type - -def verify_p2pkh_transaction(input_idx, json_transaction): - ################# - # Pubkey script # - ################# - - input_tx = json_transaction["vin"][input_idx] - - # Extract data from input transaction - script_sig_asm = input_tx["scriptsig_asm"] - - # Parse scriptSig ASM to extract signature and public key - script_parts = script_sig_asm.split(" ") - signature_hex = script_parts[1] - public_key_hex = script_parts[3] - - r, s, hash_type = parse_der_signature(signature_hex) - - r_hex = hex(r)[2:] - s_hex = hex(s)[2:] - - der_len = len(signature_hex[:-2]) - signature_len = len(r_hex + s_hex) + 2 * 6 - - if der_len != signature_len: - return False - - signature = bytes.fromhex(r_hex + s_hex) - - public_key = bytes.fromhex(public_key_hex) - - scriptpubkey = bytes.fromhex(input_tx['prevout']['scriptpubkey']) - pubkey_hash = scriptpubkey[3:23] - - hashed_public_key = hashlib.sha256(public_key).digest() - - ripemd160 = RIPEMD160.new() - ripemd160.update(hashed_public_key) - pubkey_hash_calculated = ripemd160.digest() - - if pubkey_hash != pubkey_hash_calculated: - return False - - - #################### - # Signature script # - #################### - - data_signed = serialize_transaction(json_transaction, input_idx, int(hash_type)) - data_hash = hashlib.sha256(data_signed).digest() - - # Verify the signature - verifying_key = ecdsa.VerifyingKey.from_string(public_key, curve=ecdsa.SECP256k1) - try: - verifying_key.verify(signature, 
data_hash, hashlib.sha256) - except ecdsa.BadSignatureError: - return False - - return True - - -class Transaction: - def __init__(self, transaction_json_file): - # Parse transaction. - with open(transaction_json_file) as transaction: - json_transaction = json.load(transaction) - - # check jestli je valid - if valid_transaction_syntax(json_transaction): - self.transaction_name = get_filename_without_extension(transaction_json_file) - self.version = json_transaction['version'] - self.locktime = json_transaction['locktime'] - self.vin = json_transaction['vin'] - self.vout = json_transaction['vout'] - self.json_transaction = json_transaction - else: - print('Invalid transaction syntax') - - def is_valid(self): - if not non_empty_vin_vout(self.vin, self.vout): - return False - - input_sum = 0 - for input in self.vin: - input_sum = input_sum + input['prevout']['value'] - - output_sum = 0 - for output in self.vout: - output_sum = output_sum + output['value'] - - if input_sum < output_sum: - return False - - input_idx = 0 - for input in self.vin: - if 'scriptsig' in input: - scriptsig = input['scriptsig'] - - scriptpubkey_type = input['prevout']['scriptpubkey_type'] - - if scriptsig == "" or scriptpubkey_type not in ["p2pkh", "p2sh"]: - return False - - if scriptpubkey_type == 'p2pkh': - if not verify_p2pkh_transaction(input_idx, self.json_transaction): - return False - else: - return False - else: - return False - - input_idx += 1 - - return True - -def is_valid_block_hash(block_hash, target): - if block_hash == "": - return False - - return block_hash < target - -def calculate_bits(target_hex): - leading_zeros = len(target_hex) - len(target_hex.lstrip('0')) - exponent = (len(target_hex) - 1) // 2 - - coefficient_hex = target_hex[leading_zeros:].rstrip('0') - coefficient = int(coefficient_hex or '0', 16) - - bits = (exponent << 24) + coefficient - - return bits - -def block_mining(transaction_hashes, version=4): - # Calculate Merkle root hash of transactions - merkle_root_hashed = calculate_merkle_root(transaction_hashes) - prev_block_hash = "0000000000000000000000000000000000000000000000000000000000000000" - nonce = 0 - bits = calculate_bits(TARGET) - timestamp = int(time.time()) - - block_hash = "" - - block_header = [] - - while not is_valid_block_hash(block_hash, TARGET): - # Construct block header - block_header = [] - block_header += [version.to_bytes(4, byteorder='little')] - block_header += [bytes.fromhex(prev_block_hash)[::-1]] - block_header += [bytes.fromhex(merkle_root_hashed)] - block_header += [timestamp.to_bytes(4, byteorder='little')] - block_header += [bits.to_bytes(4, byteorder='little')] - block_header += [nonce.to_bytes(4, byteorder='little')] - - # Double sha256 and reverse - block_hash = hashlib.sha256(hashlib.sha256(b''.join(block_header)).digest()).digest() - block_hash = block_hash[::-1].hex() - nonce += 1 - - return b''.join(block_header) - -def calculate_txid(transaction_content, coinbase=False): - # Serialize the transaction content - if coinbase: - serialized_transaction = serialize_transaction(transaction_content, segwit=True) #json.dumps(transaction_content, sort_keys=True).encode() - else: - serialized_transaction = serialize_transaction(transaction_content) #json.dumps(transaction_content, sort_keys=True).encode() - - # Calculate double SHA-256 hash - hash_result = hashlib.sha256(hashlib.sha256(serialized_transaction).digest()).digest() - - # Reverse byte order to obtain txid - txid = hash_result[::-1].hex() - - return txid - -def parse_arguments(): - parser = 
argparse.ArgumentParser(description='Simulation of the mining process of a block') - parser.add_argument('--mempool', type=str, required=True, help='Path to the directory containing the JSON files with transactions.') - return parser.parse_args() - -def calculate_merkle_root(transactions): - transaction_hashes = [] - # reverse - for tx in transactions: - tx_bytes = bytes.fromhex(tx) - reversed_tx_bytes = tx_bytes[::-1] - transaction_hashes.append(reversed_tx_bytes.hex()) - - while len(transaction_hashes) > 1: - new_hashes = [] - - for i in range(0, len(transaction_hashes), 2): - if (i + 1 == len(transaction_hashes)): - new_hash = hashlib.sha256(hashlib.sha256(bytes.fromhex(transaction_hashes[i] + transaction_hashes[i])).digest()).hexdigest() - else: - new_hash = hashlib.sha256(hashlib.sha256(bytes.fromhex(transaction_hashes[i] + transaction_hashes[i + 1])).digest()).hexdigest() - new_hashes.append(new_hash) - - transaction_hashes = new_hashes - - return transaction_hashes[0] - -def calculate_witness_commitment(wtxids): - merkle_root = calculate_merkle_root(wtxids) - merkle_root_bytes = bytes.fromhex(merkle_root) - witness_reserved_value = '0000000000000000000000000000000000000000000000000000000000000000' - witness_reserved_value_bytes = bytes.fromhex(witness_reserved_value) - return hashlib.sha256(hashlib.sha256(b''.join([merkle_root_bytes,witness_reserved_value_bytes])).digest()).hexdigest() - -def get_wtxid(transaction_hash): - tx_bytes = bytes.fromhex(transaction_hash) - reversed_tx_bytes = tx_bytes[::-1] - return reversed_tx_bytes.hex() - -if __name__ == '__main__': - args = parse_arguments() - - if args.mempool is None: - # TODO error - pass - - mempool = MemPool(args.mempool) - - coinbase_transaction = { - "version": 2, - "locktime": 0xffffffff, - "vin": [ - { - "txid": "0000000000000000000000000000000000000000000000000000000000000000", - "vout": 0xffffffff, - "sequence": 0xffffffff, - "is_coinbase": True, - "scriptsig": "160014fd91039e25b0827748473fce351afd8ead4ecdce", - "scriptsig_asm": "OP_PUSHBYTES_22 0014fd91039e25b0827748473fce351afd8ead4ecdce", - "witness": [ - "0000000000000000000000000000000000000000000000000000000000000000", - ] - } - ], - "vout": [ - { - "scriptpubkey": "0014ad4cc1cc859c57477bf90d0f944360d90a3998bf", - "scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 ad4cc1cc859c57477bf90d0f944360d90a3998bf", - "scriptpubkey_type": "v0_p2wpkh", - "scriptpubkey_address": "bc1q44xvrny9n3t5w7lep58egsmqmy9rnx9lt6u0tc", - "value": 100000 - }, - { - "scriptpubkey": "", - "scriptpubkey_type": "op_return", - "value": 0 - } - ] - } - - block_transactions = [coinbase_transaction] + mempool.valid_transactions - - transaction_hashes = [calculate_txid(coinbase_transaction)] + [calculate_txid(json_transaction) for json_transaction in block_transactions[1:]] - block_hash = block_mining(transaction_hashes).hex() - - wtxids = ["0000000000000000000000000000000000000000000000000000000000000000"] + transaction_hashes[1:] - - witness_commitment = calculate_witness_commitment(wtxids) - scriptpubkey_wc = '6a24aa21a9ed' + witness_commitment - - coinbase_transaction["vout"][1]["scriptpubkey"] = scriptpubkey_wc - - coinbase_serialized = serialize_transaction(coinbase_transaction, segwit=True) - - print(block_hash) - print(coinbase_serialized.hex()) - for transaction in transaction_hashes: - print(transaction) - diff --git a/requirements.txt b/requirements.txt index 5dbfbe23..ed2a25ec 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ ecdsa==0.19.0 pycryptodome==3.20.0 -sha3==0.2.1 
+#sha3==0.2.1
\ No newline at end of file
diff --git a/run.sh b/run.sh
index dc1f0b31..f0c97c6f 100755
--- a/run.sh
+++ b/run.sh
@@ -4,8 +4,4 @@
 #venv/bin/pip install --upgrade pip
 #venv/bin/pip install -r requirements.txt
 #source /venv/bin/activate
-python3 -m venv venv
-venv/bin/pip install --upgrade pip
-venv/bin/pip install -r requirements.txt
-source venv/bin/activate
-python3 main.py --mempool=mempool > output.txt
+python3 src/main.py --mempool=mempool > output.txt
\ No newline at end of file
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/coinbase_transaction.py b/src/coinbase_transaction.py
new file mode 100644
index 00000000..af2a18cb
--- /dev/null
+++ b/src/coinbase_transaction.py
@@ -0,0 +1,31 @@
+COINBASE_TRANSACTION = {
+    "version": 2,
+    "locktime": 0xffffffff,
+    "vin": [
+        {
+            "txid": "0000000000000000000000000000000000000000000000000000000000000000",
+            "vout": 0xffffffff,
+            "sequence": 0xffffffff,
+            "is_coinbase": True,
+            "scriptsig": "160014fd91039e25b0827748473fce351afd8ead4ecdce",
+            "scriptsig_asm": "OP_PUSHBYTES_22 0014fd91039e25b0827748473fce351afd8ead4ecdce",
+            "witness": [
+                "0000000000000000000000000000000000000000000000000000000000000000",
+            ]
+        }
+    ],
+    "vout": [
+        {
+            "scriptpubkey": "0014ad4cc1cc859c57477bf90d0f944360d90a3998bf",
+            "scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 ad4cc1cc859c57477bf90d0f944360d90a3998bf",
+            "scriptpubkey_type": "v0_p2wpkh",
+            "scriptpubkey_address": "bc1q44xvrny9n3t5w7lep58egsmqmy9rnx9lt6u0tc",
+            "value": 100000
+        },
+        {
+            "scriptpubkey": "",
+            "scriptpubkey_type": "op_return",
+            "value": 0
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/constants.py b/src/constants.py
new file mode 100644
index 00000000..0923a8cf
--- /dev/null
+++ b/src/constants.py
@@ -0,0 +1,2 @@
+TARGET = "0000ffff00000000000000000000000000000000000000000000000000000000"
+SIGHASH_ALL = 1
\ No newline at end of file
diff --git a/src/main.py b/src/main.py
new file mode 100644
index 00000000..060a2102
--- /dev/null
+++ b/src/main.py
@@ -0,0 +1,50 @@
+import argparse
+import sys
+import os
+
+current_script_directory = os.path.dirname(__file__)
+project_root_directory = os.path.join(current_script_directory, '..')
+
+if project_root_directory not in sys.path:
+    sys.path.append(project_root_directory)
+
+from src.coinbase_transaction import COINBASE_TRANSACTION
+from src.mempool import MemPool
+from src.mining import calculate_witness_commitment, block_mining
+from src.serialize import serialize_transaction
+from src.transaction import calculate_txid
+
+def parse_arguments():
+    parser = argparse.ArgumentParser(description='Simulation of the mining process of a block')
+    parser.add_argument('--mempool', type=str, required=True, help='Path to the directory containing the JSON files with transactions.')
+    return parser.parse_args()
+
+if __name__ == '__main__':
+    args = parse_arguments()
+
+    if args.mempool is None:
+        # TODO error
+        pass
+
+    mempool = MemPool(args.mempool)
+
+    # TODO: continue here
+
+    block_transactions = [COINBASE_TRANSACTION] + mempool.valid_transactions
+
+    transaction_hashes = [calculate_txid(COINBASE_TRANSACTION)] + [calculate_txid(json_transaction) for json_transaction in block_transactions[1:]]
+    block_hash = block_mining(transaction_hashes).hex()
+
+    wtxids = ["0000000000000000000000000000000000000000000000000000000000000000"] + transaction_hashes[1:]
+
+    witness_commitment = calculate_witness_commitment(wtxids)
+    scriptpubkey_wc = '6a24aa21a9ed' + witness_commitment
+
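+    # BIP 141 witness commitment script: OP_RETURN (0x6a), OP_PUSHBYTES_36 (0x24),
+    # the 4-byte header aa21a9ed, then the 32-byte commitment hash.
+    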
COINBASE_TRANSACTION["vout"][1]["scriptpubkey"] = scriptpubkey_wc + + coinbase_serialized = serialize_transaction(COINBASE_TRANSACTION, segwit=True) + + print(block_hash) + print(coinbase_serialized.hex()) + for transaction in transaction_hashes: + print(transaction) diff --git a/src/mempool.py b/src/mempool.py new file mode 100644 index 00000000..1231f62a --- /dev/null +++ b/src/mempool.py @@ -0,0 +1,10 @@ +import os + +from src.transaction import Transaction + +class MemPool: + def __init__(self, root_dir): + self.root_dir = root_dir + self.transaction_files = [os.path.join(self.root_dir, file) for file in os.listdir(self.root_dir) if file.endswith('.json')] + self.transactions = [Transaction(file) for file in self.transaction_files] + self.valid_transactions = [transaction.json_transaction for transaction in self.transactions if transaction.is_valid()] diff --git a/src/mining.py b/src/mining.py new file mode 100644 index 00000000..19ad30b4 --- /dev/null +++ b/src/mining.py @@ -0,0 +1,79 @@ +import hashlib +import time + +from src.constants import TARGET + +def calculate_witness_commitment(wtxids): + merkle_root = calculate_merkle_root(wtxids) + merkle_root_bytes = bytes.fromhex(merkle_root) + witness_reserved_value = '0000000000000000000000000000000000000000000000000000000000000000' + witness_reserved_value_bytes = bytes.fromhex(witness_reserved_value) + return hashlib.sha256(hashlib.sha256(b''.join([merkle_root_bytes,witness_reserved_value_bytes])).digest()).hexdigest() + +def calculate_merkle_root(transactions): + transaction_hashes = [] + # reverse + for tx in transactions: + tx_bytes = bytes.fromhex(tx) + reversed_tx_bytes = tx_bytes[::-1] + transaction_hashes.append(reversed_tx_bytes.hex()) + + while len(transaction_hashes) > 1: + new_hashes = [] + + for i in range(0, len(transaction_hashes), 2): + if (i + 1 == len(transaction_hashes)): + new_hash = hashlib.sha256(hashlib.sha256(bytes.fromhex(transaction_hashes[i] + transaction_hashes[i])).digest()).hexdigest() + else: + new_hash = hashlib.sha256(hashlib.sha256(bytes.fromhex(transaction_hashes[i] + transaction_hashes[i + 1])).digest()).hexdigest() + new_hashes.append(new_hash) + + transaction_hashes = new_hashes + + return transaction_hashes[0] + +def is_valid_block_hash(block_hash, target): + if block_hash == "": + return False + + return block_hash < target + +def calculate_bits(target_hex): + leading_zeros = len(target_hex) - len(target_hex.lstrip('0')) + exponent = (len(target_hex) - 1) // 2 + + coefficient_hex = target_hex[leading_zeros:].rstrip('0') + coefficient = int(coefficient_hex or '0', 16) + + bits = (exponent << 24) + coefficient + + return bits + +def block_mining(transaction_hashes, version=4): + # Calculate Merkle root hash of transactions + merkle_root_hashed = calculate_merkle_root(transaction_hashes) + prev_block_hash = "0000000000000000000000000000000000000000000000000000000000000000" + nonce = 0 + bits = calculate_bits(TARGET) + timestamp = int(time.time()) + + block_hash = "" + + block_header = [] + + while not is_valid_block_hash(block_hash, TARGET): + # Construct block header + block_header = [] + block_header += [version.to_bytes(4, byteorder='little')] + block_header += [bytes.fromhex(prev_block_hash)[::-1]] + block_header += [bytes.fromhex(merkle_root_hashed)] + block_header += [timestamp.to_bytes(4, byteorder='little')] + block_header += [bits.to_bytes(4, byteorder='little')] + block_header += [nonce.to_bytes(4, byteorder='little')] + + # Double sha256 and reverse + block_hash = 
hashlib.sha256(hashlib.sha256(b''.join(block_header)).digest()).digest() + block_hash = block_hash[::-1].hex() + nonce += 1 + + return b''.join(block_header) diff --git a/src/op_codes.py b/src/op_codes.py new file mode 100644 index 00000000..46570cc9 --- /dev/null +++ b/src/op_codes.py @@ -0,0 +1,204 @@ +# The following code is taken from: https://github.com/SummerOfBitcoin/code-challenge-2024-Nesopie/blob/840aeb58dd68cba3fba33f96d71ea0790c1840a8/src/features/script/op_codes.ts + +OP_CODES = { + """ 'OP_0': b'\x00', + 'OP_PUSHBYTES_1': b'\x01', + 'OP_PUSHBYTES_2': b'\x02', + 'OP_PUSHBYTES_3': b'\x03', + 'OP_PUSHBYTES_4': b'\x04', + 'OP_PUSHBYTES_5': b'\x05', + 'OP_PUSHBYTES_6': b'\x06', + 'OP_PUSHBYTES_7': b'\x07', + 'OP_PUSHBYTES_8': b'\x08', + 'OP_PUSHBYTES_9': b'\x09', + 'OP_PUSHBYTES_10': b'\x0a', + 'OP_PUSHBYTES_11': b'\x0b', + 'OP_PUSHBYTES_12': b'\x0c', + 'OP_PUSHBYTES_13': b'\x0d', + 'OP_PUSHBYTES_14': b'\x0e', + 'OP_PUSHBYTES_15': b'\x0f', + 'OP_PUSHBYTES_16': b'\x10', + 'OP_PUSHBYTES_17': b'\x11', + 'OP_PUSHBYTES_18': b'\x12', + 'OP_PUSHBYTES_19': b'\x13', + 'OP_PUSHBYTES_20': b'\x14', + 'OP_PUSHBYTES_21': b'\x15', + 'OP_PUSHBYTES_22': b'\x16', + 'OP_PUSHBYTES_23': b'\x17', + 'OP_PUSHBYTES_24': b'\x18', + 'OP_PUSHBYTES_25': b'\x19', + 'OP_PUSHBYTES_26': b'\x1a', + 'OP_PUSHBYTES_27': b'\x1b', + 'OP_PUSHBYTES_28': b'\x1c', + 'OP_PUSHBYTES_29': b'\x1d', + 'OP_PUSHBYTES_30': b'\x1e', + 'OP_PUSHBYTES_31': b'\x1f', + 'OP_PUSHBYTES_32': b'\x20', + 'OP_PUSHBYTES_33': b'\x21', + 'OP_PUSHBYTES_34': b'\x22', + 'OP_PUSHBYTES_35': b'\x23', + 'OP_PUSHBYTES_36': b'\x24', + 'OP_PUSHBYTES_37': b'\x25', + 'OP_PUSHBYTES_38': b'\x26', + 'OP_PUSHBYTES_39': b'\x27', + 'OP_PUSHBYTES_40': b'\x28', + 'OP_PUSHBYTES_41': b'\x29', + 'OP_PUSHBYTES_42': b'\x2a', + 'OP_PUSHBYTES_43': b'\x2b', + 'OP_PUSHBYTES_44': b'\x2c', + 'OP_PUSHBYTES_45': b'\x2d', + 'OP_PUSHBYTES_46': b'\x2e', + 'OP_PUSHBYTES_47': b'\x2f', + 'OP_PUSHBYTES_48': b'\x30', + 'OP_PUSHBYTES_49': b'\x31', + 'OP_PUSHBYTES_50': b'\x32', + 'OP_PUSHBYTES_51': b'\x33', + 'OP_PUSHBYTES_52': b'\x34', + 'OP_PUSHBYTES_53': b'\x35', + 'OP_PUSHBYTES_54': b'\x36', + 'OP_PUSHBYTES_55': b'\x37', + 'OP_PUSHBYTES_56': b'\x38', + 'OP_PUSHBYTES_57': b'\x39', + 'OP_PUSHBYTES_58': b'\x3a', + 'OP_PUSHBYTES_59': b'\x3b', + 'OP_PUSHBYTES_60': b'\x3c', + 'OP_PUSHBYTES_61': b'\x3d', + 'OP_PUSHBYTES_62': b'\x3e', + 'OP_PUSHBYTES_63': b'\x3f', + 'OP_PUSHBYTES_64': b'\x40', + 'OP_PUSHBYTES_65': b'\x41', + 'OP_PUSHBYTES_66': b'\x42', + 'OP_PUSHBYTES_67': b'\x43', + 'OP_PUSHBYTES_68': b'\x44', + 'OP_PUSHBYTES_69': b'\x45', + 'OP_PUSHBYTES_70': b'\x46', + 'OP_PUSHBYTES_71': b'\x47', + 'OP_PUSHBYTES_72': b'\x48', + 'OP_PUSHBYTES_73': b'\x49', + 'OP_PUSHBYTES_74': b'\x4a', + 'OP_PUSHBYTES_75': b'\x4b', """ + 'OP_PUSHDATA1': b'\x4c', + 'OP_PUSHDATA2': b'\x4d', + 'OP_PUSHDATA4': b'\x4e', + 'OP_1NEGATE': b'\x4f', + 'OP_RESERVED': b'\x50', + 'OP_1': b'\x51', + 'OP_2': b'\x52', + 'OP_3': b'\x53', + 'OP_4': b'\x54', + 'OP_5': b'\x55', + 'OP_6': b'\x56', + 'OP_7': b'\x57', + 'OP_8': b'\x58', + 'OP_9': b'\x59', + 'OP_10': b'\x5a', + 'OP_11': b'\x5b', + 'OP_12': b'\x5c', + 'OP_13': b'\x5d', + 'OP_14': b'\x5e', + 'OP_15': b'\x5f', + 'OP_16': b'\x60', + 'OP_PUSHNUM_1': b'\x51', + 'OP_PUSHNUM_2': b'\x52', + 'OP_PUSHNUM_3': b'\x53', + 'OP_PUSHNUM_4': b'\x54', + 'OP_PUSHNUM_5': b'\x55', + 'OP_PUSHNUM_6': b'\x56', + 'OP_PUSHNUM_7': b'\x57', + 'OP_PUSHNUM_8': b'\x58', + 'OP_PUSHNUM_9': b'\x59', + 'OP_PUSHNUM_10': b'\x5a', + 'OP_PUSHNUM_11': b'\x5b', + 'OP_PUSHNUM_12': b'\x5c', + 
'OP_PUSHNUM_13': b'\x5d', + 'OP_PUSHNUM_14': b'\x5e', + 'OP_PUSHNUM_15': b'\x5f', + 'OP_PUSHNUM_16': b'\x60', + # Flow control + 'OP_NOP': b'\x61', + 'OP_IF': b'\x63', + 'OP_NOTIF': b'\x64', + 'OP_ELSE': b'\x67', + 'OP_ENDIF': b'\x68', + 'OP_VERIFY': b'\x69', + 'OP_RETURN': b'\x6a', + # Stack operations + 'OP_TOALTSTACK': b'\x6b', + 'OP_FROMALTSTACK': b'\x6c', + 'OP_2DROP': b'\x6d', + 'OP_2DUP': b'\x6e', + 'OP_3DUP': b'\x6f', + 'OP_2OVER': b'\x70', + 'OP_2ROT': b'\x71', + 'OP_2SWAP': b'\x72', + 'OP_IFDUP': b'\x73', + 'OP_DEPTH': b'\x74', + 'OP_DROP': b'\x75', + 'OP_DUP': b'\x76', + 'OP_NIP': b'\x77', + 'OP_OVER': b'\x78', + 'OP_PICK': b'\x79', + 'OP_ROLL': b'\x7a', + 'OP_ROT': b'\x7b', + 'OP_SWAP': b'\x7c', + 'OP_TUCK': b'\x7d', + # Bitwise logic + 'OP_INVERT': b'\x83', + 'OP_AND': b'\x84', + 'OP_OR': b'\x85', + 'OP_XOR': b'\x86', + 'OP_EQUAL': b'\x87', + 'OP_EQUALVERIFY': b'\x88', + # Arithmetic operations + 'OP_1ADD': b'\x8b', + 'OP_1SUB': b'\x8c', + 'OP_NEGATE': b'\x8f', + 'OP_ABS': b'\x90', + 'OP_NOT': b'\x91', + 'OP_0NOTEQUAL': b'\x92', + 'OP_ADD': b'\x93', + 'OP_SUB': b'\x94', + 'OP_MUL': b'\x95', # disabled + 'OP_DIV': b'\x96', # disabled + 'OP_MOD': b'\x97', # disabled + 'OP_LSHIFT': b'\x98', # disabled + 'OP_RSHIFT': b'\x99', # disabled + 'OP_BOOLAND': b'\x9a', + 'OP_BOOLOR': b'\x9b', + 'OP_NUMEQUAL': b'\x9c', + 'OP_NUMEQUALVERIFY': b'\x9d', + 'OP_NUMNOTEQUAL': b'\x9e', + 'OP_LESSTHAN': b'\x9f', + 'OP_GREATERTHAN': b'\xa0', + 'OP_LESSTHANOREQUAL': b'\xa1', + 'OP_GREATERTHANOREQUAL': b'\xa2', + 'OP_MIN': b'\xa3', + 'OP_MAX': b'\xa4', + 'OP_WITHIN': b'\xa5', + # Crypto operations + 'OP_RIPEMD160': b'\xa6', + 'OP_SHA1': b'\xa7', + 'OP_SHA256': b'\xa8', + 'OP_HASH160': b'\xa9', + 'OP_HASH256': b'\xaa', + 'OP_CODESEPARATOR': b'\xab', + 'OP_CHECKSIG': b'\xac', + 'OP_CHECKSIGVERIFY': b'\xad', + 'OP_CHECKMULTISIG': b'\xae', + 'OP_CHECKMULTISIGVERIFY': b'\xaf', + # Reserved or non-standard opcodes + 'OP_NOP1': b'\xb0', # reserved + 'OP_CHECKLOCKTIMEVERIFY': b'\xb1', + 'OP_CHECKSEQUENCEVERIFY': b'\xb2', + 'OP_CSV': b'\xb2', # same as OP_CHECKSEQUENCEVERIFY + 'OP_NOP4': b'\xb3', # reserved + 'OP_NOP5': b'\xb4', # reserved + 'OP_NOP6': b'\xb5', # reserved + 'OP_NOP7': b'\xb6', # reserved + 'OP_NOP8': b'\xb7', # reserved + 'OP_NOP9': b'\xb8', # reserved + 'OP_NOP10': b'\xb9', # reserved + 'OP_CHECKSIGADD': b'\xba', + 'OP_CHECKSIGADDVERIFY': b'\xbb', + +} diff --git a/src/script.py b/src/script.py new file mode 100644 index 00000000..471a34ee --- /dev/null +++ b/src/script.py @@ -0,0 +1,427 @@ +from dataclasses import dataclass +from typing import List, Any, Union +import hashlib +import ecdsa +from src.op_codes import OP_CODES + +class InvalidScriptException(Exception): + """Custom exception for Script execution errors""" + pass + +class Stack: + def __init__(self): + self._items: List[bytes] = [] + + def push(self, item: bytes) -> None: + self._items.append(item) + + def pop(self) -> bytes: + if not self._items: + raise InvalidScriptException("Attempted to pop from empty stack") + return self._items.pop() + + def peek(self) -> bytes: + if not self._items: + raise InvalidScriptException("Attempted to peek empty stack") + return self._items[-1] + + def size(self) -> int: + return len(self._items) + + def is_empty(self) -> bool: + return len(self._items) == 0 + +class Script: + def __init__(self, script: bytes, json_transaction: dict = None, input_index: int = 0): + self.script = script + self.stack = Stack() + self.alt_stack = Stack() + self.if_stack: List[bool] = [] + self.transaction = 
json_transaction # Store JSON transaction + self.input_index = input_index + + def create_signature_hash(self, hash_type: int) -> bytes: + """ + Create the signature hash for the transaction based on the hash type. + This is what gets signed/verified in OP_CHECKSIG. + """ + if not self.transaction: + raise InvalidScriptException("No transaction context provided for signature verification") + + # Create a copy of the transaction + tx_copy = self.transaction.copy() + + # Clear all input scripts + for inp in tx_copy['vin']: + inp['scriptsig'] = '' + + # Handle different hash types + if hash_type & 0x1F == 0x01: # SIGHASH_ALL + # Most common, signs all inputs and outputs + # Current input gets the subscript + tx_copy['vin'][self.input_index]['scriptsig'] = self.script.hex() + + elif hash_type & 0x1F == 0x02: # SIGHASH_NONE + # Signs all inputs, but no outputs + tx_copy['vout'] = [] + # Zero out sequence numbers of other inputs + for i in range(len(tx_copy['vin'])): + if i != self.input_index: + tx_copy['vin'][i]['sequence'] = 0 + + elif hash_type & 0x1F == 0x03: # SIGHASH_SINGLE + # Signs all inputs and only the output with same index + if self.input_index >= len(tx_copy['vout']): + raise InvalidScriptException("SIGHASH_SINGLE invalid output index") + # Keep only the output at the same index + output = tx_copy['vout'][self.input_index] + tx_copy['vout'] = [{'value': -1, 'scriptpubkey': ''}] * self.input_index + tx_copy['vout'].append(output) + # Zero out sequence numbers of other inputs + for i in range(len(tx_copy['vin'])): + if i != self.input_index: + tx_copy['vin'][i]['sequence'] = 0 + + if hash_type & 0x80: # SIGHASH_ANYONECANPAY + # Only sign the current input + current_input = tx_copy['vin'][self.input_index] + tx_copy['vin'] = [current_input] + self.input_index = 0 + + # Serialize the modified transaction + serialized = self.serialize_transaction(tx_copy) + + # Add hash type + serialized += hash_type.to_bytes(4, 'little') + + # Double SHA256 + return hashlib.sha256(hashlib.sha256(serialized).digest()).digest() + + def serialize_transaction(self, tx: dict) -> bytes: + """Serialize a transaction for signing/verification""" + result = bytearray() + + # Version + result.extend(tx['version'].to_bytes(4, 'little')) + + # Number of inputs + result.extend(len(tx['vin']).to_bytes(1, 'little')) + + # Inputs + for inp in tx['vin']: + # Previous transaction hash (reverse byte order) + prev_tx = bytes.fromhex(inp['txid'])[::-1] + result.extend(prev_tx) + + # Previous output index + result.extend(inp['vout'].to_bytes(4, 'little')) + + # Script + script_sig = bytes.fromhex(inp['scriptsig']) if inp['scriptsig'] else b'' + result.extend(len(script_sig).to_bytes(1, 'little')) + result.extend(script_sig) + + # Sequence + result.extend(inp['sequence'].to_bytes(4, 'little')) + + # Number of outputs + result.extend(len(tx['vout']).to_bytes(1, 'little')) + + # Outputs + for out in tx['vout']: + # Amount in satoshis + result.extend(out['value'].to_bytes(8, 'little')) + + # Script + script_pubkey = bytes.fromhex(out['scriptpubkey']) + result.extend(len(script_pubkey).to_bytes(1, 'little')) + result.extend(script_pubkey) + + # Locktime + result.extend(tx['locktime'].to_bytes(4, 'little')) + + return bytes(result) + + def execute(self) -> bool: + """Execute the script and return True if it executed successfully""" + try: + i = 0 + while i < len(self.script): + # Skip execution if we're in a false IF block + if self.if_stack and not self.if_stack[-1]: + op = self.script[i:i+1] + if op == OP_CODES['OP_ENDIF']: + 
self.if_stack.pop() + elif op == OP_CODES['OP_ELSE']: + self.if_stack[-1] = not self.if_stack[-1] + i += 1 + continue + + op = self.script[i:i+1] + + # Handle data push operations + if op not in OP_CODES.values(): + length = int.from_bytes(op, 'little') + if length > 75: # Use OP_PUSHDATA operations for larger chunks + raise InvalidScriptException(f"Invalid push operation length: {length}") + data = self.script[i+1:i+1+length] + self.stack.push(data) + i += length + 1 + continue + + # Handle opcodes + op_name = list(OP_CODES.keys())[list(OP_CODES.values()).index(op)] + i += self._execute_opcode(op_name) + + # Script executed successfully if stack is not empty and top value is true + if self.stack.is_empty(): + return False + return self.stack.pop() != b'\x00' + + except Exception as e: + raise InvalidScriptException(f"Script execution failed: {str(e)}") + + def _execute_opcode(self, op_name: str) -> int: + """Execute a single opcode and return how many bytes to advance""" + + # Constants + if op_name == 'OP_0': + self.stack.push(b'\x00') + return 1 + elif op_name == 'OP_1NEGATE': + self.stack.push(b'\xff') + return 1 + elif op_name.startswith('OP_') and op_name[3:].isdigit(): + n = int(op_name[3:]) + self.stack.push(bytes([n])) + return 1 + + # Flow Control + elif op_name == 'OP_IF': + if self.stack.is_empty(): + self.if_stack.append(False) + else: + value = self.stack.pop() + self.if_stack.append(value != b'\x00') + return 1 + elif op_name == 'OP_NOTIF': + if self.stack.is_empty(): + self.if_stack.append(True) + else: + value = self.stack.pop() + self.if_stack.append(value == b'\x00') + return 1 + elif op_name == 'OP_ELSE': + if not self.if_stack: + raise InvalidScriptException("OP_ELSE without OP_IF") + self.if_stack[-1] = not self.if_stack[-1] + return 1 + elif op_name == 'OP_ENDIF': + if not self.if_stack: + raise InvalidScriptException("OP_ENDIF without OP_IF") + self.if_stack.pop() + return 1 + + # Stack Operations + elif op_name == 'OP_DUP': + self.op_dup() + return 1 + elif op_name == 'OP_DROP': + self.stack.pop() + return 1 + elif op_name == 'OP_SWAP': + if self.stack.size() < 2: + raise InvalidScriptException("Stack too small for OP_SWAP") + a = self.stack.pop() + b = self.stack.pop() + self.stack.push(a) + self.stack.push(b) + return 1 + elif op_name == 'OP_ROT': + if self.stack.size() < 3: + raise InvalidScriptException("Stack too small for OP_ROT") + a = self.stack.pop() + b = self.stack.pop() + c = self.stack.pop() + self.stack.push(b) + self.stack.push(a) + self.stack.push(c) + return 1 + + # Arithmetic and Logical Operations + elif op_name == 'OP_ADD': + if self.stack.size() < 2: + raise InvalidScriptException("Stack too small for OP_ADD") + a = int.from_bytes(self.stack.pop(), 'little', signed=True) + b = int.from_bytes(self.stack.pop(), 'little', signed=True) + result = (a + b).to_bytes(4, 'little', signed=True) + self.stack.push(result) + return 1 + elif op_name == 'OP_SUB': + if self.stack.size() < 2: + raise InvalidScriptException("Stack too small for OP_SUB") + a = int.from_bytes(self.stack.pop(), 'little', signed=True) + b = int.from_bytes(self.stack.pop(), 'little', signed=True) + result = (b - a).to_bytes(4, 'little', signed=True) + self.stack.push(result) + return 1 + + # Crypto Operations + elif op_name == 'OP_HASH160': + self.op_hash160() + return 1 + elif op_name == 'OP_CHECKSIG': + return self.op_checksig() + elif op_name == 'OP_CHECKMULTISIG': + return self.op_checkmultisig() + + # Comparison Operations + elif op_name == 'OP_EQUALVERIFY': + 
self.op_equalverify() + return 1 + elif op_name == 'OP_EQUAL': + if self.stack.size() < 2: + raise InvalidScriptException("Stack too small for OP_EQUAL") + a = self.stack.pop() + b = self.stack.pop() + self.stack.push(b'\x01' if a == b else b'\x00') + return 1 + + raise InvalidScriptException(f"Unimplemented opcode: {op_name}") + + def op_dup(self) -> None: + """Duplicate the top stack item""" + if self.stack.is_empty(): + raise InvalidScriptException("Cannot DUP empty stack") + self.stack.push(self.stack.peek()) + + def op_hash160(self) -> None: + """SHA256 followed by RIPEMD160""" + if self.stack.is_empty(): + raise InvalidScriptException("Cannot HASH160 empty stack") + value = self.stack.pop() + sha256 = hashlib.sha256(value).digest() + ripemd160 = hashlib.new('ripemd160', sha256).digest() + self.stack.push(ripemd160) + + def op_equalverify(self) -> None: + """Verify top two stack items are equal""" + if self.stack.size() < 2: + raise InvalidScriptException("Stack too small for EQUALVERIFY") + a = self.stack.pop() + b = self.stack.pop() + if a != b: + raise InvalidScriptException("EQUALVERIFY failed") + + def op_checksig(self) -> int: + """ + Verify a signature against a public key + Returns number of bytes consumed + """ + if self.stack.size() < 2: + raise InvalidScriptException("Stack too small for CHECKSIG") + + pubkey = self.stack.pop() + signature = self.stack.pop() + + try: + # Extract DER signature and hash type + if len(signature) < 1: + raise InvalidScriptException("Empty signature") + + der_sig = signature[:-1] # Remove hash type byte + hash_type = signature[-1] + + # Create verifying key from public key bytes + try: + vk = ecdsa.VerifyingKey.from_string( + pubkey, + curve=ecdsa.SECP256k1, + hashfunc=hashlib.sha256 + ) + except Exception as e: + raise InvalidScriptException(f"Invalid public key: {str(e)}") + + # Create signature hash based on hash type + sig_hash = self.create_signature_hash(hash_type) + + # Verify the signature + try: + verified = vk.verify(der_sig, sig_hash) + except Exception: + verified = False + + self.stack.push(b'\x01' if verified else b'\x00') + return 1 + + except Exception as e: + self.stack.push(b'\x00') + return 1 + + def op_checkmultisig(self) -> int: + """ + Verify multiple signatures against multiple public keys + Returns number of bytes consumed + """ + if self.stack.size() < 1: + raise InvalidScriptException("Stack too small for CHECKMULTISIG") + + # Get number of public keys + n = int.from_bytes(self.stack.pop(), 'little') + if n < 0 or n > 20: + raise InvalidScriptException("Invalid number of public keys") + + if self.stack.size() < n + 1: + raise InvalidScriptException("Stack too small for public keys") + + # Get public keys + pubkeys = [] + for _ in range(n): + pubkeys.append(self.stack.pop()) + + # Get number of signatures + m = int.from_bytes(self.stack.pop(), 'little') + if m < 0 or m > n: + raise InvalidScriptException("Invalid number of signatures") + + if self.stack.size() < m: + raise InvalidScriptException("Stack too small for signatures") + + # Get signatures + signatures = [] + for _ in range(m): + signatures.append(self.stack.pop()) + + # Remove the extra null byte (Bitcoin protocol quirk) + if self.stack.size() < 1: + raise InvalidScriptException("No extra null byte for CHECKMULTISIG") + self.stack.pop() + + # TODO: Implement proper multisig verification + # This is a simplified version that always returns true + # In a real implementation, you would: + # 1. Verify each signature against public keys in order + # 2. 
Ensure all signatures are valid
+        # 3. Handle proper error cases
+
+        verified = True # Replace with actual verification
+
+        self.stack.push(b'\x01' if verified else b'\x00')
+        return 1
+
+    @staticmethod
+    def combine_scripts(*scripts: Union[bytes, 'Script']) -> 'Script':
+        """
+        Combine multiple scripts into a single script.
+        Accepts both bytes and Script objects.
+        """
+        combined = bytearray()
+        for script in scripts:
+            if isinstance(script, Script):
+                combined.extend(script.script)
+            elif isinstance(script, bytes):
+                combined.extend(script)
+            else:
+                raise InvalidScriptException(f"Invalid script type: {type(script)}")
+        return Script(bytes(combined))
+    
\ No newline at end of file
diff --git a/src/serialize.py b/src/serialize.py
new file mode 100644
index 00000000..db84e33e
--- /dev/null
+++ b/src/serialize.py
@@ -0,0 +1,95 @@
+from src.constants import SIGHASH_ALL
+
+# The code in this file is inspired from the following source:
+# http://karpathy.github.io/2021/06/21/blockchain/
+
+def serialize_input(tx_input, override=None):
+    serialized_input = []
+    serialized_input += [bytes.fromhex(tx_input["txid"])[::-1]] # Reversed txid
+    serialized_input += [encode_int(tx_input["vout"], 4)]
+
+    if override is None:
+        # None = just use the actual script
+        serialized_input += [serialize_script(bytes.fromhex(tx_input["scriptsig"]))]
+    elif override is True:
+        # True = override the script with the script_pubkey of the associated input
+        serialized_input += [serialize_script(bytes.fromhex(tx_input["prevout"]["scriptpubkey"]))]
+    elif override is False:
+        # False = override with an empty script
+        serialized_input += [serialize_script(bytes.fromhex(""))]
+    else:
+        raise ValueError("script_override must be one of None|True|False")
+
+    serialized_input += [encode_int(tx_input["sequence"], 4)]
+
+    return b''.join(serialized_input)
+
+def encode_int(i, nbytes, encoding='little'):
+    return i.to_bytes(nbytes, encoding)
+
+def serialize_script(script):
+    return b''.join([encode_varint(len(script)), script])
+
+def serialize_output(output):
+    serialized_output = []
+
+    serialized_output += [encode_int(output["value"], 8)]
+    serialized_output += [serialize_script(bytes.fromhex(output["scriptpubkey"]))]
+
+    return b''.join(serialized_output)
+
+def encode_int(i, nbytes, encoding='little'):
+    """ encode integer i into nbytes bytes using a given byte ordering """
+    return i.to_bytes(nbytes, encoding)
+
+def encode_varint(i):
+    """ encode a (possibly but rarely large) integer into bytes with a super simple compression scheme """
+    if i < 0xfd:
+        return bytes([i])
+    elif i < 0x10000:
+        return b'\xfd' + i.to_bytes(2, 'little')
+    elif i < 0x100000000:
+        return b'\xfe' + i.to_bytes(4, 'little')
+    elif i < 0x10000000000000000:
+        return b'\xff' + i.to_bytes(8, 'little')
+    else:
+        raise ValueError("integer too large: %d" % (i, ))
+
+def serialize_transaction(transaction, index=-1, sighash_type=1, segwit=False):
+    # for now for p2pkh
+    out = []
+    out += [encode_int(transaction["version"], 4)]
+
+    if segwit:
+        out += [b'\x00\x01'] # segwit marker
+
+    # inputs
+    out += [encode_varint(len(transaction["vin"]))]
+
+    inputs = transaction["vin"]
+    outputs = transaction["vout"]
+
+    if index == -1:
+        out += [serialize_input(tx_in) for tx_in in inputs]
+    else:
+        # used when crafting digital signature for a specific input index
+        out += [serialize_input(tx_in, index == i) for i, tx_in in enumerate(inputs)]
+
+    # outputs
+    out += [encode_varint(len(transaction["vout"]))]
+    out += [serialize_output(tx_out) for tx_out in outputs]
+
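+    # BIP 144: each input's witness is encoded as a varint item count followed
+    # by every stack item (varint length + raw bytes).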
+    # witness
+    if segwit:
+        for tx_in in inputs:
+            out += [encode_varint(len(tx_in["witness"]))]
+
+            for item in tx_in["witness"]:
+                item_bytes = bytes.fromhex(item)
+                out += [encode_varint(len(item_bytes)), item_bytes]
+
+    # encode rest of data
+    out += [encode_int(transaction["locktime"], 4)]
+    out += [encode_int(SIGHASH_ALL, 4) if index != -1 else b'']
+
+    return b''.join(out)
diff --git a/src/stack.py b/src/stack.py
new file mode 100644
index 00000000..8ad14d22
--- /dev/null
+++ b/src/stack.py
@@ -0,0 +1,36 @@
+class Stack:
+    def __init__(self):
+        # Initialize an empty list to hold stack elements
+        self._stack = []
+
+    def push(self, item):
+        # Add an item to the top of the stack
+        self._stack.append(item)
+
+    def pop(self):
+        # Remove and return the top item from the stack
+        # Raise an exception if the stack is empty
+        if not self.is_empty():
+            return self._stack.pop()
+        else:
+            raise IndexError("pop from empty stack")
+
+    def peek(self):
+        # Return the top item from the stack without removing it
+        # Raise an exception if the stack is empty
+        if not self.is_empty():
+            return self._stack[-1]
+        else:
+            raise IndexError("peek from empty stack")
+
+    def is_empty(self):
+        # Return True if the stack is empty, False otherwise
+        return len(self._stack) == 0
+
+    def size(self):
+        # Return the number of items in the stack
+        return len(self._stack)
+
+    def __repr__(self):
+        # Return a string representation of the stack
+        return f"Stack({self._stack})"
diff --git a/src/transaction.py b/src/transaction.py
new file mode 100644
index 00000000..cb905f1c
--- /dev/null
+++ b/src/transaction.py
@@ -0,0 +1,310 @@
+import hashlib
+import json
+
+from ecdsa import VerifyingKey, SECP256k1, BadSignatureError
+
+from src.script import Script, InvalidScriptException
+from src.serialize import serialize_transaction
+from src.utils import decode_hex, get_filename_without_extension, hash160
+from src.verify import parse_der_signature_bytes, valid_transaction_syntax
+
+def calculate_txid(transaction_content, coinbase=False):
+    # Serialize the transaction content
+    if coinbase:
+        serialized_transaction = serialize_transaction(transaction_content, segwit=True) #json.dumps(transaction_content, sort_keys=True).encode()
+    else:
+        serialized_transaction = serialize_transaction(transaction_content) #json.dumps(transaction_content, sort_keys=True).encode()
+
+    # Calculate double SHA-256 hash
+    hash_result = hashlib.sha256(hashlib.sha256(serialized_transaction).digest()).digest()
+
+    # Reverse byte order to obtain txid
+    txid = hash_result[::-1].hex()
+
+    return txid
+
+class Transaction:
+    def __init__(self, transaction_json_file):
+        # Parse transaction.
+        with open(transaction_json_file) as transaction:
+            json_transaction = json.load(transaction)
+
+        # Check whether the transaction syntax is valid.
+        if valid_transaction_syntax(json_transaction):
+            self.transaction_name = get_filename_without_extension(transaction_json_file)
+            self.version = json_transaction['version']
+            self.locktime = json_transaction['locktime']
+            self.vin = json_transaction['vin']
+            self.vout = json_transaction['vout']
+            self.json_transaction = json_transaction
+        else:
+            # TODO: maybe raise an error here.
+            print('Invalid transaction syntax')
+
+    def is_valid(self):
+        # At least one input and one output.
+        if not self.non_empty_vin_vout():
+            return False
+
+        # Basic locktime check.
+        if not self.valid_locktime():
+            return False
+
+        if not self.check_input_output_sum():
+            return False
+
+        # Check each input validity.
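+        # Only p2pkh inputs currently have a full verifier; the other
+        # recognized prevout script types are rejected in valid_input() below.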
+ for vin_idx, vin in enumerate(self.vin): + if not self.valid_input(vin_idx, vin): + return False + + # Check each output validity. + for vout in self.vout: + if not self.valid_output(vout): + return False + + return True + + def non_empty_vin_vout(self): + # Make sure neither in or out lists are empty + if not self.vin: + #print("vin is empty") + return False + if not self.vout: + #print("vout is empty") + return False + + return True + + def valid_locktime(self): + return isinstance(self.locktime, int) and self.locktime >= 0 + + def check_input_output_sum(self): + input_sum = 0 + for input in self.vin: + input_sum = input_sum + input['prevout']['value'] + + output_sum = 0 + for output in self.vout: + output_sum = output_sum + output['value'] + + # Output sum can't be greater than the input sum. + if input_sum < output_sum: + return False + + return True + + def valid_input(self, vin_idx, vin): + if vin.get("is_coinbase", False): + return False + + prevout = vin.get("prevout", {}) + scriptpubkey_type = prevout.get("scriptpubkey_type", "") + + if scriptpubkey_type == "p2pkh": + return self.validate_p2pkh(vin_idx, vin) + elif scriptpubkey_type == "p2sh": + pass + #return self.validate_p2sh_p2wpkh(vin_idx, vin) + elif scriptpubkey_type == "v0_p2wsh": + pass + #return self.validate_p2wsh(vin) + elif scriptpubkey_type == "v1_p2tr": + pass + #return self.validate_p2tr(vin) + elif scriptpubkey_type == "v0_p2wpkh": + pass + #return self.validate_p2wpkh(vin) + + # Unknown script type. + return False + + def valid_output(self, vout): + scriptpubkey_type = vout.get("scriptpubkey_type", "") + return scriptpubkey_type in ["v0_p2wpkh", "p2sh", "v0_p2wsh", "v1_p2tr", "p2pkh"] + + def validate_p2pkh(self, vin_idx, vin): + ################# + # Pubkey script # + ################# + scriptsig = decode_hex(vin.get("scriptsig", "")) + + if not scriptsig: + return False + + prevout = vin.get("prevout", {}) + + if not prevout: + return False + + scriptpubkey = decode_hex(prevout.get("scriptpubkey", "")) + + # Combine and verify + script = Script.combine_scripts(scriptsig, scriptpubkey) + is_valid = script.execute() + return is_valid +""" + ##################################################################### + # Extract signature and public key from scriptSig (Parse scriptSig) # + ##################################################################### + # https://learnmeabitcoin.com/technical/script/p2pkh/ + # Explanation: the scriptSig contains the signature and the public key (including ASM instructions). 
+ + signature_len = scriptsig[0] # The first byte represents the length of the DER signature (including hash type) + signature_w_hash_type = scriptsig[1:1+signature_len] # Extract the signature (includes the hash type at the end) + + # The last byte of the signature is the hash type (e.g., SIGHASH_ALL = 0x01) + signature = signature_w_hash_type[:-1] + hash_type = signature_w_hash_type[-1] + + public_key_idx = 1 + signature_len + public_key_len = scriptsig[public_key_idx] + public_key = scriptsig[public_key_idx+1:public_key_idx+1+public_key_len] + + ####################### + # Parse DER signature # + ####################### + # https://bitcoin.stackexchange.com/questions/92680/what-are-the-der-signature-and-sec-format + # https://learnmeabitcoin.com/technical/keys/signature/ + + # Remove the hash_type from the DER signature + der_signature = signature_w_hash_type[:-1] + + r, s, hash_type = parse_der_signature_bytes(der_signature) + + der_len = len(der_signature) + signature_len = len(r + s) + 6 + + if der_len != signature_len: + return False + + signature = r + s + + ###################### + # Parse scriptPubKey # + ###################### + # https://learnmeabitcoin.com/technical/script/p2pkh/ + # Explanation: the scriptPubKey contains: DUP, HASH160, public key hash (including OP_PUSHBYTES_20), EQUALVERIFY and CHECKSIG. + + if scriptpubkey[0:1] != b'\x76' or scriptpubkey[1:2] != b'\xa9' or scriptpubkey[2:3] != b'\x14': + return False # Not a valid P2PKH scriptPubKey (missing OP_DUP, OP_HASH160, or length mismatch) + + if scriptpubkey[23:24] != b'\x88' or scriptpubkey[24:25] != b'\xac': + return False # Not a valid P2PKH scriptPubKey (missing OP_EQUALVERIFY or OP_CHECKSIG) + + pkh = scriptpubkey[3:23] + + # Compute the public key hash (HASH160 of the public key) and compare with scriptPubKey + calc_pkh = hash160(public_key) + if calc_pkh != pkh: + return False # Public key hash does not match + + ############################################ + # Verify the signature with the public key # + ############################################ + + data_signed = serialize_transaction(self.json_transaction, vin_idx, int(hash_type)) + data_hash = hashlib.sha256(data_signed).digest() + + print(self.json_transaction) + print("********************************") + + # Verify the signature + verifying_key = VerifyingKey.from_string(public_key, curve=SECP256k1) + try: + verifying_key.verify(signature, data_hash, hashlib.sha256) + except BadSignatureError: + return False + + return True""" + +""" + def validate_p2sh_p2wpkh(self, vin_idx, vin): + # Extract scriptSig and witness + scriptsig = decode_hex(vin.get("scriptsig", "")) + witness = vin.get("witness", []) + + if not scriptsig or len(witness) < 2: + return False + + print(vin["txid"]) + + prevout = vin.get("prevout", {}) + + if not prevout: + return False + + scriptpubkey = decode_hex(prevout.get("scriptpubkey", "")) + + ############################# + # Check if it's a P2SH script # + ############################# + if len(scriptpubkey) != 23 or scriptpubkey[0:1] != b'\xa9' or scriptpubkey[-1:] != b'\x87': + return False # Not a valid P2SH scriptPubKey + + # Extract the redeem script hash from the scriptPubKey + # Extract redeem script hash from scriptPubKey + if scriptpubkey[0] != 0xa9: # Check for OP_HASH160 + return False + + length_of_hash = scriptpubkey[1] + if length_of_hash != 0x14: # 20 bytes + return False + + expected_redeem_script_hash = scriptpubkey[2:2+length_of_hash] + + ########################### + # Extract the redeem script # + 
########################### + # The redeem script is the data in the scriptSig + redeem_script = scriptsig + + # Hash the redeem script and compare it with the expected hash in the scriptPubKey + redeem_script_hash = hash160(redeem_script) + + #print("rsh: ", redeem_script_hash) + #print("ersh: ", expected_redeem_script_hash) + + if redeem_script_hash != expected_redeem_script_hash: + return False # Redeem script hash does not match + + ############################## + # Parse and execute redeem script # + ############################## + # The redeem script should be a P2WPKH script: OP_0 <20-byte-public-key-hash> + if len(redeem_script) != 22 or redeem_script[0:1] != b'\x00' or redeem_script[1:2] != b'\x14': + return False # Not a valid P2WPKH redeem script + + # Extract the public key hash from the redeem script + public_key_hash = redeem_script[2:] + + ###################### + # Verify the witness # + ###################### + # The witness field contains: + # - witness[0] = signature + # - witness[1] = public key + + signature = decode_hex(witness[0]) + public_key = decode_hex(witness[1]) + + # Compute the public key hash (HASH160 of the public key) and compare with the public key hash in the redeem script + calc_pkh = hash160(public_key) + if calc_pkh != public_key_hash: + return False # Public key hash does not match + + ############################################ + # Verify the signature with the public key # + ############################################ + + data_signed = serialize_transaction(self.json_transaction, vin_idx, 1) # SIGHASH_ALL is typically 1 + data_hash = hashlib.sha256(data_signed).digest() + + # Verify the signature + verifying_key = VerifyingKey.from_string(public_key, curve=SECP256k1) + try: + verifying_key.verify(signature[:-1], data_hash, hashlib.sha256) # Remove the last byte (hash type) + except BadSignatureError: + return False + + return True """ \ No newline at end of file diff --git a/src/utils.py b/src/utils.py new file mode 100644 index 00000000..32ad658c --- /dev/null +++ b/src/utils.py @@ -0,0 +1,24 @@ +import hashlib +import os + +from Crypto.Hash import RIPEMD160 + +def get_filename_without_extension(file_path): + # Get the base filename from the path + filename = os.path.basename(file_path) + # Remove the extension + filename_without_extension = os.path.splitext(filename)[0] + return filename_without_extension + +def decode_hex(hex_data): + # Decode a hex-encoded data into its raw bytecode. + return bytes.fromhex(hex_data) + +def hash160(data): + # SHA-256 followed by RIPEMD-160 (Bitcoin's HASH160). 
+    sha256_hash = hashlib.sha256(data).digest()
+
+    ripemd160 = RIPEMD160.new()
+    ripemd160.update(sha256_hash)
+
+    return ripemd160.digest()
diff --git a/src/verify.py b/src/verify.py
new file mode 100644
index 00000000..7844b78b
--- /dev/null
+++ b/src/verify.py
@@ -0,0 +1,57 @@
+def valid_transaction_syntax(json_transaction):
+    required = ["version", "locktime", "vin", "vout"]
+
+    for field in required:
+        if field not in json_transaction:
+            #print('Required field is missing')
+            return False
+
+    if not isinstance(json_transaction["version"], int):
+        #print('Invalid data type')
+        return False
+
+    if not isinstance(json_transaction["locktime"], int):
+        #print('Invalid data type')
+        return False
+
+    if not isinstance(json_transaction["vin"], list):
+        #print('Invalid data type')
+        return False
+
+    if not isinstance(json_transaction["vout"], list):
+        #print('Invalid data type')
+        return False
+
+    # Check inputs
+    for input in json_transaction['vin']:
+        if not isinstance(input, dict):
+            #print('Invalid data type')
+            return False
+
+        if 'txid' not in input or 'vout' not in input:
+            #print('Invalid data type')
+            return False
+
+    # Check outputs
+    for output in json_transaction['vout']:
+        if not isinstance(output, dict):
+            #print('Invalid data type')
+            return False
+
+        if 'scriptpubkey' not in output or 'value' not in output:
+            #print('Invalid data type')
+            return False
+
+    return True
+
+
+def parse_der_signature_bytes(der_signature):
+    # Parse the DER signature
+    r_length = der_signature[3]
+    r = der_signature[4:4 + r_length]
+    s_length_index = 4 + r_length + 1
+    s_length = der_signature[s_length_index]
+    s = der_signature[s_length_index + 1:s_length_index + 1 + s_length]
+    hash_type = der_signature[-1]
+
+    return r, s, hash_type
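+
+# Note: parse_der_signature_bytes expects the DER signature with the sighash type
+# still appended: 0x30 <total-len> 0x02 <r-len> <r> 0x02 <s-len> <s> <sighash-type>.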