Skip to content

Commit 5707ed7

Browse files
authored
Add block Header type and fix Beef/MerklePath Clone methods (#276)
* add Header type * json tags and lint fix * update block header fields * implement Clone methods for Beef and MerklePath types * add tests * update changelog
1 parent 42df0af commit 5707ed7

File tree

7 files changed

+374
-3
lines changed

7 files changed

+374
-3
lines changed

CHANGELOG.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ All notable changes to this project will be documented in this file. The format
44

55
## Table of Contents
66

7+
- [1.2.13 - 2025-12-05](#1213---2025-12-05)
78
- [1.2.12 - 2025-11-12](#1212---2025-11-12)
89
- [1.2.11 - 2025-10-27](#1211---2025-10-27)
910
- [1.2.10 - 2025-09-16](#1210---2025-09-16)
@@ -47,6 +48,15 @@ All notable changes to this project will be documented in this file. The format
4748
- [1.1.0 - 2024-08-19](#110---2024-08-19)
4849
- [1.0.0 - 2024-06-06](#100---2024-06-06)
4950

51+
## [1.2.13] - 2025-12-05
52+
53+
### Added
54+
- `Header` type in block package for 80-byte Bitcoin block header parsing
55+
- `MerklePath.Clone()` method for deep copying merkle paths
56+
57+
### Fixed
58+
- `Beef.Clone()` now performs deep copy of all nested structures (BUMPs, transactions, input references)
59+
5060
## [1.2.12] - 2025-11-12
5161

5262
### Added

block/header.go

Lines changed: 119 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,119 @@
1+
package block
2+
3+
import (
4+
"bytes"
5+
"encoding/binary"
6+
"encoding/hex"
7+
"fmt"
8+
"io"
9+
10+
"github.com/bsv-blockchain/go-sdk/chainhash"
11+
)
12+
13+
const (
	// HeaderSize is the size of a serialized Bitcoin block header in bytes:
	// 4 (version) + 32 (previous hash) + 32 (merkle root) + 4 (timestamp) +
	// 4 (bits) + 4 (nonce) = 80 bytes.
	HeaderSize = 80
)
17+
18+
// Header represents a Bitcoin block header: the fixed 80-byte structure whose
// double SHA-256 digest is the block hash (see Hash). All integer fields are
// serialized little-endian by Bytes and parsed the same way by
// NewHeaderFromBytes.
type Header struct {
	Version    int32          `json:"version"`      // 4 bytes - Block version
	PrevHash   chainhash.Hash `json:"previousHash"` // 32 bytes - Previous block hash
	MerkleRoot chainhash.Hash `json:"merkleRoot"`   // 32 bytes - Merkle root hash
	Timestamp  uint32         `json:"time"`         // 4 bytes - Block timestamp (Unix time)
	Bits       uint32         `json:"bits"`         // 4 bytes - Difficulty target
	Nonce      uint32         `json:"nonce"`        // 4 bytes - Nonce
}
27+
28+
// NewHeaderFromBytes creates a BlockHeader from an 80-byte slice
29+
func NewHeaderFromBytes(data []byte) (*Header, error) {
30+
if len(data) != HeaderSize {
31+
return nil, fmt.Errorf("invalid header size: expected %d bytes, got %d", HeaderSize, len(data))
32+
}
33+
34+
h := &Header{}
35+
r := bytes.NewReader(data)
36+
37+
// Read version (4 bytes, little-endian)
38+
if err := binary.Read(r, binary.LittleEndian, &h.Version); err != nil {
39+
return nil, fmt.Errorf("failed to read version: %w", err)
40+
}
41+
42+
// Read previous block hash (32 bytes)
43+
if _, err := io.ReadFull(r, h.PrevHash[:]); err != nil {
44+
return nil, fmt.Errorf("failed to read prev block hash: %w", err)
45+
}
46+
47+
// Read merkle root (32 bytes)
48+
if _, err := io.ReadFull(r, h.MerkleRoot[:]); err != nil {
49+
return nil, fmt.Errorf("failed to read merkle root: %w", err)
50+
}
51+
52+
// Read timestamp (4 bytes, little-endian)
53+
if err := binary.Read(r, binary.LittleEndian, &h.Timestamp); err != nil {
54+
return nil, fmt.Errorf("failed to read timestamp: %w", err)
55+
}
56+
57+
// Read bits (4 bytes, little-endian)
58+
if err := binary.Read(r, binary.LittleEndian, &h.Bits); err != nil {
59+
return nil, fmt.Errorf("failed to read bits: %w", err)
60+
}
61+
62+
// Read nonce (4 bytes, little-endian)
63+
if err := binary.Read(r, binary.LittleEndian, &h.Nonce); err != nil {
64+
return nil, fmt.Errorf("failed to read nonce: %w", err)
65+
}
66+
67+
return h, nil
68+
}
69+
70+
// NewHeaderFromHex creates a BlockHeader from a hex string (160 characters)
71+
func NewHeaderFromHex(hexStr string) (*Header, error) {
72+
data, err := hex.DecodeString(hexStr)
73+
if err != nil {
74+
return nil, fmt.Errorf("failed to decode hex: %w", err)
75+
}
76+
return NewHeaderFromBytes(data)
77+
}
78+
79+
// Bytes serializes the block header to an 80-byte slice
80+
func (h *Header) Bytes() []byte {
81+
buf := new(bytes.Buffer)
82+
buf.Grow(HeaderSize)
83+
84+
// Write version (4 bytes, little-endian)
85+
_ = binary.Write(buf, binary.LittleEndian, h.Version)
86+
87+
// Write previous block hash (32 bytes)
88+
buf.Write(h.PrevHash[:])
89+
90+
// Write merkle root (32 bytes)
91+
buf.Write(h.MerkleRoot[:])
92+
93+
// Write timestamp (4 bytes, little-endian)
94+
_ = binary.Write(buf, binary.LittleEndian, h.Timestamp)
95+
96+
// Write bits (4 bytes, little-endian)
97+
_ = binary.Write(buf, binary.LittleEndian, h.Bits)
98+
99+
// Write nonce (4 bytes, little-endian)
100+
_ = binary.Write(buf, binary.LittleEndian, h.Nonce)
101+
102+
return buf.Bytes()
103+
}
104+
105+
// Hex returns the 160-character hexadecimal encoding of the serialized
// 80-byte header; the inverse of NewHeaderFromHex.
func (h *Header) Hex() string {
	return hex.EncodeToString(h.Bytes())
}
109+
110+
// Hash calculates the block hash: the double SHA-256 digest of the 80-byte
// serialized header.
func (h *Header) Hash() chainhash.Hash {
	return chainhash.DoubleHashH(h.Bytes())
}
114+
115+
// String returns a string representation of the header
116+
func (h *Header) String() string {
117+
return fmt.Sprintf("Header{Hash: %s, PrevBlock: %s, Height: ?, Bits: %d}",
118+
h.Hash().String(), h.PrevHash.String(), h.Bits)
119+
}

block/header_test.go

Lines changed: 139 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,139 @@
1+
package block
2+
3+
import (
4+
"encoding/hex"
5+
"strings"
6+
"testing"
7+
)
8+
9+
func TestNewHeaderFromBytes(t *testing.T) {
10+
// Genesis block mainnet header
11+
genesisHex := "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c"
12+
genesisBytes, err := hex.DecodeString(genesisHex)
13+
if err != nil {
14+
t.Fatalf("Failed to decode genesis hex: %v", err)
15+
}
16+
17+
header, err := NewHeaderFromBytes(genesisBytes)
18+
if err != nil {
19+
t.Fatalf("NewHeaderFromBytes() error = %v", err)
20+
}
21+
22+
if header.Version != 1 {
23+
t.Errorf("Version = %d, want 1", header.Version)
24+
}
25+
26+
if header.Timestamp != 1231006505 {
27+
t.Errorf("Timestamp = %d, want 1231006505", header.Timestamp)
28+
}
29+
30+
if header.Bits != 0x1d00ffff {
31+
t.Errorf("Bits = %x, want 0x1d00ffff", header.Bits)
32+
}
33+
34+
if header.Nonce != 2083236893 {
35+
t.Errorf("Nonce = %d, want 2083236893", header.Nonce)
36+
}
37+
}
38+
39+
func TestHeaderBytes(t *testing.T) {
40+
genesisHex := "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c"
41+
genesisBytes, _ := hex.DecodeString(genesisHex)
42+
43+
header, err := NewHeaderFromBytes(genesisBytes)
44+
if err != nil {
45+
t.Fatalf("NewHeaderFromBytes() error = %v", err)
46+
}
47+
48+
serialized := header.Bytes()
49+
50+
if len(serialized) != HeaderSize {
51+
t.Errorf("Bytes() returned %d bytes, want %d", len(serialized), HeaderSize)
52+
}
53+
54+
if hex.EncodeToString(serialized) != genesisHex {
55+
t.Errorf("Bytes() = %s, want %s", hex.EncodeToString(serialized), genesisHex)
56+
}
57+
}
58+
59+
func TestHeaderHex(t *testing.T) {
60+
genesisHex := "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c"
61+
62+
header, err := NewHeaderFromHex(genesisHex)
63+
if err != nil {
64+
t.Fatalf("NewHeaderFromHex() error = %v", err)
65+
}
66+
67+
if header.Hex() != genesisHex {
68+
t.Errorf("Hex() = %s, want %s", header.Hex(), genesisHex)
69+
}
70+
}
71+
72+
func TestHeaderHash(t *testing.T) {
73+
// Genesis block mainnet
74+
genesisHex := "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c"
75+
expectedHash := "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"
76+
77+
header, err := NewHeaderFromHex(genesisHex)
78+
if err != nil {
79+
t.Fatalf("NewHeaderFromHex() error = %v", err)
80+
}
81+
82+
hash := header.Hash()
83+
hashStr := hash.String()
84+
85+
if hashStr != expectedHash {
86+
t.Errorf("Hash() = %s, want %s", hashStr, expectedHash)
87+
}
88+
}
89+
90+
func TestNewHeaderFromBytesInvalidSize(t *testing.T) {
91+
invalidData := []byte{0x01, 0x02, 0x03}
92+
93+
_, err := NewHeaderFromBytes(invalidData)
94+
if err == nil {
95+
t.Error("NewHeaderFromBytes() with invalid size should return error")
96+
}
97+
}
98+
99+
func TestHeaderPrevBlockAndMerkleRoot(t *testing.T) {
100+
// Block 1 mainnet header
101+
block1Hex := "010000006fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e36299"
102+
expectedPrevBlock := "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"
103+
104+
header, err := NewHeaderFromHex(block1Hex)
105+
if err != nil {
106+
t.Fatalf("NewHeaderFromHex() error = %v", err)
107+
}
108+
109+
prevBlockStr := header.PrevHash.String()
110+
if prevBlockStr != expectedPrevBlock {
111+
t.Errorf("PrevBlock = %s, want %s", prevBlockStr, expectedPrevBlock)
112+
}
113+
}
114+
115+
func TestHeaderString(t *testing.T) {
116+
genesisHex := "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c"
117+
118+
header, err := NewHeaderFromHex(genesisHex)
119+
if err != nil {
120+
t.Fatalf("NewHeaderFromHex() error = %v", err)
121+
}
122+
123+
str := header.String()
124+
if str == "" {
125+
t.Error("String() returned empty string")
126+
}
127+
128+
expectedHash := "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"
129+
if !strings.Contains(str, expectedHash) {
130+
t.Errorf("String() should contain hash %s, got %s", expectedHash, str)
131+
}
132+
}
133+
134+
func TestNewHeaderFromHexInvalid(t *testing.T) {
135+
_, err := NewHeaderFromHex("invalid hex")
136+
if err == nil {
137+
t.Error("NewHeaderFromHex() with invalid hex should return error")
138+
}
139+
}

transaction/beef.go

Lines changed: 55 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1138,15 +1138,67 @@ txLoop:
11381138
return log
11391139
}
11401140

1141+
// Clone creates a deep copy of the Beef object.
// All nested structures are copied, so modifications to the clone
// will not affect the original.
//
// The copy proceeds in two passes: the first builds a standalone clone of
// every BUMP and BeefTx; the second rewires each cloned input's
// SourceTransaction pointer to the cloned (not original) source transaction,
// which is only possible once every transaction clone exists.
func (b *Beef) Clone() *Beef {
	c := &Beef{
		Version:      b.Version,
		BUMPs:        make([]*MerklePath, len(b.BUMPs)),
		Transactions: make(map[chainhash.Hash]*BeefTx, len(b.Transactions)),
	}

	// Deep clone BUMPs
	for i, mp := range b.BUMPs {
		c.BUMPs[i] = mp.Clone()
	}

	// First pass: ShallowClone all Transactions
	for txid, beefTx := range b.Transactions {
		cloned := &BeefTx{
			DataFormat: beefTx.DataFormat,
			BumpIndex:  beefTx.BumpIndex,
		}

		// Copy the hash value itself so the clone does not share a pointer
		// with the original.
		if beefTx.KnownTxID != nil {
			id := *beefTx.KnownTxID
			cloned.KnownTxID = &id
		}

		// Nil entries in InputTxids are preserved as nil in the clone.
		if beefTx.InputTxids != nil {
			cloned.InputTxids = make([]*chainhash.Hash, len(beefTx.InputTxids))
			for i, inputTxid := range beefTx.InputTxids {
				if inputTxid != nil {
					id := *inputTxid
					cloned.InputTxids[i] = &id
				}
			}
		}

		if beefTx.Transaction != nil {
			cloned.Transaction = beefTx.Transaction.ShallowClone()
			// Link to cloned BUMP: only when the format actually carries a
			// bump index, and only when that index is in range for the
			// cloned BUMPs slice.
			if beefTx.DataFormat == RawTxAndBumpIndex && beefTx.BumpIndex >= 0 && beefTx.BumpIndex < len(c.BUMPs) {
				cloned.Transaction.MerklePath = c.BUMPs[beefTx.BumpIndex]
			}
		}

		c.Transactions[txid] = cloned
	}

	// Second pass: wire up SourceTransaction references so cloned inputs
	// point at cloned source transactions rather than the originals. Inputs
	// whose source is not present in this Beef keep whatever
	// SourceTransaction value ShallowClone gave them.
	for _, beefTx := range c.Transactions {
		if beefTx.Transaction != nil {
			for _, input := range beefTx.Transaction.Inputs {
				if input.SourceTXID != nil {
					if sourceBeefTx, ok := c.Transactions[*input.SourceTXID]; ok && sourceBeefTx.Transaction != nil {
						input.SourceTransaction = sourceBeefTx.Transaction
					}
				}
			}
		}
	}

	return c
}
11521204

transaction/beef_test.go

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -267,6 +267,26 @@ func TestBeefClone(t *testing.T) {
267267
original, err := NewBeefFromBytes(beefBytes)
268268
require.NoError(t, err)
269269

270+
// Test cloning with nil fields by adding a BeefTx with minimal data
271+
nilFieldsTxID := chainhash.HashH([]byte("test-nil-fields"))
272+
original.Transactions[nilFieldsTxID] = &BeefTx{
273+
DataFormat: TxIDOnly,
274+
KnownTxID: nil,
275+
InputTxids: nil,
276+
Transaction: nil,
277+
}
278+
279+
// Test cloning with InputTxids populated
280+
inputTxidsTxID := chainhash.HashH([]byte("test-input-txids"))
281+
knownID := chainhash.HashH([]byte("known-id"))
282+
inputID1 := chainhash.HashH([]byte("input-1"))
283+
original.Transactions[inputTxidsTxID] = &BeefTx{
284+
DataFormat: TxIDOnly,
285+
KnownTxID: &knownID,
286+
InputTxids: []*chainhash.Hash{&inputID1, nil},
287+
Transaction: nil,
288+
}
289+
270290
// Clone the object
271291
clone := original.Clone()
272292

transaction/merklepath.go

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,15 @@ func (ip IndexedPath) GetOffsetLeaf(layer int, offset uint64) *PathElement {
5555
return nil
5656
}
5757

58+
// Clone creates a deep copy of the MerklePath by serializing and deserializing.
// A nil receiver yields a nil clone.
//
// NOTE(review): the error from NewMerklePathFromBinary is discarded, so a
// path whose Bytes() output fails to re-parse would silently clone to nil —
// confirm that serialization always round-trips for in-memory paths.
func (mp *MerklePath) Clone() *MerklePath {
	if mp == nil {
		return nil
	}
	clone, _ := NewMerklePathFromBinary(mp.Bytes())
	return clone
}
66+
5867
// NewMerklePath creates a new MerklePath with the given block height and path
5968
func NewMerklePath(blockHeight uint32, path [][]*PathElement) *MerklePath {
6069
return &MerklePath{

0 commit comments

Comments
 (0)