
Completed upgradable encoder for Merkle DataNode bytes sequence;

huanghaiquan · 4 years ago · commit dd9279379d · tags/1.1.2^2
8 changed files with 509 additions and 147 deletions:

1. source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleDataNodeEncoder.java (+14 -0)
2. source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleDataNodeEncoder_V0.java (+128 -0)
3. source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleDataNodeEncoder_V1.java (+200 -0)
4. source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleTree.java (+47 -63)
5. source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleTreeEncoder.java (+20 -83)
6. source/ledger/ledger-core/src/test/java/com/jd/blockchain/ledger/core/MerkleDataNodeEncoderTest.java (+96 -0)
7. source/ledger/ledger-core/src/test/java/test/com/jd/blockchain/ledger/core/MerkleTreeTest.java (+1 -1)
8. source/ledger/ledger-model/src/main/java/com/jd/blockchain/ledger/MerkleDataNode.java (+3 -0)
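
Taken together, the new pieces fit as follows: DataNode byte sequences are produced by the newest format encoder, the first byte of the sequence carries the format version, and readers dispatch on that byte so that V0 data already in storage keeps resolving. A minimal round-trip sketch (not part of the commit; it mirrors the new MerkleDataNodeEncoderTest and, like the test, has to live in the com.jd.blockchain.ledger.core package because the encoders are package-private):

// Sketch only -- mirrors MerkleDataNodeEncoderTest, not code from this commit.
MerkleDataNodeEncoder latest = MerkleTreeEncoder.LATEST_DATANODE_ENCODER; // the V1 encoder

Bytes key = new Bytes("demo-key".getBytes());
byte[] value = "demo-value".getBytes();

// create() hashes the value and builds the node's byte sequence; the first byte holds the format version (1);
DataNode node = latest.create(ClassicAlgorithm.SHA256.code(), 1024, key, 0, value);
byte[] encoded = node.toBytes();

// resolve() picks the matching encoder by encoded[0], so V0 and V1 sequences can coexist in storage;
DataNode decoded = MerkleTreeEncoder.resolve(encoded);
assertEquals(node.getNodeHash(), decoded.getNodeHash());
assertEquals(node.getValueHash(), decoded.getValueHash());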

source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleDataNodeEncoder.java (+14 -0)

@@ -0,0 +1,14 @@
package com.jd.blockchain.ledger.core;

import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
import com.jd.blockchain.utils.Bytes;

public interface MerkleDataNodeEncoder {

byte getFormatVersion();

DataNode create(short hashAlgorithm, long sn, Bytes key, long version, byte[] hashedData);

DataNode resolve(byte[] bytes);

}

source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleDataNodeEncoder_V0.java (+128 -0)

@@ -0,0 +1,128 @@
package com.jd.blockchain.ledger.core;

import com.jd.blockchain.crypto.Crypto;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
import com.jd.blockchain.utils.Bytes;
import com.jd.blockchain.utils.io.BytesUtils;
import com.jd.blockchain.utils.io.NumberMask;

/**
* The first version of the DataNode binary sequence encoder, whose format
* version number is 0.
*
* <p>
* In this version, the binary sequence of a DataNode is composed of sn (8 bytes),
* key (variable size), version (8 bytes) and the node hash (32 bytes for SHA256);
*
* <p>
* The node hash is computed over the byte sequence composed of sn, key, version
* and the original value of the key;
*
* <p>
* To make the DataNode binary format upgradable, the first byte of the sequence
* is used as a tag identifying the format version, which reduces the valid range
* of SN to 2^56. <br>
* Every format version follows this rule; version numbers start at 0 and are
* incremented by 1.
*
* @author huanghaiquan
*
*/
class MerkleDataNodeEncoder_V0 implements MerkleDataNodeEncoder {

@Override
public byte getFormatVersion() {
return 0;
}

@Override
public DataNode resolve(byte[] bytes) {
if (bytes[0] != getFormatVersion()) {
throw new IllegalArgumentException("Unsupported version of data node bytes sequence[" + bytes[0] + "]! ");
}

// resolve SN;
byte[] snBytes = new byte[8];
snBytes[0] = 0x0;
System.arraycopy(bytes, 1, snBytes, 1, 7);
long sn = BytesUtils.toLong(snBytes);

// skip bytes of SN;
int offset = 8;

// byte[] keyBytes = BytesEncoding.read(NumberMask.SHORT, in);
// String key = BytesUtils.toString(keyBytes);
int keySize = NumberMask.SHORT.resolveMaskedNumber(bytes, offset);
offset += NumberMask.SHORT.getMaskLength(keySize);
byte[] keyBytes = new byte[keySize];
System.arraycopy(bytes, offset, keyBytes, 0, keySize);
offset += keySize;
// String key = BytesUtils.toString(keyBytes);
Bytes key = new Bytes(keyBytes);

// long version = BytesUtils.readLong(in);
long version = BytesUtils.toLong(bytes, offset);
offset += 8;

// byte[] dataHashBytes = BytesEncoding.read(NumberMask.SHORT, in);
int hashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
offset += NumberMask.TINY.getMaskLength(hashSize);
byte[] nodeHashBytes = new byte[hashSize];
System.arraycopy(bytes, offset, nodeHashBytes, 0, hashSize);
offset += hashSize;
HashDigest nodeHash = new HashDigest(nodeHashBytes);
return new DataNode(nodeHash, sn, key, version, null, bytes);
}

@Deprecated
@Override
public DataNode create(short hashAlgorithm, long sn, Bytes key, long version, byte[] value) {
// Header is composed of sn, key and version;
// So the size of header is: 8 + "mask of key size" + "key bytes" + 8;
int keySize = key.size();
int maskSize = NumberMask.SHORT.getMaskLength(keySize);

int headerSize = 8 + maskSize + keySize + 8;
byte[] headerBytes = new byte[headerSize];

int offset = 0;
// write sn;
offset += BytesUtils.toBytes(sn, headerBytes, 0);

// write the size of key bytes;
NumberMask.SHORT.writeMask(keySize, headerBytes, offset);
offset += maskSize;

// write the key bytes;
offset += key.copyTo(headerBytes, offset, keySize);

// version;
offset += BytesUtils.toBytes(version, headerBytes, offset);

// compute node hash from the combination of header and data value;
byte[] dataBytes = BytesUtils.concat(headerBytes, value);

HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
HashDigest dataNodeHash = hashFunc.hash(dataBytes);

// build bytes of data node, which is composed of sn, key, version and node
// hash;
int hashMaskSize = NumberMask.TINY.getMaskLength(dataNodeHash.size());
int dataNodeSize = headerSize + hashMaskSize + dataNodeHash.size();
byte[] nodeBytes = new byte[dataNodeSize];

offset = 0;
System.arraycopy(headerBytes, 0, nodeBytes, offset, headerSize);
offset += headerSize;
NumberMask.TINY.writeMask(dataNodeHash.size(), nodeBytes, offset);
offset += hashMaskSize;
System.arraycopy(dataNodeHash.toBytes(), 0, nodeBytes, offset, dataNodeHash.size());

// No hash of the original value is computed or recorded in this old version of
// the implementation;
return new DataNode(dataNodeHash, sn, key, version, null, nodeBytes);
}
}
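
Read together, create() and resolve() above imply the following V0 layout (informal summary, not in the source; the SN is written big-endian, which is what lets its leading byte double as the version tag):

V0 byte sequence:
bytes 0..7    sn (8 bytes); byte 0 is the format tag and is always 0, since SN < 2^56
next          NumberMask.SHORT length mask of the key, followed by the key bytes
next 8 bytes  version
next          NumberMask.TINY length mask of the hash, followed by the node hash bytes
where node hash = hash(sn + key + version + original value), so verifying it requires the original value.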

source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleDataNodeEncoder_V1.java (+200 -0)

@@ -0,0 +1,200 @@
package com.jd.blockchain.ledger.core;

import com.jd.blockchain.crypto.Crypto;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
import com.jd.blockchain.utils.Bytes;
import com.jd.blockchain.utils.io.BytesUtils;
import com.jd.blockchain.utils.io.NumberMask;

/**
* The second version of the DataNode binary sequence encoder, whose format
* version number is 1.
*
* <p>
* In this version, the binary sequence of a DataNode is composed of sn (8 bytes),
* key (variable size), version (8 bytes), the hash of the original value of the
* key, and the node hash;
*
* <p>
* The node hash is computed over the byte sequence composed of sn, key, version
* and the hash of the original value of the key;
*
* <p>
* To make the DataNode binary format upgradable, the first byte of the sequence
* is used as a tag identifying the format version, which reduces the valid range
* of SN to 2^56. <br>
* Every format version follows this rule; version numbers start at 0 and are
* incremented by 1.
*
* @author huanghaiquan
*
*/
class MerkleDataNodeEncoder_V1 implements MerkleDataNodeEncoder {

@Override
public byte getFormatVersion() {
return 1;
}

/**
* Parse a DataNode from its byte sequence;
* <p>
* the byte sequence is: sn + key + version + data_hash + node_hash;
*
* @param bytes
* @return
*/
@Override
public DataNode resolve(byte[] bytes) {
if (bytes[0] != getFormatVersion()) {
throw new IllegalArgumentException("Unsupported version of data node bytes sequence[" + bytes[0] + "]! ");
}

// resolve SN;
byte[] snBytes = new byte[8];
snBytes[0] = 0x0;
System.arraycopy(bytes, 1, snBytes, 1, 7);
long sn = BytesUtils.toLong(snBytes);

// skip bytes of SN;
int offset = 8;

// resolve key of data;
// First, resolve the number mask of the key size;
// Second, read the key bytes;
int keySize = NumberMask.SHORT.resolveMaskedNumber(bytes, offset);
offset += NumberMask.SHORT.getMaskLength(keySize);
byte[] keyBytes = new byte[keySize];
System.arraycopy(bytes, offset, keyBytes, 0, keySize);
offset += keySize;
Bytes key = new Bytes(keyBytes);

// Resolve version of key;
long version = BytesUtils.toLong(bytes, offset);
offset += 8;

// resolve data hash;
int dataHashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
offset += NumberMask.TINY.getMaskLength(dataHashSize);
byte[] dataHashBytes = new byte[dataHashSize];
System.arraycopy(bytes, offset, dataHashBytes, 0, dataHashSize);
offset += dataHashSize;
HashDigest dataHash = new HashDigest(dataHashBytes);

// resolve node hash;
int nodeHashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
offset += NumberMask.TINY.getMaskLength(nodeHashSize);
byte[] nodeHashBytes = new byte[nodeHashSize];
System.arraycopy(bytes, offset, nodeHashBytes, 0, nodeHashSize);
offset += nodeHashSize;
HashDigest nodeHash = new HashDigest(nodeHashBytes);

return new DataNode(nodeHash, sn, key, version, dataHash, bytes);
}

public DataNode newDataNode(short hashAlgorithm, long sn, Bytes key, long version, HashDigest dataHash) {
HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
return create(hashFunc, sn, key, version, dataHash);
}

@Override
public DataNode create(short hashAlgorithm, long sn, Bytes key, long version, byte[] data) {
HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
HashDigest dataHash = hashFunc.hash(data);

return create(hashFunc, sn, key, version, dataHash);
}

/**
* A data node's byte sequence is composed of the header (see
* {@link #buildKeyHeaderBytes(long, Bytes, long)} ), the data hash and the node hash;
*
* <p>
* In general, the byte sequence is: sn + key + version + data_hash +
* node_hash;
*
* @param hashFunc
* @param sn
* @param key
* @param version
* @param dataHash
* @return
*/
private DataNode create(HashFunction hashFunc, long sn, Bytes key, long version, HashDigest dataHash) {
byte[] headerBytes = buildKeyHeaderBytes(sn, key, version);
int headerSize = headerBytes.length;

// Hash the header and the data separately, so that a Merkle proof can be verified without having to pass along the original data;
HashDigest headerHash = hashFunc.hash(headerBytes);
byte[] dataHashBytes = BytesUtils.concat(headerHash.getRawDigest(), dataHash.getRawDigest());

HashDigest dataNodeHash = hashFunc.hash(dataHashBytes);

int dataHashSize = dataHash.size();
int nodeHashSize = dataNodeHash.size();
int dataHashMaskSize = NumberMask.TINY.getMaskLength(dataHashSize);
int nodeHashMaskSize = NumberMask.TINY.getMaskLength(nodeHashSize);
int nodeSize = headerSize + dataHashMaskSize + dataHashSize + nodeHashMaskSize + nodeHashSize;
byte[] nodeBytes = new byte[nodeSize];

// write header;
int offset = 0;
System.arraycopy(headerBytes, 0, nodeBytes, offset, headerSize);
offset += headerSize;

// write data hash;
NumberMask.TINY.writeMask(dataHashSize, nodeBytes, offset);
offset += dataHashMaskSize;
System.arraycopy(dataHash.toBytes(), 0, nodeBytes, offset, dataHashSize);
offset += dataHashSize;

// write node hash;
NumberMask.TINY.writeMask(nodeHashSize, nodeBytes, offset);
offset += nodeHashMaskSize;
System.arraycopy(dataNodeHash.toBytes(), 0, nodeBytes, offset, nodeHashSize);

// set format version;
nodeBytes[0] = getFormatVersion();

return new DataNode(dataNodeHash, sn, key, version, dataHash, nodeBytes);
}

/**
* The header is composed of sn + key + version; its byte sequence is: sn_size(8) +
* number_mask_of_key_size + key_bytes + version_size(8);
*
* @param sn
* @param key
* @param version
* @return
*/
private static byte[] buildKeyHeaderBytes(long sn, Bytes key, long version) {
int keySize = key.size();
int maskSize = NumberMask.SHORT.getMaskLength(keySize);

// Size Of header = sn + key + version;
// sn_size(8) + mask_size + key_size + version_size(8);
int headerSize = 8 + maskSize + keySize + 8;
byte[] headerBytes = new byte[headerSize];

// write bytes of sn;
int offset = 0;
offset += BytesUtils.toBytes(sn, headerBytes, 0);

// write bytes of key mask;
NumberMask.SHORT.writeMask(keySize, headerBytes, offset);
offset += maskSize;

// write bytes of key;
offset += key.copyTo(headerBytes, offset, keySize);

// write bytes of version;
offset += BytesUtils.toBytes(version, headerBytes, offset);

return headerBytes;
}

}
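
A note on the hashing scheme in create() above (informal summary, not in the source): the header and the value are hashed separately and then combined,

header     = sn(8) + key_mask + key + version(8)
value_hash = hash(value)                              // computed in create(short, ...) from the raw value
node_hash  = hash( hash(header) || value_hash )       // raw digests concatenated

so a Merkle proof can carry value_hash and be verified without shipping the original value, which the V0 format could not do. Unlike V0, the first byte of the final sequence is then explicitly overwritten with the format version (1).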

source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleTree.java (+47 -63)

@@ -52,8 +52,9 @@ public class MerkleTree implements Transactional {
public static final int TREE_DEGREE = 16;

-public static final int MAX_LEVEL = 15;
+public static final int MAX_LEVEL = 14;

+// Exactly 2^56 (7 bytes): the first of the 8 SN bytes is reserved as the encoding-format version tag of the DataNode;
public static final long MAX_DATACOUNT = power(TREE_DEGREE, MAX_LEVEL);

public static final long MAX_SN = MAX_DATACOUNT - 1;
@@ -147,14 +148,10 @@ public class MerkleTree implements Transactional {
/**
* Create a Merkle tree;
*
-* @param rootHash
-* the root hash of the tree; if null, an empty Merkle tree is actually created;
-* @param verifyOnLoad
-* whether to verify a node's hash when loading it from external storage;
-* @param kvStorage
-* the storage service that persists the Merkle nodes;
-* @param readonly
-* whether the tree is read-only;
+* @param rootHash the root hash of the tree; if null, an empty Merkle tree is actually created;
+* @param verifyOnLoad whether to verify a node's hash when loading it from external storage;
+* @param kvStorage the storage service that persists the Merkle nodes;
+* @param readonly whether the tree is read-only;
*/
public MerkleTree(HashDigest rootHash, CryptoSetting setting, String keyPrefix, ExPolicyKVStorage kvStorage,
boolean readonly) {
@@ -164,14 +161,10 @@ public class MerkleTree implements Transactional {
/**
* Create a Merkle tree;
*
-* @param rootHash
-* the root hash of the tree; if null, an empty Merkle tree is actually created;
-* @param verifyOnLoad
-* whether to verify a node's hash when loading it from external storage;
-* @param kvStorage
-* the storage service that persists the Merkle nodes;
-* @param readonly
-* whether the tree is read-only;
+* @param rootHash the root hash of the tree; if null, an empty Merkle tree is actually created;
+* @param verifyOnLoad whether to verify a node's hash when loading it from external storage;
+* @param kvStorage the storage service that persists the Merkle nodes;
+* @param readonly whether the tree is read-only;
*/
public MerkleTree(HashDigest rootHash, CryptoSetting setting, Bytes keyPrefix, ExPolicyKVStorage kvStorage,
boolean readonly) {
@@ -205,8 +198,7 @@ public class MerkleTree implements Transactional {
* <p>
* If sn is out of range, an {@link IndexOutOfBoundsException} is thrown;
*
-* @param sn
-* the sequence number of the data;
+* @param sn the sequence number of the data;
* @return an instance of the Merkle proof;
*/
public MerkleProof getProof(long sn) {
@@ -242,13 +234,10 @@ public class MerkleTree implements Transactional {
* Note: the Merkle tree only stores the hash of the specified data and the associated key, not the original data, so the caller must handle the storage of the data itself; <br>
* Besides, the hash is computed over the concatenation of the key and the data content;
*
-* @param sn
-* the sequence number uniquely associated with this data; sn must be greater than or equal to 0;
-* @param key
-* the key uniquely associated with this data;
+* @param sn the sequence number uniquely associated with this data; sn must be greater than or equal to 0;
+* @param key the key uniquely associated with this data;
* @param version
-* @param hashedData
-* the data content that takes part in the hash computation; note: this value is not stored by the Merkle tree;
+* @param hashedData the data content that takes part in the hash computation; note: this value is not stored by the Merkle tree;
* @return
*/
public MerkleDataNode setData(long sn, String key, long version, byte[] hashedData) {
@@ -266,13 +255,10 @@ public class MerkleTree implements Transactional {
* Note: the Merkle tree only stores the hash of the specified data and the associated key, not the original data, so the caller must handle the storage of the data itself; <br>
* Besides, the hash is computed over the concatenation of the key and the data content;
*
-* @param sn
-* the sequence number uniquely associated with this data; sn must be greater than or equal to 0;
-* @param key
-* the key uniquely associated with this data;
+* @param sn the sequence number uniquely associated with this data; sn must be greater than or equal to 0;
+* @param key the key uniquely associated with this data;
* @param version
-* @param hashedData
-* the data content that takes part in the hash computation; note: this value is not stored by the Merkle tree;
+* @param hashedData the data content that takes part in the hash computation; note: this value is not stored by the Merkle tree;
* @return
*/
public MerkleDataNode setData(long sn, Bytes key, long version, byte[] hashedData) {
@@ -285,7 +271,8 @@ public class MerkleTree implements Transactional {
if (sn > MAX_SN) {
throw new IllegalArgumentException("The sn is great than MAX[" + MAX_SN + "]!");
}
-DataNode dataNode = MerkleTreeEncoder.newDataNode(setting.getHashAlgorithm(), sn, key, version, hashedData);
+DataNode dataNode = MerkleTreeEncoder.LATEST_DATANODE_ENCODER.create(setting.getHashAlgorithm(), sn, key,
+		version, hashedData);
updatedDataNodes.put(sn, dataNode);
return dataNode;
}
@@ -591,10 +578,8 @@ public class MerkleTree implements Transactional {
/**
* Recompute the hashes of all child nodes and of this node itself, and return the number of newly added data nodes;
*
-* @param pathNode
-* the path node whose hash needs to be recomputed;
-* @param updatedNodes
-* a list used to record the updated nodes;
+* @param pathNode the path node whose hash needs to be recomputed;
+* @param updatedNodes a list used to record the updated nodes;
* @return
*/
@SuppressWarnings("unused")
@@ -732,12 +717,10 @@ public class MerkleTree implements Transactional {
*
* If sn is out of range, an {@link IndexOutOfBoundsException} is thrown;
*
-* @param sn
-* the sequence number of the data node;
-* @param path
-* a list used to record the node path; its length must be at least the total number of levels of the current Merkle tree (i.e. path.length >= root.level +
-* 1);<br>
-* if this parameter is null, the path is not recorded;
+* @param sn   the sequence number of the data node;
+* @param path a list used to record the node path; its length must be at least the total number of levels of the current Merkle tree (i.e. path.length >= root.level +
+*             1);<br>
+*             if this parameter is null, the path is not recorded;
* @return the data node corresponding to the sequence number;<br>
*         if it does not exist, null is returned; note that in this case the specified path parameter is still filled with the lookup path;
*/
@@ -844,7 +827,8 @@ public class MerkleTree implements Transactional {
if (bytes == null || bytes.length == 0) {
return null;
}
-DataNode dataNode = MerkleTreeEncoder.parse(bytes);
+
+DataNode dataNode = MerkleTreeEncoder.resolve(bytes);
if (verify && !hashBytes.equals(dataNode.nodeHash)) {
String keyStr = hashBytes.toBase58();
String actualHashStr = dataNode.nodeHash.toBase58();
@@ -861,8 +845,7 @@ public class MerkleTree implements Transactional {
* Note: this method does not handle overflow; the caller must avoid it;
*
* @param value
-* @param x
-* an integer greater than or equal to 0;
+* @param x an integer greater than or equal to 0;
* @return
*/
private static long power(long value, int x) {
@@ -1140,14 +1123,10 @@ public class MerkleTree implements Transactional {
/**
* Create a path node;
*
-* @param hashAlgorithm
-* the hash algorithm used to generate the node;
-* @param startingSN
-* the starting sequence number of the subtree represented by this path node;
-* @param level
-* the level depth of the path node; path-node depth starts from 1 and increases upwards (data nodes are at tree depth 0);
-* @param dataCount
-* the number of data nodes contained in the subtree represented by this path node;
+* @param hashAlgorithm the hash algorithm used to generate the node;
+* @param startingSN    the starting sequence number of the subtree represented by this path node;
+* @param level         the level depth of the path node; path-node depth starts from 1 and increases upwards (data nodes are at tree depth 0);
+* @param dataCount     the number of data nodes contained in the subtree represented by this path node;
*/
private PathNode(CryptoAlgorithm hashAlgorithm, long startingSN, int level, long dataCount) {
this(hashAlgorithm, startingSN, level, dataCount, new HashDigest[TREE_DEGREE], null);
@@ -1338,10 +1317,8 @@ public class MerkleTree implements Transactional {
/**
* Deserialize a node from the specified byte array;
*
-* @param bytes
-* the byte array; valid input should be identical to the output of {@link #toBytes()};
-* @param checkHash
-* whether to recompute and verify the node's hash;
+* @param bytes     the byte array; valid input should be identical to the output of {@link #toBytes()};
+* @param checkHash whether to recompute and verify the node's hash;
* @return
*/
private static PathNode parse(byte[] bytes, boolean checkHash) {
@@ -1437,17 +1414,19 @@ public class MerkleTree implements Transactional {
private long version;

-private byte[] dataNodeBytes;
+private byte[] nodeBytes;

-DataNode(long sn, Bytes key, long version, HashDigest dataHash, byte[] dataBytes) {
+private HashDigest valueHash;
+
+DataNode(HashDigest nodeHash, long sn, Bytes key, long version, HashDigest valueHash, byte[] nodeBytes) {
this.sn = sn;
this.key = key;
this.version = version;
-this.nodeHash = dataHash;
-this.dataNodeBytes = dataBytes;
+this.nodeHash = nodeHash;
+this.valueHash = valueHash;
+this.nodeBytes = nodeBytes;
}

@Override
protected long getStartingSN() {
return sn;
@@ -1498,6 +1477,11 @@ public class MerkleTree implements Transactional {
return version;
}

+@Override
+public HashDigest getValueHash() {
+return valueHash;
+}
+
@Override
public byte[] toBytes() {
// ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -1525,7 +1509,7 @@ public class MerkleTree implements Transactional {
//
// System.arraycopy(nodeHash.toBytes(), 0, totalBytes, offset, hashSize);

-return dataNodeBytes;
+return nodeBytes;
}

@Override


source/ledger/ledger-core/src/main/java/com/jd/blockchain/ledger/core/MerkleTreeEncoder.java (+20 -83)

@@ -1,94 +1,31 @@
package com.jd.blockchain.ledger.core;

-import com.jd.blockchain.crypto.Crypto;
-import com.jd.blockchain.crypto.CryptoAlgorithm;
-import com.jd.blockchain.crypto.HashDigest;
-import com.jd.blockchain.crypto.HashFunction;
-import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
-import com.jd.blockchain.utils.Bytes;
-import com.jd.blockchain.utils.io.BytesUtils;
-import com.jd.blockchain.utils.io.NumberMask;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;

-public class MerkleTreeEncoder {
+import com.jd.blockchain.ledger.core.MerkleTree.DataNode;

-static DataNode newDataNode(CryptoAlgorithm hashAlgorithm, long sn, Bytes key, long version,
-byte[] hashedData) {
-return newDataNode(hashAlgorithm.code(), sn, key, version, hashedData);
-}
+class MerkleTreeEncoder {

-static DataNode newDataNode(short hashAlgorithm, long sn, Bytes key, long version, byte[] hashedData) {
-// byte[] keyStrBytes = BytesUtils.toBytes(key);
-// int maskSize = NumberMask.SHORT.getMaskLength(keyStrBytes.length);
-int keySize = key.size();
-int maskSize = NumberMask.SHORT.getMaskLength(keySize);
+static final MerkleDataNodeEncoder LATEST_DATANODE_ENCODER = new MerkleDataNodeEncoder_V1();

-// int bodySize = 8 + maskSize + keyStrBytes.length + 8;// sn + key + version;
-int bodySize = 8 + maskSize + keySize + 8;// sn + key + version;
-byte[] bodyBytes = new byte[bodySize];
+static final MerkleDataNodeEncoder V0_DATANODE_ENCODER = new MerkleDataNodeEncoder_V0();

-int offset = 0;
-offset += BytesUtils.toBytes(sn, bodyBytes, 0);
+static final List<MerkleDataNodeEncoder> DATANODE_ENCODERS = Collections
+.unmodifiableList(Arrays.asList(LATEST_DATANODE_ENCODER, V0_DATANODE_ENCODER));

-// NumberMask.SHORT.writeMask(keyStrBytes.length, bodyBytes, offset);
-NumberMask.SHORT.writeMask(keySize, bodyBytes, offset);
-offset += maskSize;
+/**
+ * @param bytes
+ * @return
+ */
+static DataNode resolve(byte[] bytes) {
+for (MerkleDataNodeEncoder encoder : MerkleTreeEncoder.DATANODE_ENCODERS) {
+if (encoder.getFormatVersion() == bytes[0]) {
+return encoder.resolve(bytes);
+}
+}

-// System.arraycopy(keyStrBytes, 0, bodyBytes, offset, keyStrBytes.length);
-// offset += keyStrBytes.length;
-offset += key.copyTo(bodyBytes, offset, keySize);
-
-// TODO: version;
-offset += BytesUtils.toBytes(version, bodyBytes, offset);
-
-byte[] dataBytes = BytesUtils.concat(bodyBytes, hashedData);
-
-HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
-HashDigest dataHash = hashFunc.hash(dataBytes);
-
-int hashMaskSize = NumberMask.TINY.getMaskLength(dataHash.size());
-int dataNodeSize = bodySize + hashMaskSize + dataHash.size();
-byte[] dataNodeBytes = new byte[dataNodeSize];
-
-offset = 0;
-System.arraycopy(bodyBytes, 0, dataNodeBytes, offset, bodySize);
-offset += bodySize;
-NumberMask.TINY.writeMask(dataHash.size(), dataNodeBytes, offset);
-offset += hashMaskSize;
-System.arraycopy(dataHash.toBytes(), 0, dataNodeBytes, offset, dataHash.size());
-
-return new DataNode(sn, key, version, dataHash, dataNodeBytes);
-}
-
-static DataNode parse(byte[] bytes) {
-// InputStream in = new ByteArrayInputStream(bytes);
-
-int offset = 0;
-long sn = BytesUtils.toLong(bytes, offset);
-offset += 8;
-
-// byte[] keyBytes = BytesEncoding.read(NumberMask.SHORT, in);
-// String key = BytesUtils.toString(keyBytes);
-int keySize = NumberMask.SHORT.resolveMaskedNumber(bytes, offset);
-offset += NumberMask.SHORT.getMaskLength(keySize);
-byte[] keyBytes = new byte[keySize];
-System.arraycopy(bytes, offset, keyBytes, 0, keySize);
-offset += keySize;
-// String key = BytesUtils.toString(keyBytes);
-Bytes key = new Bytes(keyBytes);
-
-// long version = BytesUtils.readLong(in);
-long version = BytesUtils.toLong(bytes, offset);
-offset += 8;
-
-// byte[] dataHashBytes = BytesEncoding.read(NumberMask.SHORT, in);
-int hashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
-offset += NumberMask.TINY.getMaskLength(hashSize);
-byte[] dataHashBytes = new byte[hashSize];
-System.arraycopy(bytes, offset, dataHashBytes, 0, hashSize);
-offset += hashSize;
-HashDigest dataHash = new HashDigest(dataHashBytes);
-return new DataNode(sn, key, version, dataHash, bytes);
-}
+throw new IllegalStateException("Unsupported version of DataNode bytes sequence[" + bytes[0] + "]!");
+}
}
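
This small registry is what makes the format upgradable: resolve() walks DATANODE_ENCODERS and dispatches on the leading version byte. A future format would presumably be added roughly like this (hypothetical sketch, not part of the commit):

// Hypothetical V2 -- illustration only, not code from this repository.
class MerkleDataNodeEncoder_V2 implements MerkleDataNodeEncoder {

@Override
public byte getFormatVersion() {
return 2;
}

@Override
public DataNode create(short hashAlgorithm, long sn, Bytes key, long version, byte[] hashedData) {
throw new UnsupportedOperationException("sketch only");
}

@Override
public DataNode resolve(byte[] bytes) {
throw new UnsupportedOperationException("sketch only");
}
}

// ...after which LATEST_DATANODE_ENCODER would point at the V2 instance, while V1 and V0 stay
// registered in DATANODE_ENCODERS so that previously stored nodes keep resolving.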

source/ledger/ledger-core/src/test/java/com/jd/blockchain/ledger/core/MerkleDataNodeEncoderTest.java (+96 -0)

@@ -0,0 +1,96 @@
package com.jd.blockchain.ledger.core;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import java.util.Random;

import org.junit.Test;

import com.jd.blockchain.crypto.Crypto;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
import com.jd.blockchain.crypto.service.classic.ClassicAlgorithm;
import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
import com.jd.blockchain.utils.Bytes;

public class MerkleDataNodeEncoderTest {

@Test
public void testEnocoderV0() {
MerkleDataNodeEncoder encoderV0 = new MerkleDataNodeEncoder_V0();

Random rand = new Random();

byte[] data = new byte[512];
byte[] key = new byte[256];

rand.nextBytes(data);
rand.nextBytes(key);

long sn = 1024;
long version = 1;

DataNode nodeV0 = encoderV0.create(ClassicAlgorithm.SHA256.code(), sn, new Bytes(key), version, data);

assertNull(nodeV0.getValueHash());

assertEquals(sn, nodeV0.getSN());
assertEquals(version, nodeV0.getVersion());
assertEquals(new Bytes(key), nodeV0.getKey());

byte[] nodeBytes = nodeV0.toBytes();

DataNode nodeV0_reversed = encoderV0.resolve(nodeBytes);
assertNull(nodeV0_reversed.getValueHash());

assertEquals(nodeV0.getNodeHash(), nodeV0_reversed.getNodeHash());
assertEquals(encoderV0.getFormatVersion(), nodeBytes[0]);


assertEquals(sn, nodeV0_reversed.getSN());
assertEquals(version, nodeV0_reversed.getVersion());
assertEquals(new Bytes(key), nodeV0_reversed.getKey());
}

@Test
public void testEnocoderV1() {
MerkleDataNodeEncoder encoderV1 = new MerkleDataNodeEncoder_V1();

Random rand = new Random();

byte[] data = new byte[512];
byte[] key = new byte[256];

rand.nextBytes(data);
rand.nextBytes(key);

HashFunction hashFunc = Crypto.getHashFunction(ClassicAlgorithm.SHA256);
HashDigest dataHash = hashFunc.hash(data);

long sn = 1024;
long version = 1;

DataNode node = encoderV1.create(ClassicAlgorithm.SHA256.code(), sn, new Bytes(key), version, data);

assertEquals(dataHash, node.getValueHash());

assertEquals(sn, node.getSN());
assertEquals(version, node.getVersion());
assertEquals(new Bytes(key), node.getKey());

byte[] nodeBytes = node.toBytes();

DataNode node_reversed = encoderV1.resolve(nodeBytes);

assertEquals(dataHash, node_reversed.getValueHash());
assertEquals(node.getNodeHash(), node_reversed.getNodeHash());
assertEquals(encoderV1.getFormatVersion(), nodeBytes[0]);

assertEquals(sn, node_reversed.getSN());
assertEquals(version, node_reversed.getVersion());
assertEquals(new Bytes(key), node_reversed.getKey());

}

}

source/ledger/ledger-core/src/test/java/test/com/jd/blockchain/ledger/core/MerkleTreeTest.java (+1 -1)

@@ -763,7 +763,7 @@ public class MerkleTreeTest {
}
}
}
@SuppressWarnings("unused")
private static int getLevel(long dataCount) {
if (dataCount < 0) {


source/ledger/ledger-model/src/main/java/com/jd/blockchain/ledger/MerkleDataNode.java (+3 -0)

@@ -1,5 +1,6 @@
package com.jd.blockchain.ledger;

+import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.utils.Bytes;

public interface MerkleDataNode extends MerkleNode {

@@ -9,5 +10,7 @@ public interface MerkleDataNode extends MerkleNode {
Bytes getKey();

long getVersion();

+HashDigest getValueHash();
+
}
