@@ -5,12 +5,10 @@ import static com.jd.blockchain.crypto.CryptoBytes.ALGORYTHM_CODE_SIZE;

import java.util.Arrays;

import com.jd.blockchain.crypto.CryptoAlgorithm;
import com.jd.blockchain.crypto.CryptoBytes;
import com.jd.blockchain.crypto.CryptoException;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
-import com.jd.blockchain.utils.security.RipeMD160Utils;
+import com.jd.blockchain.crypto.utils.classic.RIPEMD160Utils;

public class RIPEMD160HashFunction implements HashFunction {

@@ -19,7 +17,7 @@ public class RIPEMD160HashFunction implements HashFunction {

    private static final int DIGEST_BYTES = 160 / 8;

    private static final int DIGEST_LENGTH = ALGORYTHM_CODE_SIZE + DIGEST_BYTES;

    RIPEMD160HashFunction() {
    }

@@ -30,7 +28,6 @@ public class RIPEMD160HashFunction implements HashFunction {

    @Override
    public HashDigest hash(byte[] data) {
        if (data == null) {
            throw new CryptoException("data is null!");
        }

@@ -39,6 +36,16 @@ public class RIPEMD160HashFunction implements HashFunction {
        return new HashDigest(RIPEMD160, digestBytes);
    }

+   @Override
+   public HashDigest hash(byte[] data, int offset, int len) {
+       if (data == null) {
+           throw new CryptoException("data is null!");
+       }
+
+       byte[] digestBytes = RIPEMD160Utils.hash(data, offset, len);
+       return new HashDigest(RIPEMD160, digestBytes);
+   }
+
    @Override
    public boolean verify(HashDigest digest, byte[] data) {
        HashDigest hashDigest = hash(data);

@@ -59,5 +66,5 @@ public class RIPEMD160HashFunction implements HashFunction {
            throw new CryptoException("digestBytes is invalid!");
        }
    }
}
@@ -5,12 +5,10 @@ import static com.jd.blockchain.crypto.CryptoBytes.ALGORYTHM_CODE_SIZE;

import java.util.Arrays;

import com.jd.blockchain.crypto.CryptoAlgorithm;
import com.jd.blockchain.crypto.CryptoBytes;
import com.jd.blockchain.crypto.CryptoException;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
-import com.jd.blockchain.utils.security.ShaUtils;
+import com.jd.blockchain.crypto.utils.classic.SHA256Utils;

public class SHA256HashFunction implements HashFunction {

@@ -30,7 +28,6 @@ public class SHA256HashFunction implements HashFunction {

    @Override
    public HashDigest hash(byte[] data) {
        if (data == null) {
            throw new CryptoException("data is null!");
        }

@@ -38,6 +35,16 @@ public class SHA256HashFunction implements HashFunction {
        byte[] digestBytes = SHA256Utils.hash(data);
        return new HashDigest(SHA256, digestBytes);
    }

+   @Override
+   public HashDigest hash(byte[] data, int offset, int len) {
+       if (data == null) {
+           throw new CryptoException("data is null!");
+       }
+
+       byte[] digestBytes = SHA256Utils.hash(data, offset, len);
+       return new HashDigest(SHA256, digestBytes);
+   }
+
    @Override
    public boolean verify(HashDigest digest, byte[] data) {
@@ -10,16 +10,26 @@ import org.bouncycastle.crypto.digests.RIPEMD160Digest;
 */
public class RIPEMD160Utils {

    // The length of RIPEMD160 output is 20 bytes
-   private static final int RIPEMD160DIGEST_LENGTH = 160 / 8;
+   public static final int RIPEMD160DIGEST_LENGTH = 160 / 8;

    public static byte[] hash(byte[] data) {
        byte[] result = new byte[RIPEMD160DIGEST_LENGTH];
        RIPEMD160Digest ripemd160Digest = new RIPEMD160Digest();

        ripemd160Digest.update(data, 0, data.length);
        ripemd160Digest.doFinal(result, 0);
        return result;
    }

+   public static byte[] hash(byte[] data, int offset, int len) {
+       byte[] result = new byte[RIPEMD160DIGEST_LENGTH];
+       RIPEMD160Digest ripemd160Digest = new RIPEMD160Digest();
+
+       ripemd160Digest.update(data, offset, len);
+       ripemd160Digest.doFinal(result, 0);
+       return result;
+   }
}
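For illustration, a minimal sketch showing that the new slice overload matches hashing an explicit copy of the same range; it assumes only the RIPEMD160Utils class above, and the demo class name is hypothetical:

import java.util.Arrays;

import com.jd.blockchain.crypto.utils.classic.RIPEMD160Utils;

public class RIPEMD160SliceDemo {
    public static void main(String[] args) {
        byte[] buffer = "header-payload-footer".getBytes();
        // hash only the 7-byte "payload" region without copying it out;
        byte[] sliceDigest = RIPEMD160Utils.hash(buffer, 7, 7);
        // the result should equal hashing an explicit copy of the same region;
        byte[] copyDigest = RIPEMD160Utils.hash(Arrays.copyOfRange(buffer, 7, 14));
        System.out.println(Arrays.equals(sliceDigest, copyDigest)); // true
    }
}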
@@ -11,7 +11,7 @@ import org.bouncycastle.crypto.digests.SHA256Digest;
public class SHA256Utils {

    // The length of SHA256 output is 32 bytes
-   private static final int SHA256DIGEST_LENGTH = 256 / 8;
+   public static final int SHA256DIGEST_LENGTH = 256 / 8;

    public static byte[] hash(byte[] data) {

@@ -22,4 +22,14 @@ public class SHA256Utils {
        sha256Digest.doFinal(result, 0);
        return result;
    }

+   public static byte[] hash(byte[] data, int offset, int len) {
+       byte[] result = new byte[SHA256DIGEST_LENGTH];
+       SHA256Digest sha256Digest = new SHA256Digest();
+
+       sha256Digest.update(data, offset, len);
+       sha256Digest.doFinal(result, 0);
+       return result;
+   }
}
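The utility methods above are thin wrappers over BouncyCastle's update/doFinal pattern; update() may be called repeatedly, so a large input can also be hashed chunk by chunk. A minimal sketch (the demo class name is hypothetical):

import org.bouncycastle.crypto.digests.SHA256Digest;

public class Sha256StreamingDemo {
    public static void main(String[] args) {
        SHA256Digest sha256Digest = new SHA256Digest();
        byte[] part1 = "hello, ".getBytes();
        byte[] part2 = "world".getBytes();
        // feed the input in two chunks; equivalent to hashing the concatenation;
        sha256Digest.update(part1, 0, part1.length);
        sha256Digest.update(part2, 0, part2.length);
        byte[] result = new byte[sha256Digest.getDigestSize()]; // 32 bytes for SHA-256
        sha256Digest.doFinal(result, 0);
    }
}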
@@ -10,6 +10,14 @@ public interface HashFunction extends CryptoFunction {
     */
    HashDigest hash(byte[] data);

+   /**
+    * Computes the hash of the specified data;
+    *
+    * @param data
+    * @return
+    */
+   HashDigest hash(byte[] data, int offset, int len);
+
    /**
     * Verifies whether the hash digest matches the specified data;
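For callers, the new overload allows hashing a region of a larger buffer without an intermediate copy. A minimal sketch, assuming the Crypto factory and ClassicAlgorithm.SHA256 that appear elsewhere in this changeset:

import com.jd.blockchain.crypto.Crypto;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
import com.jd.blockchain.crypto.service.classic.ClassicAlgorithm;

public class HashSliceDemo {
    public static void main(String[] args) {
        HashFunction hashFunc = Crypto.getHashFunction(ClassicAlgorithm.SHA256);
        byte[] buffer = new byte[1024];
        // hash only the first 128 bytes of the buffer, without copying them out;
        HashDigest digest = hashFunc.hash(buffer, 0, 128);
        System.out.println(digest.size());
    }
}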
@@ -16,7 +16,7 @@ public class SM3HashFunction implements HashFunction {

    private static final int DIGEST_BYTES = 256 / 8;

    private static final int DIGEST_LENGTH = CryptoBytes.ALGORYTHM_CODE_SIZE + DIGEST_BYTES;

    SM3HashFunction() {
    }

@@ -27,7 +27,6 @@ public class SM3HashFunction implements HashFunction {

    @Override
    public HashDigest hash(byte[] data) {
        if (data == null) {
            throw new CryptoException("data is null!");
        }

@@ -36,6 +35,16 @@ public class SM3HashFunction implements HashFunction {
        return new HashDigest(SM3, digestBytes);
    }

+   @Override
+   public HashDigest hash(byte[] data, int offset, int len) {
+       if (data == null) {
+           throw new CryptoException("data is null!");
+       }
+
+       byte[] digestBytes = SM3Utils.hash(data, offset, len);
+       return new HashDigest(SM3, digestBytes);
+   }
+
    @Override
    public boolean verify(HashDigest digest, byte[] data) {
        HashDigest hashDigest = hash(data);
@@ -4,19 +4,30 @@ import org.bouncycastle.crypto.digests.SM3Digest;
public class SM3Utils {

    // The length of sm3 output is 32 bytes
-   private static final int SM3DIGEST_LENGTH = 32;
+   public static final int SM3DIGEST_LENGTH = 32;

    public static byte[] hash(byte[] data) {
        byte[] result = new byte[SM3DIGEST_LENGTH];
        SM3Digest sm3digest = new SM3Digest();

        sm3digest.update(data, 0, data.length);
        sm3digest.doFinal(result, 0);

        return result;
    }

+   public static byte[] hash(byte[] data, int offset, int len) {
+       byte[] result = new byte[SM3DIGEST_LENGTH];
+       SM3Digest sm3digest = new SM3Digest();
+
+       sm3digest.update(data, offset, len);
+       sm3digest.doFinal(result, 0);
+
+       return result;
+   }
}
@@ -11,14 +11,14 @@ import com.jd.blockchain.ledger.HashProof;
 * @author huanghaiquan
 *
 */
-public class HashDegistList implements HashProof {
+public class HashDigestList implements HashProof {

    private List<HashDigest> proofs = new ArrayList<HashDigest>();

-   public HashDegistList() {
+   public HashDigestList() {
    }

-   public HashDegistList(HashProof proof) {
+   public HashDigestList(HashProof proof) {
        concat(proof);
    }

@@ -208,7 +208,7 @@ public class MerkleAccount implements CompositeAccount, HashProvable, MerkleSnap
        if (rootProof == null) {
            return null;
        }
-       HashDegistList proof = new HashDegistList(rootProof);
+       HashDigestList proof = new HashDigestList(rootProof);
        proof.concat(dataProof);
        return proof;
    }
@@ -0,0 +1,14 @@
package com.jd.blockchain.ledger.core;

import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
import com.jd.blockchain.utils.Bytes;

public interface MerkleDataNodeEncoder {

    byte getFormatVersion();

    DataNode create(short hashAlgorithm, long sn, Bytes key, long version, byte[] hashedData);

    DataNode resolve(byte[] bytes);
}
@@ -0,0 +1,128 @@
package com.jd.blockchain.ledger.core;

import com.jd.blockchain.crypto.Crypto;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
import com.jd.blockchain.utils.Bytes;
import com.jd.blockchain.utils.io.BytesUtils;
import com.jd.blockchain.utils.io.NumberMask;

/**
 * The first version of the DataNode binary sequence encoder, whose version
 * number is 0.
 *
 * <p>
 * This version of the DataNode binary sequence is composed of sn (8 bytes),
 * key (variable size), version (8 bytes) and node hash (32 bytes for SHA256);
 *
 * <p>
 * In this version, the node hash is computed from the bytes sequence composed
 * of sn, key, version and the original value of the key;
 *
 * <p>
 * For the purpose of upgrading the version of the DataNode binary format, we
 * use the first byte of the binary sequence as the tag to identify the version
 * of the DataNode binary format, and reduce the maximum value of the valid
 * range of SN to 2^56. <br>
 * Other versions of the implementation also follow the above rules; the
 * version of the data node binary format is marked from 0, incremented by 1.
 *
 * @author huanghaiquan
 *
 */
class MerkleDataNodeEncoder_V0 implements MerkleDataNodeEncoder {

    @Override
    public byte getFormatVersion() {
        return 0;
    }

    @Override
    public DataNode resolve(byte[] bytes) {
        if (bytes[0] != getFormatVersion()) {
            throw new IllegalArgumentException("Unsupported version of data node bytes sequence[" + bytes[0] + "]!");
        }

        // resolve SN;
        byte[] snBytes = new byte[8];
        snBytes[0] = 0x0;
        System.arraycopy(bytes, 1, snBytes, 1, 7);
        long sn = BytesUtils.toLong(snBytes);

        // skip bytes of SN;
        int offset = 8;

        // resolve the key: first the number mask of the key size, then the key bytes;
        int keySize = NumberMask.SHORT.resolveMaskedNumber(bytes, offset);
        offset += NumberMask.SHORT.getMaskLength(keySize);
        byte[] keyBytes = new byte[keySize];
        System.arraycopy(bytes, offset, keyBytes, 0, keySize);
        offset += keySize;
        Bytes key = new Bytes(keyBytes);

        // resolve the version of the key;
        long version = BytesUtils.toLong(bytes, offset);
        offset += 8;

        // resolve the node hash;
        int hashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
        offset += NumberMask.TINY.getMaskLength(hashSize);
        byte[] nodeHashBytes = new byte[hashSize];
        System.arraycopy(bytes, offset, nodeHashBytes, 0, hashSize);
        offset += hashSize;
        HashDigest nodeHash = new HashDigest(nodeHashBytes);

        return new DataNode(nodeHash, sn, key, version, null, bytes);
    }

    @Deprecated
    @Override
    public DataNode create(short hashAlgorithm, long sn, Bytes key, long version, byte[] value) {
        // The header is composed of sn, key and version;
        // so the size of the header is: 8 + "mask of key size" + "key bytes" + 8;
        int keySize = key.size();
        int maskSize = NumberMask.SHORT.getMaskLength(keySize);
        int headerSize = 8 + maskSize + keySize + 8;
        byte[] headerBytes = new byte[headerSize];

        int offset = 0;
        // write sn;
        offset += BytesUtils.toBytes(sn, headerBytes, 0);
        // write the size of the key bytes;
        NumberMask.SHORT.writeMask(keySize, headerBytes, offset);
        offset += maskSize;
        // write the key bytes;
        offset += key.copyTo(headerBytes, offset, keySize);
        // write the version;
        offset += BytesUtils.toBytes(version, headerBytes, offset);

        // compute the node hash from the combination of the header and the data value;
        byte[] dataBytes = BytesUtils.concat(headerBytes, value);
        HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
        HashDigest dataNodeHash = hashFunc.hash(dataBytes);

        // build the bytes of the data node, which is composed of sn, key, version and
        // node hash;
        int hashMaskSize = NumberMask.TINY.getMaskLength(dataNodeHash.size());
        int dataNodeSize = headerSize + hashMaskSize + dataNodeHash.size();
        byte[] nodeBytes = new byte[dataNodeSize];
        offset = 0;
        System.arraycopy(headerBytes, 0, nodeBytes, offset, headerSize);
        offset += headerSize;
        NumberMask.TINY.writeMask(dataNodeHash.size(), nodeBytes, offset);
        offset += hashMaskSize;
        System.arraycopy(dataNodeHash.toBytes(), 0, nodeBytes, offset, dataNodeHash.size());

        // No value hash was computed and recorded in this old version of the
        // implementation;
        return new DataNode(dataNodeHash, sn, key, version, null, nodeBytes);
    }
}
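To make the version-tagging rule concrete: the high byte of the 8-byte SN field carries the format version, and decoding recovers the SN by zeroing that byte, exactly as resolve() does above. A JDK-only sketch, assuming the big-endian byte order that BytesUtils.toLong implies:

import java.nio.ByteBuffer;

public class SnVersionTagDemo {
    public static void main(String[] args) {
        long sn = 0x00123456789ABCDEL; // any SN below 2^56 fits in the low 7 bytes;
        byte[] field = ByteBuffer.allocate(8).putLong(sn).array();
        field[0] = 1; // stamp the format-version tag into the high byte;

        // decoding: zero the version byte, then read the remaining 7 bytes as the SN;
        byte[] snBytes = field.clone();
        snBytes[0] = 0x0;
        long decoded = ByteBuffer.wrap(snBytes).getLong();
        System.out.println(decoded == sn); // true
    }
}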
@@ -0,0 +1,200 @@
package com.jd.blockchain.ledger.core;

import com.jd.blockchain.crypto.Crypto;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
import com.jd.blockchain.utils.Bytes;
import com.jd.blockchain.utils.io.BytesUtils;
import com.jd.blockchain.utils.io.NumberMask;

/**
 * The second version of the DataNode binary sequence encoder, whose version
 * number is 1.
 *
 * <p>
 * This version of the DataNode binary sequence is composed of sn (8 bytes),
 * key (variable size), version (8 bytes), the hash of the original value of
 * the key, and the node hash;
 *
 * <p>
 * In this version, the node hash is computed from the bytes sequence composed
 * of sn, key, version and the hash of the original value of the key;
 *
 * <p>
 * For the purpose of upgrading the version of the DataNode binary format, we
 * use the first byte of the binary sequence as the tag to identify the version
 * of the DataNode binary format, and reduce the maximum value of the valid
 * range of SN to 2^56. <br>
 * Other versions of the implementation also follow the above rules; the
 * version of the data node binary format is marked from 0, incremented by 1.
 *
 * @author huanghaiquan
 *
 */
class MerkleDataNodeEncoder_V1 implements MerkleDataNodeEncoder {

    @Override
    public byte getFormatVersion() {
        return 1;
    }

    /**
     * Parses a DataNode from its bytes sequence;
     * <p>
     * The bytes sequence is: sn + key + version + data_hash + node_hash;
     *
     * @param bytes
     * @return
     */
    @Override
    public DataNode resolve(byte[] bytes) {
        if (bytes[0] != getFormatVersion()) {
            throw new IllegalArgumentException("Unsupported version of data node bytes sequence[" + bytes[0] + "]!");
        }

        // resolve SN;
        byte[] snBytes = new byte[8];
        snBytes[0] = 0x0;
        System.arraycopy(bytes, 1, snBytes, 1, 7);
        long sn = BytesUtils.toLong(snBytes);

        // skip bytes of SN;
        int offset = 8;

        // resolve the key of the data;
        // First, resolve the number mask of the key size;
        // Second, read the key bytes;
        int keySize = NumberMask.SHORT.resolveMaskedNumber(bytes, offset);
        offset += NumberMask.SHORT.getMaskLength(keySize);
        byte[] keyBytes = new byte[keySize];
        System.arraycopy(bytes, offset, keyBytes, 0, keySize);
        offset += keySize;
        Bytes key = new Bytes(keyBytes);

        // resolve the version of the key;
        long version = BytesUtils.toLong(bytes, offset);
        offset += 8;

        // resolve the data hash;
        int dataHashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
        offset += NumberMask.TINY.getMaskLength(dataHashSize);
        byte[] dataHashBytes = new byte[dataHashSize];
        System.arraycopy(bytes, offset, dataHashBytes, 0, dataHashSize);
        offset += dataHashSize;
        HashDigest dataHash = new HashDigest(dataHashBytes);

        // resolve the node hash;
        int nodeHashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
        offset += NumberMask.TINY.getMaskLength(nodeHashSize);
        byte[] nodeHashBytes = new byte[nodeHashSize];
        System.arraycopy(bytes, offset, nodeHashBytes, 0, nodeHashSize);
        offset += nodeHashSize;
        HashDigest nodeHash = new HashDigest(nodeHashBytes);

        return new DataNode(nodeHash, sn, key, version, dataHash, bytes);
    }

    public DataNode newDataNode(short hashAlgorithm, long sn, Bytes key, long version, HashDigest dataHash) {
        HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
        return create(hashFunc, sn, key, version, dataHash);
    }

    @Override
    public DataNode create(short hashAlgorithm, long sn, Bytes key, long version, byte[] data) {
        HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
        HashDigest dataHash = hashFunc.hash(data);
        return create(hashFunc, sn, key, version, dataHash);
    }

    /**
     * A data node's bytes sequence is composed of the header (see
     * {@link #buildKeyHeaderBytes(long, Bytes, long)}), the data hash and the
     * node hash;
     *
     * <p>
     * In general, the bytes sequence is: sn + key + version + data_hash +
     * node_hash;
     *
     * @param hashFunc
     * @param sn
     * @param key
     * @param version
     * @param dataHash
     * @return
     */
    private DataNode create(HashFunction hashFunc, long sn, Bytes key, long version, HashDigest dataHash) {
        byte[] headerBytes = buildKeyHeaderBytes(sn, key, version);
        int headerSize = headerBytes.length;

        // Hash the header and the data separately, so that a Merkle proof can be
        // verified by hashing without passing the original data;
        HashDigest headerHash = hashFunc.hash(headerBytes);
        byte[] dataHashBytes = BytesUtils.concat(headerHash.getRawDigest(), dataHash.getRawDigest());
        HashDigest dataNodeHash = hashFunc.hash(dataHashBytes);

        int dataHashSize = dataHash.size();
        int nodeHashSize = dataNodeHash.size();
        int dataHashMaskSize = NumberMask.TINY.getMaskLength(dataHashSize);
        int nodeHashMaskSize = NumberMask.TINY.getMaskLength(nodeHashSize);
        int nodeSize = headerSize + dataHashMaskSize + dataHashSize + nodeHashMaskSize + nodeHashSize;
        byte[] nodeBytes = new byte[nodeSize];

        // write the header;
        int offset = 0;
        System.arraycopy(headerBytes, 0, nodeBytes, offset, headerSize);
        offset += headerSize;

        // write the data hash;
        NumberMask.TINY.writeMask(dataHashSize, nodeBytes, offset);
        offset += dataHashMaskSize;
        System.arraycopy(dataHash.toBytes(), 0, nodeBytes, offset, dataHashSize);
        offset += dataHashSize;

        // write the node hash;
        NumberMask.TINY.writeMask(nodeHashSize, nodeBytes, offset);
        offset += nodeHashMaskSize;
        System.arraycopy(dataNodeHash.toBytes(), 0, nodeBytes, offset, nodeHashSize);

        // set the format version;
        nodeBytes[0] = getFormatVersion();

        return new DataNode(dataNodeHash, sn, key, version, dataHash, nodeBytes);
    }

    /**
     * The header is composed of sn + key + version; the bytes sequence is:
     * sn_size(8) + number_mask_of_key_size + key_bytes + version_size(8);
     *
     * @param sn
     * @param key
     * @param version
     * @return
     */
    private static byte[] buildKeyHeaderBytes(long sn, Bytes key, long version) {
        int keySize = key.size();
        int maskSize = NumberMask.SHORT.getMaskLength(keySize);

        // Size of header = sn + key + version;
        // sn_size(8) + mask_size + key_size + version_size(8);
        int headerSize = 8 + maskSize + keySize + 8;
        byte[] headerBytes = new byte[headerSize];

        // write the bytes of sn;
        int offset = 0;
        offset += BytesUtils.toBytes(sn, headerBytes, 0);

        // write the bytes of the key mask;
        NumberMask.SHORT.writeMask(keySize, headerBytes, offset);
        offset += maskSize;

        // write the bytes of the key;
        offset += key.copyTo(headerBytes, offset, keySize);

        // write the bytes of the version;
        offset += BytesUtils.toBytes(version, headerBytes, offset);

        return headerBytes;
    }
}
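The comment in create() above is the key design point of V1: the node hash is computed as H(H(header) || H(value)) rather than H(header || value), so a verifier holding only the header and the value hash can re-check the node hash without the raw value. An illustrative stand-in that uses the JDK's MessageDigest in place of the pluggable HashFunction:

import java.security.MessageDigest;
import java.util.Arrays;

public class TwoLevelNodeHashDemo {
    public static void main(String[] args) throws Exception {
        byte[] header = "sn|key|version".getBytes();
        byte[] value = "original value".getBytes();

        // what the encoder computes when writing the node;
        byte[] valueHash = sha256(value);
        byte[] nodeHash = nodeHash(header, valueHash);

        // a verifier that has only `header` and `valueHash` (not `value`) can re-check:
        byte[] recomputed = nodeHash(header, valueHash);
        System.out.println(Arrays.equals(nodeHash, recomputed)); // true
    }

    static byte[] nodeHash(byte[] header, byte[] valueHash) throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        md.update(sha256(header)); // hash of the header;
        md.update(valueHash); // hash of the original value;
        return md.digest(); // H( H(header) || H(value) )
    }

    static byte[] sha256(byte[] data) throws Exception {
        return MessageDigest.getInstance("SHA-256").digest(data);
    }
}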
@@ -52,8 +52,9 @@ public class MerkleTree implements Transactional {

    public static final int TREE_DEGREE = 16;

-   public static final int MAX_LEVEL = 15;
+   public static final int MAX_LEVEL = 14;

+   // Exactly 2^56 (7 bytes); the first byte of the 8-byte SN is reserved as the
+   // version tag of the DataNode encoding format;
    public static final long MAX_DATACOUNT = power(TREE_DEGREE, MAX_LEVEL);

    public static final long MAX_SN = MAX_DATACOUNT - 1;
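A quick check of the arithmetic behind the new constants: with TREE_DEGREE = 16 and MAX_LEVEL = 14, MAX_DATACOUNT is 16^14 = (2^4)^14 = 2^56, so every valid SN fits in 7 bytes and the 8th byte stays free for the version tag:

public class MaxDataCountCheck {
    public static void main(String[] args) {
        long maxDataCount = 1L;
        for (int i = 0; i < 14; i++) { // MAX_LEVEL = 14
            maxDataCount *= 16; // TREE_DEGREE = 16
        }
        System.out.println(maxDataCount == (1L << 56)); // true: 16^14 == 2^56
    }
}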
@@ -147,14 +148,10 @@ public class MerkleTree implements Transactional {

    /**
     * Creates a Merkle tree;
     *
     * @param rootHash     the root hash of the tree; if null, an empty Merkle tree is actually created;
     * @param verifyOnLoad whether to verify node hashes when loading nodes from external storage;
     * @param kvStorage    the storage service that persists the Merkle nodes;
     * @param readonly     whether the tree is read-only;
     */
    public MerkleTree(HashDigest rootHash, CryptoSetting setting, String keyPrefix, ExPolicyKVStorage kvStorage,
            boolean readonly) {
@@ -164,14 +161,10 @@ public class MerkleTree implements Transactional {

    /**
     * Creates a Merkle tree;
     *
     * @param rootHash     the root hash of the tree; if null, an empty Merkle tree is actually created;
     * @param verifyOnLoad whether to verify node hashes when loading nodes from external storage;
     * @param kvStorage    the storage service that persists the Merkle nodes;
     * @param readonly     whether the tree is read-only;
     */
    public MerkleTree(HashDigest rootHash, CryptoSetting setting, Bytes keyPrefix, ExPolicyKVStorage kvStorage,
            boolean readonly) {
@@ -205,8 +198,7 @@ public class MerkleTree implements Transactional {
     * <p>
     * If sn is out of range, an {@link IndexOutOfBoundsException} is thrown;
     *
     * @param sn the sequence number of the data;
     * @return an instance of the Merkle proof;
     */
    public MerkleProof getProof(long sn) {
@@ -242,13 +234,10 @@ public class MerkleTree implements Transactional {
     * Note: the Merkle tree only stores the hash of the specified data and the
     * associated key, not the original data, so the caller has to handle the
     * storage of the data itself; <br>
     * in addition, the hash is computed over the concatenation of the key and
     * the data content;
     *
     * @param sn         the sequence number uniquely associated with this data; sn must be greater than or equal to 0;
     * @param key        the key uniquely associated with this data;
     * @param version
     * @param hashedData the data content to include in the hash computation; note: this value is not stored by the Merkle tree;
     * @return
     */
    public MerkleDataNode setData(long sn, String key, long version, byte[] hashedData) {
@@ -266,13 +255,10 @@ public class MerkleTree implements Transactional {
     * Note: the Merkle tree only stores the hash of the specified data and the
     * associated key, not the original data, so the caller has to handle the
     * storage of the data itself; <br>
     * in addition, the hash is computed over the concatenation of the key and
     * the data content;
     *
     * @param sn         the sequence number uniquely associated with this data; sn must be greater than or equal to 0;
     * @param key        the key uniquely associated with this data;
     * @param version
     * @param hashedData the data content to include in the hash computation; note: this value is not stored by the Merkle tree;
     * @return
     */
    public MerkleDataNode setData(long sn, Bytes key, long version, byte[] hashedData) {
@@ -285,7 +271,8 @@ public class MerkleTree implements Transactional {
        if (sn > MAX_SN) {
            throw new IllegalArgumentException("The sn is greater than MAX[" + MAX_SN + "]!");
        }
-       DataNode dataNode = DataNode.newDataNode(setting.getHashAlgorithm(), sn, key, version, hashedData);
+       DataNode dataNode = MerkleTreeEncoder.LATEST_DATANODE_ENCODER.create(setting.getHashAlgorithm(), sn, key,
+               version, hashedData);
        updatedDataNodes.put(sn, dataNode);
        return dataNode;
    }
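For context, a minimal sketch of the write path this change affects, mirroring the test code later in this changeset; the CryptoSetting, key prefix and ExPolicyKVStorage arguments are assumed to be supplied by the caller, and the helper name is hypothetical:

// A sketch assuming `setting`, `keyPrefix` and `storage` are provided by the
// caller (see MerkleTreeTest below for a concrete mock setup):
static HashDigest writeAndCommit(CryptoSetting setting, Bytes keyPrefix, ExPolicyKVStorage storage, byte[] data) {
    // a null root hash creates an empty, writable Merkle tree;
    MerkleTree mkt = new MerkleTree(null, setting, keyPrefix, storage, false);
    // setData now routes through MerkleTreeEncoder.LATEST_DATANODE_ENCODER (V1);
    MerkleDataNode nd = mkt.setData(0, "KEY-0", 0, data);
    mkt.commit();
    return nd.getNodeHash();
}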
@@ -591,10 +578,8 @@ public class MerkleTree implements Transactional {

    /**
     * Recomputes the hashes of all child nodes and of this node itself, and
     * returns the number of newly added data nodes;
     *
     * @param pathNode     the path node whose hash needs to be recomputed;
     * @param updatedNodes a list used to record the updated nodes;
     * @return
     */
    @SuppressWarnings("unused")
@@ -732,12 +717,10 @@ public class MerkleTree implements Transactional {
     *
     * If sn is out of range, an {@link IndexOutOfBoundsException} is thrown;
     *
     * @param sn   the sequence number of the data node;
     * @param path a list used to record the node path; its length must be greater
     *             than or equal to the total number of levels of the current
     *             Merkle tree (i.e. path.length >= root.level + 1); <br>
     *             if this parameter is null, the path is not recorded;
     * @return the data node corresponding to the sequence number; <br>
     *         if it does not exist, null is returned; note that in this case the
     *         lookup path is still written into the specified path parameter;
     */
@@ -844,7 +827,8 @@ public class MerkleTree implements Transactional {
        if (bytes == null || bytes.length == 0) {
            return null;
        }
-       DataNode dataNode = DataNode.parse(bytes);
+       DataNode dataNode = MerkleTreeEncoder.resolve(bytes);
        if (verify && !hashBytes.equals(dataNode.nodeHash)) {
            String keyStr = hashBytes.toBase58();
            String actualHashStr = dataNode.nodeHash.toBase58();
@@ -861,8 +845,7 @@ public class MerkleTree implements Transactional {
     * Note: this method does not handle overflow; the caller has to avoid it;
     *
     * @param value
     * @param x an integer greater than or equal to 0;
     * @return
     */
    private static long power(long value, int x) {
@@ -1140,14 +1123,10 @@ public class MerkleTree implements Transactional {

    /**
     * Creates a path node;
     *
     * @param hashAlgorithm the hash algorithm used to generate the node;
     * @param startingSN    the starting sequence number of the subtree represented by this path node;
     * @param level         the level depth of the path node; path levels start at 1 and increase upwards (a data node has depth 0);
     * @param dataCount     the number of data nodes contained in the subtree represented by this path node;
     */
    private PathNode(CryptoAlgorithm hashAlgorithm, long startingSN, int level, long dataCount) {
        this(hashAlgorithm, startingSN, level, dataCount, new HashDigest[TREE_DEGREE], null);
@@ -1338,10 +1317,8 @@ public class MerkleTree implements Transactional {

    /**
     * Deserializes a node from the specified byte array;
     *
     * @param bytes     the byte array; valid input should be identical to the output of the {@link #toBytes()} method;
     * @param checkHash whether to recompute and verify the node hash;
     * @return
     */
    private static PathNode parse(byte[] bytes, boolean checkHash) {
@@ -1429,7 +1406,7 @@ public class MerkleTree implements Transactional {
     * @author huanghaiquan
     *
     */
-   private static class DataNode extends AbstractMerkleNode implements MerkleDataNode {
+   static class DataNode extends AbstractMerkleNode implements MerkleDataNode {

        private long sn;

@@ -1437,64 +1414,17 @@ public class MerkleTree implements Transactional {

        private long version;

-       private byte[] dataNodeBytes;
+       private byte[] nodeBytes;
+
+       private HashDigest valueHash;

-       private DataNode(long sn, Bytes key, long version, HashDigest dataHash, byte[] dataBytes) {
+       DataNode(HashDigest nodeHash, long sn, Bytes key, long version, HashDigest valueHash, byte[] nodeBytes) {
            this.sn = sn;
            this.key = key;
            this.version = version;
-           this.nodeHash = dataHash;
-           this.dataNodeBytes = dataBytes;
-       }
-
-       @SuppressWarnings("unused")
-       private static DataNode newDataNode(CryptoAlgorithm hashAlgorithm, long sn, Bytes key, long version,
-               byte[] hashedData) {
-           return newDataNode(hashAlgorithm.code(), sn, key, version, hashedData);
-       }
-
-       private static DataNode newDataNode(short hashAlgorithm, long sn, Bytes key, long version, byte[] hashedData) {
-           int keySize = key.size();
-           int maskSize = NumberMask.SHORT.getMaskLength(keySize);
-           int bodySize = 8 + maskSize + keySize + 8; // sn + key + version;
-           byte[] bodyBytes = new byte[bodySize];
-           int offset = 0;
-           offset += BytesUtils.toBytes(sn, bodyBytes, 0);
-           NumberMask.SHORT.writeMask(keySize, bodyBytes, offset);
-           offset += maskSize;
-           offset += key.copyTo(bodyBytes, offset, keySize);
-           offset += BytesUtils.toBytes(version, bodyBytes, offset);
-
-           byte[] dataBytes = BytesUtils.concat(bodyBytes, hashedData);
-           HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
-           HashDigest dataHash = hashFunc.hash(dataBytes);
-
-           int hashMaskSize = NumberMask.TINY.getMaskLength(dataHash.size());
-           int dataNodeSize = bodySize + hashMaskSize + dataHash.size();
-           byte[] dataNodeBytes = new byte[dataNodeSize];
-           offset = 0;
-           System.arraycopy(bodyBytes, 0, dataNodeBytes, offset, bodySize);
-           offset += bodySize;
-           NumberMask.TINY.writeMask(dataHash.size(), dataNodeBytes, offset);
-           offset += hashMaskSize;
-           System.arraycopy(dataHash.toBytes(), 0, dataNodeBytes, offset, dataHash.size());
-
-           return new DataNode(sn, key, version, dataHash, dataNodeBytes);
+           this.nodeHash = nodeHash;
+           this.valueHash = valueHash;
+           this.nodeBytes = nodeBytes;
        }

        @Override

@@ -1547,6 +1477,11 @@ public class MerkleTree implements Transactional {
            return version;
        }

+       @Override
+       public HashDigest getValueHash() {
+           return valueHash;
+       }
+
        @Override
        public byte[] toBytes() {

@@ -1574,38 +1509,7 @@ public class MerkleTree implements Transactional {
-           return dataNodeBytes;
+           return nodeBytes;
        }

-       private static DataNode parse(byte[] bytes) {
-           int offset = 0;
-           long sn = BytesUtils.toLong(bytes, offset);
-           offset += 8;
-           int keySize = NumberMask.SHORT.resolveMaskedNumber(bytes, offset);
-           offset += NumberMask.SHORT.getMaskLength(keySize);
-           byte[] keyBytes = new byte[keySize];
-           System.arraycopy(bytes, offset, keyBytes, 0, keySize);
-           offset += keySize;
-           Bytes key = new Bytes(keyBytes);
-           long version = BytesUtils.toLong(bytes, offset);
-           offset += 8;
-           int hashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
-           offset += NumberMask.TINY.getMaskLength(hashSize);
-           byte[] dataHashBytes = new byte[hashSize];
-           System.arraycopy(bytes, offset, dataHashBytes, 0, hashSize);
-           offset += hashSize;
-           HashDigest dataHash = new HashDigest(dataHashBytes);
-           return new DataNode(sn, key, version, dataHash, bytes);
-       }

        @Override
@@ -0,0 +1,31 @@
package com.jd.blockchain.ledger.core;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import com.jd.blockchain.ledger.core.MerkleTree.DataNode;

class MerkleTreeEncoder {

    static final MerkleDataNodeEncoder LATEST_DATANODE_ENCODER = new MerkleDataNodeEncoder_V1();

    static final MerkleDataNodeEncoder V0_DATANODE_ENCODER = new MerkleDataNodeEncoder_V0();

    static final List<MerkleDataNodeEncoder> DATANODE_ENCODERS = Collections
            .unmodifiableList(Arrays.asList(LATEST_DATANODE_ENCODER, V0_DATANODE_ENCODER));

    /**
     * Resolves a DataNode from its bytes sequence, dispatching on the format
     * version recorded in the first byte;
     *
     * @param bytes
     * @return
     */
    static DataNode resolve(byte[] bytes) {
        for (MerkleDataNodeEncoder encoder : MerkleTreeEncoder.DATANODE_ENCODERS) {
            if (encoder.getFormatVersion() == bytes[0]) {
                return encoder.resolve(bytes);
            }
        }
        throw new IllegalStateException("Unsupported version of DataNode bytes sequence[" + bytes[0] + "]!");
    }
}
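To add a future format, a new encoder implementing MerkleDataNodeEncoder would be registered here and take over as the latest encoder, while the older encoders keep resolving previously stored nodes. A hypothetical sketch (MerkleDataNodeEncoder_V2 does not exist in this changeset):

// Hypothetical only; shows the intended extension pattern of the registry:
static final MerkleDataNodeEncoder LATEST_DATANODE_ENCODER = new MerkleDataNodeEncoder_V2();

static final List<MerkleDataNodeEncoder> DATANODE_ENCODERS = Collections.unmodifiableList(
        Arrays.asList(LATEST_DATANODE_ENCODER, new MerkleDataNodeEncoder_V1(), new MerkleDataNodeEncoder_V0()));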
@@ -0,0 +1,153 @@
package com.jd.blockchain.ledger.core;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import java.util.Random;

import org.junit.Test;

import com.jd.blockchain.crypto.Crypto;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
import com.jd.blockchain.crypto.service.classic.ClassicAlgorithm;
import com.jd.blockchain.ledger.core.MerkleTree.DataNode;
import com.jd.blockchain.utils.Bytes;

public class MerkleDataNodeEncoderTest {

    @Test
    public void testEncoderV0() {
        Random rand = new Random();

        byte[] data = new byte[512];
        byte[] keyBytes = new byte[256];

        rand.nextBytes(data);
        rand.nextBytes(keyBytes);

        Bytes key = new Bytes(keyBytes);

        long sn = 1024;
        long version = 1;
        doTestV0(sn, version, key, data);

        sn = 0;
        version = 1000;
        doTestV0(sn, version, key, data);

        // the maximum valid SN; note the long literal, since (1 << 56) would
        // overflow as a 32-bit int shift;
        sn = (1L << 56) - 1;
        version = 1000;
        doTestV0(sn, version, key, data);
    }

    private void doTestV0(long sn, long version, Bytes key, byte[] data) {
        MerkleDataNodeEncoder encoderV0 = new MerkleDataNodeEncoder_V0();
        DataNode nodeV0 = encoderV0.create(ClassicAlgorithm.SHA256.code(), sn, key, version, data);

        assertNull(nodeV0.getValueHash());

        assertEquals(sn, nodeV0.getSN());
        assertEquals(version, nodeV0.getVersion());
        assertEquals(key, nodeV0.getKey());

        byte[] nodeBytes = nodeV0.toBytes();

        DataNode nodeV0_reversed = encoderV0.resolve(nodeBytes);
        assertNull(nodeV0_reversed.getValueHash());
        assertEquals(nodeV0.getNodeHash(), nodeV0_reversed.getNodeHash());
        assertEquals(encoderV0.getFormatVersion(), nodeBytes[0]);
        assertEquals(sn, nodeV0_reversed.getSN());
        assertEquals(version, nodeV0_reversed.getVersion());
        assertEquals(key, nodeV0_reversed.getKey());
    }

    @Test
    public void testEncoderV1() {
        Random rand = new Random();

        byte[] data = new byte[512];
        byte[] keyBytes = new byte[256];

        rand.nextBytes(data);
        rand.nextBytes(keyBytes);

        Bytes key = new Bytes(keyBytes);

        long sn = 1024;
        long version = 1;
        doTestV1(sn, version, key, data);

        sn = 0;
        version = 10088;
        doTestV1(sn, version, key, data);

        sn = (1L << 56) - 1;
        version = 1000;
        doTestV1(sn, version, key, data);
    }

    private void doTestV1(long sn, long version, Bytes key, byte[] data) {
        HashFunction hashFunc = Crypto.getHashFunction(ClassicAlgorithm.SHA256);
        HashDigest dataHash = hashFunc.hash(data);

        MerkleDataNodeEncoder encoderV1 = new MerkleDataNodeEncoder_V1();
        DataNode node = encoderV1.create(ClassicAlgorithm.SHA256.code(), sn, key, version, data);

        assertEquals(dataHash, node.getValueHash());

        assertEquals(sn, node.getSN());
        assertEquals(version, node.getVersion());
        assertEquals(key, node.getKey());

        byte[] nodeBytes = node.toBytes();

        DataNode node_reversed = encoderV1.resolve(nodeBytes);
        assertEquals(dataHash, node_reversed.getValueHash());
        assertEquals(node.getNodeHash(), node_reversed.getNodeHash());
        assertEquals(encoderV1.getFormatVersion(), nodeBytes[0]);
        assertEquals(sn, node_reversed.getSN());
        assertEquals(version, node_reversed.getVersion());
        assertEquals(key, node_reversed.getKey());
    }

    @Test
    public void testCompatibility() {
        Random rand = new Random();

        byte[] data = new byte[512];
        byte[] keyBytes = new byte[256];

        rand.nextBytes(data);
        rand.nextBytes(keyBytes);

        Bytes key = new Bytes(keyBytes);

        long sn = 1024;
        long version = 1;

        PreviousDataNode pdataNode = PreviousDataNode.newDataNode(ClassicAlgorithm.SHA256.code(), sn, key, version,
                data);

        MerkleDataNodeEncoder encoderV0 = new MerkleDataNodeEncoder_V0();
        DataNode dataNode = encoderV0.create(ClassicAlgorithm.SHA256.code(), sn, key, version, data);

        assertEquals(pdataNode.getNodeHash(), dataNode.getNodeHash());
        assertEquals(pdataNode.getSN(), dataNode.getSN());
        assertEquals(pdataNode.getVersion(), dataNode.getVersion());
        assertEquals(pdataNode.getKey(), dataNode.getKey());

        DataNode dataNode_reversed = encoderV0.resolve(pdataNode.toBytes());
        assertNull(dataNode_reversed.getValueHash());
        assertEquals(pdataNode.getNodeHash(), dataNode_reversed.getNodeHash());
        assertEquals(pdataNode.getSN(), dataNode_reversed.getSN());
        assertEquals(pdataNode.getVersion(), dataNode_reversed.getVersion());
        assertEquals(pdataNode.getKey(), dataNode_reversed.getKey());
    }
}
@@ -0,0 +1,191 @@
package com.jd.blockchain.ledger.core;

import com.jd.blockchain.crypto.Crypto;
import com.jd.blockchain.crypto.CryptoAlgorithm;
import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.crypto.HashFunction;
import com.jd.blockchain.utils.Bytes;
import com.jd.blockchain.utils.io.BytesUtils;
import com.jd.blockchain.utils.io.NumberMask;

/**
 * A copy of the previous version of
 * com.jd.blockchain.ledger.core.MerkleTree.DataNode;
 *
 * @author huanghaiquan
 *
 */
public class PreviousDataNode {

    private HashDigest nodeHash;

    private long sn;

    private Bytes key;

    private long version;

    private byte[] dataNodeBytes;

    private PreviousDataNode(long sn, Bytes key, long version, HashDigest dataHash, byte[] dataBytes) {
        this.sn = sn;
        this.key = key;
        this.version = version;
        this.nodeHash = dataHash;
        this.dataNodeBytes = dataBytes;
    }

    static PreviousDataNode newDataNode(CryptoAlgorithm hashAlgorithm, long sn, Bytes key, long version,
            byte[] hashedData) {
        return newDataNode(hashAlgorithm.code(), sn, key, version, hashedData);
    }

    static PreviousDataNode newDataNode(short hashAlgorithm, long sn, Bytes key, long version, byte[] hashedData) {
        int keySize = key.size();
        int maskSize = NumberMask.SHORT.getMaskLength(keySize);
        int bodySize = 8 + maskSize + keySize + 8; // sn + key + version;
        byte[] bodyBytes = new byte[bodySize];
        int offset = 0;
        offset += BytesUtils.toBytes(sn, bodyBytes, 0);
        NumberMask.SHORT.writeMask(keySize, bodyBytes, offset);
        offset += maskSize;
        offset += key.copyTo(bodyBytes, offset, keySize);
        offset += BytesUtils.toBytes(version, bodyBytes, offset);

        byte[] dataBytes = BytesUtils.concat(bodyBytes, hashedData);
        HashFunction hashFunc = Crypto.getHashFunction(hashAlgorithm);
        HashDigest dataHash = hashFunc.hash(dataBytes);

        int hashMaskSize = NumberMask.TINY.getMaskLength(dataHash.size());
        int dataNodeSize = bodySize + hashMaskSize + dataHash.size();
        byte[] dataNodeBytes = new byte[dataNodeSize];
        offset = 0;
        System.arraycopy(bodyBytes, 0, dataNodeBytes, offset, bodySize);
        offset += bodySize;
        NumberMask.TINY.writeMask(dataHash.size(), dataNodeBytes, offset);
        offset += hashMaskSize;
        System.arraycopy(dataHash.toBytes(), 0, dataNodeBytes, offset, dataHash.size());

        return new PreviousDataNode(sn, key, version, dataHash, dataNodeBytes);
    }

    public HashDigest getNodeHash() {
        return nodeHash;
    }

    protected long getStartingSN() {
        return sn;
    }

    protected long getDataCount() {
        return 1;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.jd.blockchain.ledger.core.MerkleDataNode#getLevel()
     */
    public int getLevel() {
        return 0;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.jd.blockchain.ledger.core.MerkleDataNode#getSN()
     */
    public long getSN() {
        return sn;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.jd.blockchain.ledger.core.MerkleDataNode#getKey()
     */
    public Bytes getKey() {
        return key;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.jd.blockchain.ledger.core.MerkleDataNode#getVersion()
     */
    public long getVersion() {
        return version;
    }

    public byte[] toBytes() {
        return dataNodeBytes;
    }

    static PreviousDataNode parse(byte[] bytes) {
        int offset = 0;
        long sn = BytesUtils.toLong(bytes, offset);
        offset += 8;
        int keySize = NumberMask.SHORT.resolveMaskedNumber(bytes, offset);
        offset += NumberMask.SHORT.getMaskLength(keySize);
        byte[] keyBytes = new byte[keySize];
        System.arraycopy(bytes, offset, keyBytes, 0, keySize);
        offset += keySize;
        Bytes key = new Bytes(keyBytes);
        long version = BytesUtils.toLong(bytes, offset);
        offset += 8;
        int hashSize = NumberMask.TINY.resolveMaskedNumber(bytes, offset);
        offset += NumberMask.TINY.getMaskLength(hashSize);
        byte[] dataHashBytes = new byte[hashSize];
        System.arraycopy(bytes, offset, dataHashBytes, 0, hashSize);
        offset += hashSize;
        HashDigest dataHash = new HashDigest(dataHashBytes);
        return new PreviousDataNode(sn, key, version, dataHash, bytes);
    }

    @Override
    public int hashCode() {
        return nodeHash.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (obj == this) {
            return true;
        }
        if (obj instanceof PreviousDataNode) {
            PreviousDataNode node1 = (PreviousDataNode) obj;
            return this.nodeHash.equals(node1.nodeHash);
        }
        return false;
    }
}
@@ -556,6 +556,9 @@ public class MerkleTreeTest {

    /**
     * Tests the correctness of reloading a Merkle tree from storage;
     */
    @Test
    public void testMerkleReload() {
        CryptoSetting setting = Mockito.mock(CryptoSetting.class);

@@ -563,7 +566,7 @@ public class MerkleTreeTest {
        when(setting.getAutoVerifyHash()).thenReturn(true);

        // Keep an SN-to-hash map of all written data nodes;
-       TreeMap<Long, HashDigest> dataNodes = new TreeMap<>();
+       TreeMap<Long, HashDigest> expectedDataNodes = new TreeMap<>();
        MerkleNode nd;

        // Test starting from an empty tree and adding data nodes sequentially;

@@ -580,7 +583,7 @@ public class MerkleTreeTest {
        for (int i = 0; i < count; i++) {
            rand.nextBytes(dataBuf);
            nd = mkt.setData(sn, "KEY-" + sn, 0, dataBuf);
-           dataNodes.put(sn, nd.getNodeHash());
+           expectedDataNodes.put(sn, nd.getNodeHash());
            sn++;
        }
        mkt.commit();

@@ -610,6 +613,24 @@ public class MerkleTreeTest {
        // Expected to expand into a 4-level 16-degree tree, formed from a full
        // 3-level 16-degree tree by extending 1 new branch (4 path nodes);
        long expectedNodes = getMaxPathNodeCount(3) + 4 + 4097;
        assertEquals(expectedNodes, storage.getCount());

+       // Reload and check that the data is correct;
+       MerkleTree r1_mkt = new MerkleTree(r1_rootHash, setting, keyPrefix, storage, true);
+       {
+           // Verify that an existence proof is produced for every data node;
+           MerkleProof proof = null;
+           HashDigest expectedNodeHash = null;
+           MerkleDataNode reallyDataNode = null;
+           for (long n = 0; n < maxSN; n++) {
+               expectedNodeHash = expectedDataNodes.get(n);
+               reallyDataNode = r1_mkt.getData(n);
+               assertEquals(expectedNodeHash, reallyDataNode.getNodeHash());
+
+               proof = r1_mkt.getProof(n);
+               assertNotNull(proof);
+               assertEquals(expectedNodeHash, proof.getHash(0));
+           }
+       }
    }

    // Modify data nodes so as to cover every branch;

@@ -621,7 +642,7 @@ public class MerkleTreeTest {
            rand.nextBytes(dataBuf);
            sn = i;
            nd = mkt.setData(sn, "KEY-" + sn, 0, dataBuf);
-           dataNodes.put(sn, nd.getNodeHash());
+           expectedDataNodes.put(sn, nd.getNodeHash());
        }
        mkt.commit();

@@ -658,16 +679,18 @@ public class MerkleTreeTest {
            rand.nextBytes(dataBuf);
            sn = maxSN + 1 + i;
            nd = mkt.setData(sn, "KEY-" + sn, 0, dataBuf);
-           dataNodes.put(sn, nd.getNodeHash());
+           expectedDataNodes.put(sn, nd.getNodeHash());
        }
        mkt.commit();

        {
            // Verify that an existence proof is produced for every data node;
            MerkleProof proof = null;
            for (Long n : expectedDataNodes.keySet()) {
                proof = mkt.getProof(n.longValue());
                assertNotNull(proof);
                assertEquals(expectedDataNodes.get(n), proof.getHash(0));
            }
        }

        // Record the root hash of one commit together with some node information,
        // for later load verification;

@@ -700,6 +723,7 @@ public class MerkleTreeTest {
        assertEquals(r1_proof1, r1_mkt.getProof(r1_sn1).toString());
        assertEquals(r1_proof2, r1_mkt.getProof(r1_sn2).toString());

        // Load from the Merkle root hash of the 2nd commit;
        // The Merkle tree generated in the 2nd round modifies all nodes of the 1st
        // round, so the proof of a node with the same SN is different;
        MerkleTree r2_mkt = new MerkleTree(r2_rootHash, setting, keyPrefix, storage, true);

@@ -730,13 +754,16 @@ public class MerkleTreeTest {
        assertEquals(r3_proof3, r3_mkt.getProof(r3_sn3).toString());

        // Verify that an existence proof is produced for every data node;
        {
            MerkleProof proof = null;
            for (Long n : expectedDataNodes.keySet()) {
                proof = r3_mkt.getProof(n.longValue());
                assertNotNull(proof);
                assertEquals(expectedDataNodes.get(n), proof.getHash(0));
            }
        }
    }

    @SuppressWarnings("unused")
    private static int getLevel(long dataCount) {
        if (dataCount < 0) {

@@ -772,8 +799,7 @@ public class MerkleTreeTest {
     * Note: this method does not handle overflow; the caller has to avoid it;
     *
     * @param value
     * @param x an integer greater than or equal to 0;
     * @return
     */
    private static long power(long value, int x) {
@@ -1,5 +1,6 @@
package com.jd.blockchain.ledger;

import com.jd.blockchain.crypto.HashDigest;
import com.jd.blockchain.utils.Bytes;

public interface MerkleDataNode extends MerkleNode {

@@ -9,5 +10,7 @@ public interface MerkleDataNode extends MerkleNode {
    Bytes getKey();

    long getVersion();

+   HashDigest getValueHash();
}
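A minimal sketch of what the new accessor enables, assuming hashFunc is the tree's configured HashFunction and storedValue was persisted by the caller; for V0-encoded nodes getValueHash() returns null, so the null check is required:

// check a separately stored value against the value hash recorded in the node;
HashDigest valueHash = dataNode.getValueHash();
boolean matched = (valueHash != null) && hashFunc.verify(valueHash, storedValue);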