Skip to content

Commit

Permalink
0.6.0 release
Browse files Browse the repository at this point in the history
  • Loading branch information
isaacJChen committed Feb 11, 2020
1 parent 81193fd commit 0cb46ad
Show file tree
Hide file tree
Showing 39 changed files with 2,814 additions and 2 deletions.
112 changes: 112 additions & 0 deletions lib/core/versions/0.6.0/AnchorFile.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import AnchorFileModel from './models/AnchorFileModel';
import Compressor from './util/Compressor';
import Encoder from './Encoder';
import ErrorCode from './ErrorCode';
import JsonAsync from './util/JsonAsync';
import Multihash from './Multihash';
import ProtocolParameters from './ProtocolParameters';
import SidetreeError from '../../SidetreeError';

/**
 * Class containing Anchor File related operations.
 */
export default class AnchorFile {
  /**
   * Parses and validates the given anchor file buffer.
   * @param anchorFileBuffer Compressed anchor file content as fetched from content-addressable storage.
   * @returns The parsed and validated anchor file model.
   * @throws `SidetreeError` if failed parsing or validation.
   */
  public static async parseAndValidate (anchorFileBuffer: Buffer): Promise<AnchorFileModel> {

    let anchorFileDecompressedBuffer;
    try {
      anchorFileDecompressedBuffer = await Compressor.decompress(anchorFileBuffer);
    } catch (e) {
      throw SidetreeError.createFromError(ErrorCode.AnchorFileDecompressionFailure, e);
    }

    let anchorFile;
    try {
      anchorFile = await JsonAsync.parse(anchorFileDecompressedBuffer);
    } catch (e) {
      throw SidetreeError.createFromError(ErrorCode.AnchorFileNotJson, e);
    }

    // Only `mapFileHash` and `didUniqueSuffixes` are defined by the protocol, so more
    // than 2 properties implies at least one unknown property.
    const anchorFileProperties = Object.keys(anchorFile);
    if (anchorFileProperties.length > 2) {
      throw new SidetreeError(ErrorCode.AnchorFileHasUnknownProperty);
    }

    if (!anchorFile.hasOwnProperty('mapFileHash')) {
      throw new SidetreeError(ErrorCode.AnchorFileMapFileHashMissing);
    }

    if (!anchorFile.hasOwnProperty('didUniqueSuffixes')) {
      throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesMissing);
    }

    // Map file hash validations.
    if (typeof anchorFile.mapFileHash !== 'string') {
      throw new SidetreeError(ErrorCode.AnchorFileMapFileHashNotString);
    }

    // Renamed from `didUniqueSuffixBuffer`: this buffer holds the decoded *map file hash*.
    const mapFileHashBuffer = Encoder.decodeAsBuffer(anchorFile.mapFileHash);
    if (!Multihash.isComputedUsingHashAlgorithm(mapFileHashBuffer, ProtocolParameters.hashAlgorithmInMultihashCode)) {
      throw new SidetreeError(ErrorCode.AnchorFileMapFileHashUnsupported, `Map file hash '${anchorFile.mapFileHash}' is unsupported.`);
    }

    // DID Unique Suffixes validations.
    if (!Array.isArray(anchorFile.didUniqueSuffixes)) {
      throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesNotArray);
    }

    if (anchorFile.didUniqueSuffixes.length > ProtocolParameters.maxOperationsPerBatch) {
      throw new SidetreeError(ErrorCode.AnchorFileExceededMaxOperationCount);
    }

    if (this.hasDuplicates(anchorFile.didUniqueSuffixes)) {
      throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesHasDuplicates);
    }

    // Verify each entry in DID unique suffixes.
    for (let uniqueSuffix of anchorFile.didUniqueSuffixes) {
      if (typeof uniqueSuffix !== 'string') {
        throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixEntryNotString);
      }

      const maxEncodedHashStringLength = ProtocolParameters.maxEncodedHashStringLength;
      if (uniqueSuffix.length > maxEncodedHashStringLength) {
        throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixTooLong, `Unique suffix '${uniqueSuffix}' exceeds length of ${maxEncodedHashStringLength}.`);
      }
    }

    return anchorFile;
  }

  /**
   * Checks to see if there are duplicates in the given array.
   * @returns `true` if the array contains at least one duplicate value, `false` otherwise.
   */
  public static hasDuplicates<T> (array: Array<T>): boolean {
    // A Set keeps only distinct values (SameValueZero equality, matching `Set.has`),
    // so a smaller Set size means at least one duplicate exists.
    return new Set(array).size !== array.length;
  }

  /**
   * Creates a buffer from the input so that the buffer can be persisted.
   * @returns The compressed JSON serialization of the given anchor file model.
   */
  public static async createBufferFromAnchorFileModel (anchorFileModel: AnchorFileModel): Promise<Buffer> {

    const anchorFileJson = JSON.stringify(anchorFileModel);
    const anchorFileBuffer = Buffer.from(anchorFileJson);

    return Compressor.compress(anchorFileBuffer);
  }
}
86 changes: 86 additions & 0 deletions lib/core/versions/0.6.0/AnchoredDataSerializer.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
import AnchoredData from './models/AnchoredData';
import Encoder from './Encoder';
import ErrorCode from './ErrorCode';
import SidetreeError from '../../SidetreeError';

/**
 * Encapsulates functionality to serialize/deserialize data that read/write to
 * the blockchain.
 */
export default class AnchoredDataSerializer {

  private static readonly delimiter = '.';
  // Number of operations is stored in 4 bytes, so this is the largest representable count.
  private static readonly maxUnsignedIntegerValue = 0xFFFFFFFF;

  /**
   * Converts the given inputs to the string that is to be written to the blockchain.
   *
   * @param dataToBeAnchored The data to serialize.
   * @returns `<base64url(operation count)>.<anchor file hash>`.
   */
  public static serialize (dataToBeAnchored: AnchoredData): string {

    // First convert the number of operations input into a 4-byte buffer and then base64 encode it
    const numberAsBuffer = AnchoredDataSerializer.convertNumberToBuffer(dataToBeAnchored.numberOfOperations);
    const encodedNumberOfOperations = Encoder.encode(numberAsBuffer);

    // Concatenate the inputs w/ the delimiter and return
    return `${encodedNumberOfOperations}${AnchoredDataSerializer.delimiter}${dataToBeAnchored.anchorFileHash}`;
  }

  /**
   * Deserializes the given string that is read from the blockchain into data.
   *
   * @param serializedData The data to be deserialized.
   * @throws `SidetreeError` if the input is not two delimiter-separated parts or the
   *         encoded operation count is malformed.
   */
  public static deserialize (serializedData: string): AnchoredData {

    const splitData = serializedData.split(AnchoredDataSerializer.delimiter);

    if (splitData.length !== 2) {
      throw new SidetreeError(ErrorCode.AnchoredDataIncorrectFormat, `Input is not in correct format: ${serializedData}`);
    }

    const decodedNumberOfOperations = Encoder.decodeAsBuffer(splitData[0]);
    const numberOfOperations = AnchoredDataSerializer.convertBufferToNumber(decodedNumberOfOperations);

    return {
      anchorFileHash: splitData[1],
      numberOfOperations: numberOfOperations
    };
  }

  private static convertNumberToBuffer (numberOfOperations: number): Buffer {

    if (!Number.isInteger(numberOfOperations)) {
      throw new SidetreeError(ErrorCode.AnchoredDataNumberOfOperationsNotInteger, `Number of operations ${numberOfOperations} must be an integer.`);
    }

    if (numberOfOperations < 0) {
      // NOTE: fixed error message — zero is a valid count, only negatives are rejected.
      throw new SidetreeError(ErrorCode.AnchoredDataNumberOfOperationsLessThanZero, `Number of operations ${numberOfOperations} must be greater than or equal to 0`);
    }

    if (numberOfOperations > AnchoredDataSerializer.maxUnsignedIntegerValue) {
      // We are only using 4 bytes to store the number of operations so any number greater than
      // that is not allowed.
      throw new SidetreeError(ErrorCode.AnchoredDataNumberOfOperationsGreaterThanMax,
        `Number of operations ${numberOfOperations} must be less than equal to ${AnchoredDataSerializer.maxUnsignedIntegerValue}`);
    }

    // First write the input into a 4 bytes buffer. Little Endian format.
    const byteArrayBuffer = Buffer.alloc(4);
    byteArrayBuffer.writeUInt32LE(numberOfOperations, 0);

    return byteArrayBuffer;
  }

  private static convertBufferToNumber (bytesBuffer: Buffer): number {

    // Ensure that the input has 4 bytes
    if (bytesBuffer.length !== 4) {
      throw new SidetreeError(ErrorCode.AnchoredDataNumberOfOperationsNotFourBytes,
        `Input has ${bytesBuffer.length} bytes.`);
    }

    return bytesBuffer.readUInt32LE(0);
  }
}
35 changes: 35 additions & 0 deletions lib/core/versions/0.6.0/AnchoredOperation.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import AnchoredOperationModel from '../../models/AnchoredOperationModel';
import NamedAnchoredOperationModel from '../../models/NamedAnchoredOperationModel';
import Operation from './Operation';

/**
 * A class that represents an anchored Sidetree operation.
 */
export default class AnchoredOperation extends Operation implements NamedAnchoredOperationModel {
  /** The index this operation was assigned to in the batch. */
  public readonly operationIndex: number;
  /** The transaction number of the transaction this operation was batched within. */
  public readonly transactionNumber: number;
  /** The logical blockchain time that this operation was anchored on the blockchain. */
  public readonly transactionTime: number;

  /**
   * Constructs an anchored operation if the operation buffer passes schema validation, throws error otherwise.
   * Private: callers must use `createAnchoredOperation` instead.
   */
  private constructor (anchoredOperationModel: AnchoredOperationModel) {
    // The base class performs schema validation of the raw operation buffer.
    super(anchoredOperationModel.operationBuffer);

    // Properties of an operation in a resolved transaction.
    this.operationIndex = anchoredOperationModel.operationIndex;
    this.transactionNumber = anchoredOperationModel.transactionNumber;
    this.transactionTime = anchoredOperationModel.transactionTime;
  }

  /**
   * Validates and creates an anchored operation that has been anchored on the blockchain.
   * @throws Error if given operation buffer fails any validation.
   */
  public static createAnchoredOperation (anchoredOperationModel: AnchoredOperationModel): AnchoredOperation {
    return new AnchoredOperation(anchoredOperationModel);
  }
}
129 changes: 129 additions & 0 deletions lib/core/versions/0.6.0/BatchFile.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
import AnchoredOperation from './AnchoredOperation';
import AnchoredOperationModel from '../../models/AnchoredOperationModel';
import AnchorFileModel from './models/AnchorFileModel';
import BatchFileModel from './models/BatchFileModel';
import Compressor from './util/Compressor';
import Encoder from './Encoder';
import ErrorCode from './ErrorCode';
import JsonAsync from './util/JsonAsync';
import NamedAnchoredOperationModel from '../../models/NamedAnchoredOperationModel';
import ProtocolParameters from './ProtocolParameters';
import SidetreeError from '../../SidetreeError';
import timeSpan = require('time-span');

/**
 * Defines the schema of a Batch File and its related operations.
 * NOTE: Must NOT add properties not defined by Sidetree protocol.
 */
export default class BatchFile {
  /**
   * Parses and validates the given batch file buffer and all the operations within it.
   * @param batchFileBuffer Compressed batch file content fetched from content-addressable storage.
   * @param anchorFile The already-validated anchor file that referenced this batch file.
   * @param transactionNumber The transaction number the batch was anchored within.
   * @param transactionTime The logical blockchain time the batch was anchored at.
   * @returns The operations in the batch, each annotated with its anchoring information.
   * @throws SidetreeError if failed parsing or validation.
   */
  public static async parseAndValidate (
    batchFileBuffer: Buffer,
    anchorFile: AnchorFileModel,
    transactionNumber: number,
    transactionTime: number
  ): Promise<NamedAnchoredOperationModel[]> {

    let endTimer = timeSpan();
    const decompressedBatchFileBuffer = await Compressor.decompress(batchFileBuffer);
    const batchFileObject = await JsonAsync.parse(decompressedBatchFileBuffer);
    console.info(`Parsed batch file in ${endTimer.rounded()} ms.`);

    // Ensure only properties specified by Sidetree protocol are given.
    const allowedProperties = new Set(['operations']);
    for (const property of Object.keys(batchFileObject)) {
      if (!allowedProperties.has(property)) {
        throw new SidetreeError(ErrorCode.BatchFileUnexpectedProperty, `Unexpected property ${property} in batch file.`);
      }
    }

    // Make sure operations is an array.
    // `Array.isArray` is used (instead of `instanceof Array`) for consistency with the
    // anchor file validation and because it also recognizes arrays from other realms.
    if (!Array.isArray(batchFileObject.operations)) {
      throw new SidetreeError(ErrorCode.BatchFileOperationsPropertyNotArray, 'Invalid batch file, operations property is not an array.');
    }

    // Make sure all operations are strings.
    batchFileObject.operations.forEach((operation: any) => {
      if (typeof operation !== 'string') {
        throw new SidetreeError(ErrorCode.BatchFileOperationsNotArrayOfStrings, 'Invalid batch file, operations property is not an array of strings.');
      }
    });

    const batchFile = batchFileObject as BatchFileModel;
    const batchSize = batchFile.operations.length;

    // Verify the number of operations does not exceed the maximum allowed limit.
    if (batchSize > ProtocolParameters.maxOperationsPerBatch) {
      throw new SidetreeError(
        ErrorCode.BatchFileOperationCountExceedsLimit,
        `Batch size of ${batchSize} operations exceeds the allowed limit of ${ProtocolParameters.maxOperationsPerBatch}.`
      );
    }

    // Verify that the batch size count matches that of the anchor file.
    const operationCountInAnchorFile = anchorFile.didUniqueSuffixes.length;
    if (batchSize !== operationCountInAnchorFile) {
      throw new SidetreeError(
        ErrorCode.BatchFileOperationCountMismatch,
        `Batch size of ${batchSize} in batch file does not match of size of ${operationCountInAnchorFile} in anchor file.`
      );
    }

    endTimer = timeSpan();
    const namedAnchoredOperationModels: NamedAnchoredOperationModel[] = [];

    for (let operationIndex = 0; operationIndex < batchSize; operationIndex++) {
      const encodedOperation = batchFile.operations[operationIndex];
      const operationBuffer = Encoder.decodeAsBuffer(encodedOperation);

      // Verify size of each operation does not exceed the maximum allowed limit.
      if (operationBuffer.length > ProtocolParameters.maxOperationByteSize) {
        throw new SidetreeError(
          ErrorCode.BatchFileOperationSizeExceedsLimit,
          `Operation size of ${operationBuffer.length} bytes exceeds the allowed limit of ${ProtocolParameters.maxOperationByteSize} bytes.`
        );
      }

      const anchoredOperationModel: AnchoredOperationModel = {
        operationBuffer,
        operationIndex,
        transactionNumber,
        transactionTime
      };

      const operation = AnchoredOperation.createAnchoredOperation(anchoredOperationModel);

      // Each operation's DID unique suffix must match the one at the same index in the anchor file.
      // (Renamed local to singular — it holds one suffix, not the list.)
      const didUniqueSuffixInAnchorFile = anchorFile.didUniqueSuffixes[operationIndex];
      if (operation.didUniqueSuffix !== didUniqueSuffixInAnchorFile) {
        throw new SidetreeError(
          ErrorCode.BatchFileOperationMismatch,
          `Operation ${operationIndex}'s DID unique suffix '${operation.didUniqueSuffix}' ` +
          `is not the same as '${didUniqueSuffixInAnchorFile}' seen in anchor file.`);
      }

      namedAnchoredOperationModels.push(operation);
    }
    console.info(`Decoded ${batchSize} operations in batch file. Time taken: ${endTimer.rounded()} ms.`);

    return namedAnchoredOperationModels;
  }

  /**
   * Creates the Batch File buffer from an array of operation Buffers.
   * @param operationBuffers Operation buffers in JSON serialized form, NOT encoded in anyway.
   * @returns The Batch File buffer.
   */
  public static async fromOperationBuffers (operationBuffers: Buffer[]): Promise<Buffer> {
    const operations = operationBuffers.map((operation) => {
      return Encoder.encode(operation);
    });

    const rawData = JSON.stringify({ operations });
    const compressedRawData = await Compressor.compress(Buffer.from(rawData));

    return compressedRawData;
  }
}

0 comments on commit 0cb46ad

Please sign in to comment.