Commit

v0.7.0
thehenrytsai committed May 6, 2020
1 parent a0377c5 commit e5fd80d
Showing 47 changed files with 4,076 additions and 2 deletions.
210 changes: 210 additions & 0 deletions lib/core/versions/0.7.0/AnchorFile.ts
@@ -0,0 +1,210 @@
import AnchorFileModel from './models/AnchorFileModel';
import ArrayMethods from './util/ArrayMethods';
import Compressor from './util/Compressor';
import CreateOperation from './CreateOperation';
import DeactivateOperation from './DeactivateOperation';
import Encoder from './Encoder';
import ErrorCode from './ErrorCode';
import JsonAsync from './util/JsonAsync';
import Multihash from './Multihash';
import ProtocolParameters from './ProtocolParameters';
import RecoverOperation from './RecoverOperation';
import SidetreeError from '../../../common/SidetreeError';

/**
* Class containing Anchor File related operations.
*/
export default class AnchorFile {

/**
* Class that represents an anchor file.
* NOTE: this class is introduced as an internal structure that replaces `AnchorFileModel`
* to keep useful metadata so that repeated computation can be avoided.
*/
private constructor (
public readonly model: AnchorFileModel,
public readonly didUniqueSuffixes: string[],
public readonly createOperations: CreateOperation[],
public readonly recoverOperations: RecoverOperation[],
public readonly deactivateOperations: DeactivateOperation[]) { }

/**
* Parses and validates the given anchor file buffer.
* @throws `SidetreeError` if parsing or validation fails.
*/
public static async parse (anchorFileBuffer: Buffer): Promise<AnchorFile> {

let anchorFileDecompressedBuffer;
try {
anchorFileDecompressedBuffer = await Compressor.decompress(anchorFileBuffer);
} catch (e) {
throw SidetreeError.createFromError(ErrorCode.AnchorFileDecompressionFailure, e);
}

let anchorFileModel;
try {
anchorFileModel = await JsonAsync.parse(anchorFileDecompressedBuffer);
} catch (e) {
throw SidetreeError.createFromError(ErrorCode.AnchorFileNotJson, e);
}

const allowedProperties = new Set(['map_file_uri', 'operations', 'writer_lock_id']);
for (let property in anchorFileModel) {
if (!allowedProperties.has(property)) {
throw new SidetreeError(ErrorCode.AnchorFileHasUnknownProperty);
}
}

if (!anchorFileModel.hasOwnProperty('map_file_uri')) {
throw new SidetreeError(ErrorCode.AnchorFileMapFileHashMissing);
}

if (!anchorFileModel.hasOwnProperty('operations')) {
throw new SidetreeError(ErrorCode.AnchorFileMissingOperationsProperty);
}

if (anchorFileModel.hasOwnProperty('writer_lock_id') &&
typeof anchorFileModel.writer_lock_id !== 'string') {
throw new SidetreeError(ErrorCode.AnchorFileWriterLockIPropertyNotString);
}

// Map file hash validations.
const mapFileUri = anchorFileModel.map_file_uri;
if (typeof mapFileUri !== 'string') {
throw new SidetreeError(ErrorCode.AnchorFileMapFileHashNotString);
}

const mapFileUriAsHashBuffer = Encoder.decodeAsBuffer(mapFileUri);
if (!Multihash.isComputedUsingHashAlgorithm(mapFileUriAsHashBuffer, ProtocolParameters.hashAlgorithmInMultihashCode)) {
throw new SidetreeError(ErrorCode.AnchorFileMapFileHashUnsupported, `Map file hash '${mapFileUri}' is unsupported.`);
}

// `operations` validations.

const allowedOperationsProperties = new Set(['create', 'recover', 'deactivate']);
const operations = anchorFileModel.operations;
for (let property in operations) {
if (!allowedOperationsProperties.has(property)) {
throw new SidetreeError(ErrorCode.AnchorFileUnexpectedPropertyInOperations, `Unexpected property '${property}' in the 'operations' property of the anchor file.`);
}
}

// Will be populated for the duplicate-DID check below.
const didUniqueSuffixes: string[] = [];

// Validate `create` if exists.
const createOperations: CreateOperation[] = [];
if (operations.create !== undefined) {
if (!Array.isArray(operations.create)) {
throw new SidetreeError(ErrorCode.AnchorFileCreatePropertyNotArray);
}

// Validate every create operation.
for (const operation of operations.create) {
const createOperation = await CreateOperation.parseOperationFromAnchorFile(operation);
createOperations.push(createOperation);
didUniqueSuffixes.push(createOperation.didUniqueSuffix);
}
}

// Validate `recover` if exists.
const recoverOperations: RecoverOperation[] = [];
if (operations.recover !== undefined) {
if (!Array.isArray(operations.recover)) {
throw new SidetreeError(ErrorCode.AnchorFileRecoverPropertyNotArray);
}

// Validate every recover operation.
for (const operation of operations.recover) {
const recoverOperation = await RecoverOperation.parseOperationFromAnchorFile(operation);
recoverOperations.push(recoverOperation);
didUniqueSuffixes.push(recoverOperation.didUniqueSuffix);
}
}

// Validate `deactivate` if exists.
const deactivateOperations: DeactivateOperation[] = [];
if (operations.deactivate !== undefined) {
if (!Array.isArray(operations.deactivate)) {
throw new SidetreeError(ErrorCode.AnchorFileDeactivatePropertyNotArray);
}

// Validate every operation.
for (const operation of operations.deactivate) {
const deactivateOperation = await DeactivateOperation.parseOperationFromAnchorFile(operation);
deactivateOperations.push(deactivateOperation);
didUniqueSuffixes.push(deactivateOperation.didUniqueSuffix);
}
}

if (ArrayMethods.hasDuplicates(didUniqueSuffixes)) {
throw new SidetreeError(ErrorCode.AnchorFileMultipleOperationsForTheSameDid);
}

const anchorFile = new AnchorFile(anchorFileModel, didUniqueSuffixes, createOperations, recoverOperations, deactivateOperations);
return anchorFile;
}

/**
* Creates an `AnchorFileModel`.
*/
public static async createModel (
writerLockId: string | undefined,
mapFileHash: string,
createOperationArray: CreateOperation[],
recoverOperationArray: RecoverOperation[],
deactivateOperationArray: DeactivateOperation[]
): Promise<AnchorFileModel> {

const createOperations = createOperationArray.map(operation => {
return {
suffix_data: operation.encodedSuffixData
};
});

const recoverOperations = recoverOperationArray.map(operation => {
return {
did_suffix: operation.didUniqueSuffix,
recovery_reveal_value: operation.recoveryRevealValue,
signed_data: operation.signedDataJws.toCompactJws()
};
});

const deactivateOperations = deactivateOperationArray.map(operation => {
return {
did_suffix: operation.didUniqueSuffix,
recovery_reveal_value: operation.recoveryRevealValue,
signed_data: operation.signedDataJws.toCompactJws()
};
});

const anchorFileModel = {
writer_lock_id: writerLockId,
map_file_uri: mapFileHash,
operations: {
create: createOperations,
recover: recoverOperations,
deactivate: deactivateOperations
}
};

return anchorFileModel;
}

/**
* Creates an anchor file buffer.
*/
public static async createBuffer (
writerLockId: string | undefined,
mapFileHash: string,
createOperations: CreateOperation[],
recoverOperations: RecoverOperation[],
deactivateOperations: DeactivateOperation[]
): Promise<Buffer> {
const anchorFileModel = await AnchorFile.createModel(writerLockId, mapFileHash, createOperations, recoverOperations, deactivateOperations);
const anchorFileJson = JSON.stringify(anchorFileModel);
const anchorFileBuffer = Buffer.from(anchorFileJson);

return Compressor.compress(anchorFileBuffer);
}
}
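
For reference, a minimal round-trip sketch of the API above (illustrative, not part of this commit). It assumes the snippet sits alongside AnchorFile.ts so the relative import resolves, and that `mapFileHash` is the encoded multihash of a map file, since `parse()` enforces the multihash check against `ProtocolParameters.hashAlgorithmInMultihashCode`:

import AnchorFile from './AnchorFile';

async function anchorFileRoundTrip (mapFileHash: string): Promise<void> {
  // Create a compressed anchor file buffer with no writer lock and no operations.
  const anchorFileBuffer = await AnchorFile.createBuffer(undefined, mapFileHash, [], [], []);

  // `parse()` decompresses the buffer, parses the JSON, rejects unknown properties,
  // and verifies that the map file URI is a supported multihash.
  const anchorFile = await AnchorFile.parse(anchorFileBuffer);
  console.info(`Anchor file references map file: ${anchorFile.model.map_file_uri}`);
}
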
86 changes: 86 additions & 0 deletions lib/core/versions/0.7.0/AnchoredDataSerializer.ts
@@ -0,0 +1,86 @@
import AnchoredData from './models/AnchoredData';
import Encoder from './Encoder';
import ErrorCode from './ErrorCode';
import SidetreeError from '../../../common/SidetreeError';

/**
* Encapsulates functionality to serialize/deserialize the data that is written to
* and read from the blockchain.
*/
export default class AnchoredDataSerializer {

private static readonly delimiter = '.';
private static readonly maxUnsignedIntegerValue = 0xFFFFFFFF;

/**
* Converts the given inputs to the string that is to be written to the blockchain.
*
* @param dataToBeAnchored The data to serialize.
*/
public static serialize (dataToBeAnchored: AnchoredData): string {

// First convert the number of operations input into a 4-byte buffer and then base64url-encode it.
const numberAsBuffer = AnchoredDataSerializer.convertNumberToBuffer(dataToBeAnchored.numberOfOperations);
const encodedNumberOfOperations = Encoder.encode(numberAsBuffer);

// Concatenate the inputs with the delimiter and return.
return `${encodedNumberOfOperations}${AnchoredDataSerializer.delimiter}${dataToBeAnchored.anchorFileHash}`;
}

/**
* Deserializes the given string that is read from the blockchain into data.
*
* @param serializedData The data to be deserialized.
*/
public static deserialize (serializedData: string): AnchoredData {

const splitData = serializedData.split(AnchoredDataSerializer.delimiter);

if (splitData.length !== 2) {
throw new SidetreeError(ErrorCode.AnchoredDataIncorrectFormat, `Input is not in correct format: ${serializedData}`);
}

const decodedNumberOfOperations = Encoder.decodeAsBuffer(splitData[0]);
const numberOfOperations = AnchoredDataSerializer.convertBufferToNumber(decodedNumberOfOperations);

return {
anchorFileHash: splitData[1],
numberOfOperations: numberOfOperations
};
}

private static convertNumberToBuffer (numberOfOperations: number): Buffer {

if (!Number.isInteger(numberOfOperations)) {
throw new SidetreeError(ErrorCode.AnchoredDataNumberOfOperationsNotInteger, `Number of operations ${numberOfOperations} must be an integer.`);
}

if (numberOfOperations < 0) {
throw new SidetreeError(ErrorCode.AnchoredDataNumberOfOperationsLessThanZero, `Number of operations ${numberOfOperations} must be greater than or equal to 0.`);
}

if (numberOfOperations > this.maxUnsignedIntegerValue) {
// We are only using 4 bytes to store the number of operations so any number greater than
// that is not allowed.
throw new SidetreeError(ErrorCode.AnchoredDataNumberOfOperationsGreaterThanMax,
`Number of operations ${numberOfOperations} must be less than or equal to ${this.maxUnsignedIntegerValue}.`);
}

// Write the input into a 4-byte buffer in little-endian format.
const byteArrayBuffer = Buffer.alloc(4);
byteArrayBuffer.writeUInt32LE(numberOfOperations, 0);

return byteArrayBuffer;
}

private static convertBufferToNumber (bytesBuffer: Buffer): number {

// Ensure that the input has 4 bytes
if (bytesBuffer.length !== 4) {
throw new SidetreeError(ErrorCode.AnchoredDataNumberOfOperationsNotFourBytes,
`Input has ${bytesBuffer.length} bytes.`);
}

return bytesBuffer.readUInt32LE(0);
}
}
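
A short usage sketch to make the wire format concrete (illustrative, not part of this commit). The anchor file hash below is a made-up placeholder, and the encoded prefix assumes `Encoder.encode` produces unpadded base64url:

import AnchoredDataSerializer from './AnchoredDataSerializer';

// 100 operations -> 4-byte little-endian buffer [0x64, 0x00, 0x00, 0x00],
// which base64url-encodes to 'ZAAAAA', yielding 'ZAAAAA.<anchor file hash>'.
const anchorString = AnchoredDataSerializer.serialize({
  anchorFileHash: 'QmExampleAnchorFileHash', // hypothetical CAS hash, for illustration only
  numberOfOperations: 100
});

const anchoredData = AnchoredDataSerializer.deserialize(anchorString);
console.info(`Recovered ${anchoredData.numberOfOperations} operations and hash '${anchoredData.anchorFileHash}'.`);
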
98 changes: 98 additions & 0 deletions lib/core/versions/0.7.0/BatchWriter.ts
@@ -0,0 +1,98 @@
import AnchoredData from './models/AnchoredData';
import AnchoredDataSerializer from './AnchoredDataSerializer';
import AnchorFile from './AnchorFile';
import ChunkFile from './ChunkFile';
import CreateOperation from './CreateOperation';
import DeactivateOperation from './DeactivateOperation';
import FeeManager from './FeeManager';
import ICas from '../../interfaces/ICas';
import IBatchWriter from '../../interfaces/IBatchWriter';
import IBlockchain from '../../interfaces/IBlockchain';
import IOperationQueue from './interfaces/IOperationQueue';
import MapFile from './MapFile';
import Operation from './Operation';
import OperationType from '../../enums/OperationType';
import ProtocolParameters from './ProtocolParameters';
import RecoverOperation from './RecoverOperation';
import UpdateOperation from './UpdateOperation';
import ValueTimeLockModel from '../../../common/models/ValueTimeLockModel';
import ValueTimeLockVerifier from './ValueTimeLockVerifier';

/**
* Implementation of the `IBatchWriter`.
*/
export default class BatchWriter implements IBatchWriter {
public constructor (
private operationQueue: IOperationQueue,
private blockchain: IBlockchain,
private cas: ICas) { }

public async write () {
const normalizedFee = await this.blockchain.getFee(this.blockchain.approximateTime.time);
const currentLock = await this.blockchain.getWriterValueTimeLock();
const numberOfOpsAllowed = this.getNumberOfOperationsToWrite(currentLock, normalizedFee);

// Get the batch of operations to be anchored on the blockchain.
const queuedOperations = await this.operationQueue.peek(numberOfOpsAllowed);
const numberOfOperations = queuedOperations.length;

console.info(`Batch size = ${numberOfOperations}`);

// Do nothing if there is nothing to batch together.
if (queuedOperations.length === 0) {
return;
}

const operationModels = await Promise.all(queuedOperations.map(async (queuedOperation) => Operation.parse(queuedOperation.operationBuffer)));
const createOperations = operationModels.filter(operation => operation.type === OperationType.Create) as CreateOperation[];
const recoverOperations = operationModels.filter(operation => operation.type === OperationType.Recover) as RecoverOperation[];
const updateOperations = operationModels.filter(operation => operation.type === OperationType.Update) as UpdateOperation[];
const deactivateOperations = operationModels.filter(operation => operation.type === OperationType.Deactivate) as DeactivateOperation[];

// Create the chunk file buffer from the operation models.
// NOTE: deactivate operations do not have a delta.
const chunkFileBuffer = await ChunkFile.createBuffer(createOperations, recoverOperations, updateOperations);

// Write the chunk file to content addressable store.
const chunkFileHash = await this.cas.write(chunkFileBuffer);
console.info(`Wrote chunk file ${chunkFileHash} to content addressable store.`);

// Write the map file to content addressable store.
const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, updateOperations);
const mapFileHash = await this.cas.write(mapFileBuffer);
console.info(`Wrote map file ${mapFileHash} to content addressable store.`);

// Write the anchor file to content addressable store.
const writerLock = currentLock ? currentLock.identifier : undefined;
const anchorFileBuffer = await AnchorFile.createBuffer(writerLock, mapFileHash, createOperations, recoverOperations, deactivateOperations);
const anchorFileHash = await this.cas.write(anchorFileBuffer);
console.info(`Wrote anchor file ${anchorFileHash} to content addressable store.`);

// Anchor the data to the blockchain
const dataToBeAnchored: AnchoredData = {
anchorFileHash,
numberOfOperations
};

const stringToWriteToBlockchain = AnchoredDataSerializer.serialize(dataToBeAnchored);
const fee = FeeManager.computeMinimumTransactionFee(normalizedFee, numberOfOperations);
console.info(`Writing data to blockchain: ${stringToWriteToBlockchain} with minimum fee of: ${fee}`);

await this.blockchain.write(stringToWriteToBlockchain, fee);

// Remove written operations from queue after batch writing has completed successfully.
await this.operationQueue.dequeue(queuedOperations.length);
}

private getNumberOfOperationsToWrite (valueTimeLock: ValueTimeLockModel | undefined, normalizedFee: number): number {
const maxNumberOfOpsAllowedByProtocol = ProtocolParameters.maxOperationsPerBatch;
const maxNumberOfOpsAllowedByLock = ValueTimeLockVerifier.calculateMaxNumberOfOperationsAllowed(valueTimeLock, normalizedFee);

if (maxNumberOfOpsAllowedByLock > maxNumberOfOpsAllowedByProtocol) {
// tslint:disable-next-line: max-line-length
console.info(`Maximum number of operations allowed by value time lock: ${maxNumberOfOpsAllowedByLock}; Maximum number of operations allowed by protocol: ${maxNumberOfOpsAllowedByProtocol}`);
}

return Math.min(maxNumberOfOpsAllowedByLock, maxNumberOfOpsAllowedByProtocol);
}
}
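
A wiring sketch for the batch writer above (illustrative, not part of this commit). The three dependencies are assumed to be concrete implementations supplied by the host, the relative imports assume the snippet sits alongside BatchWriter.ts, and the one-minute polling interval is an arbitrary choice for the example:

import BatchWriter from './BatchWriter';
import ICas from '../../interfaces/ICas';
import IBlockchain from '../../interfaces/IBlockchain';
import IOperationQueue from './interfaces/IOperationQueue';

declare const operationQueue: IOperationQueue; // host-provided implementation (hypothetical)
declare const blockchain: IBlockchain;         // host-provided implementation (hypothetical)
declare const cas: ICas;                       // host-provided implementation (hypothetical)

const batchWriter = new BatchWriter(operationQueue, blockchain, cas);

// Poll periodically; `write()` returns without writing when the queue is empty,
// and a failed cycle is simply retried on the next tick.
setInterval(async () => {
  try {
    await batchWriter.write();
  } catch (error) {
    console.error(`Batch writing failed: ${error}`);
  }
}, 60 * 1000);
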
