Skip to content

Commit

Permalink
v0.8.0
Browse files Browse the repository at this point in the history
  • Loading branch information
thehenrytsai committed Jun 3, 2020
1 parent e3b10c5 commit f6562a2
Show file tree
Hide file tree
Showing 49 changed files with 4,135 additions and 6 deletions.
208 changes: 208 additions & 0 deletions lib/core/versions/0.8.0/AnchorFile.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,208 @@
import AnchorFileModel from './models/AnchorFileModel';
import ArrayMethods from './util/ArrayMethods';
import Compressor from './util/Compressor';
import CreateOperation from './CreateOperation';
import DeactivateOperation from './DeactivateOperation';
import Encoder from './Encoder';
import ErrorCode from './ErrorCode';
import JsonAsync from './util/JsonAsync';
import Multihash from './Multihash';
import ProtocolParameters from './ProtocolParameters';
import RecoverOperation from './RecoverOperation';
import SidetreeError from '../../../common/SidetreeError';

/**
* Class containing Anchor File related operations.
*/
export default class AnchorFile {

  /**
   * Class that represents an anchor file.
   * NOTE: this class is introduced as an internal structure in replacement to `AnchorFileModel`
   * to keep useful metadata so that repeated computation can be avoided.
   */
  private constructor (
    public readonly model: AnchorFileModel,
    public readonly didUniqueSuffixes: string[],
    public readonly createOperations: CreateOperation[],
    public readonly recoverOperations: RecoverOperation[],
    public readonly deactivateOperations: DeactivateOperation[]) { }

  /**
   * Parses and validates the given anchor file buffer.
   * Validation order: decompress -> JSON parse -> schema (allowed/required properties)
   * -> map file hash -> individual operations -> cross-operation DID uniqueness.
   * @param anchorFileBuffer Compressed anchor file content as fetched from CAS.
   * @throws `SidetreeError` if failed parsing or validation.
   */
  public static async parse (anchorFileBuffer: Buffer): Promise<AnchorFile> {

    let anchorFileDecompressedBuffer;
    try {
      anchorFileDecompressedBuffer = await Compressor.decompress(anchorFileBuffer);
    } catch (e) {
      throw SidetreeError.createFromError(ErrorCode.AnchorFileDecompressionFailure, e);
    }

    let anchorFileModel;
    try {
      anchorFileModel = await JsonAsync.parse(anchorFileDecompressedBuffer);
    } catch (e) {
      throw SidetreeError.createFromError(ErrorCode.AnchorFileNotJson, e);
    }

    // Reject any property outside of the known schema.
    const allowedProperties = new Set(['map_file_uri', 'operations', 'writer_lock_id']);
    for (const property in anchorFileModel) {
      if (!allowedProperties.has(property)) {
        throw new SidetreeError(ErrorCode.AnchorFileHasUnknownProperty);
      }
    }

    if (!anchorFileModel.hasOwnProperty('map_file_uri')) {
      throw new SidetreeError(ErrorCode.AnchorFileMapFileHashMissing);
    }

    if (!anchorFileModel.hasOwnProperty('operations')) {
      throw new SidetreeError(ErrorCode.AnchorFileMissingOperationsProperty);
    }

    // `writer_lock_id` is optional, but must be a string when present.
    if (anchorFileModel.hasOwnProperty('writer_lock_id') &&
        typeof anchorFileModel.writer_lock_id !== 'string') {
      throw new SidetreeError(ErrorCode.AnchorFileWriterLockIPropertyNotString);
    }

    // Map file hash validations.
    const mapFileUri = anchorFileModel.map_file_uri;
    if (typeof mapFileUri !== 'string') {
      throw new SidetreeError(ErrorCode.AnchorFileMapFileHashNotString);
    }

    const mapFileUriAsHashBuffer = Encoder.decodeAsBuffer(mapFileUri);
    if (!Multihash.isComputedUsingHashAlgorithm(mapFileUriAsHashBuffer, ProtocolParameters.hashAlgorithmInMultihashCode)) {
      throw new SidetreeError(ErrorCode.AnchorFileMapFileHashUnsupported, `Map file hash '${mapFileUri}' is unsupported.`);
    }

    // `operations` validations.

    const allowedOperationsProperties = new Set(['create', 'recover', 'deactivate']);
    const operations = anchorFileModel.operations;
    for (const property in operations) {
      if (!allowedOperationsProperties.has(property)) {
        throw new SidetreeError(ErrorCode.AnchorFileUnexpectedPropertyInOperations, `Unexpected property ${property} in 'operations' property in anchor file.`);
      }
    }

    // Will be populated for later validity check:
    // no DID may appear in more than one operation within the same anchor file.
    const didUniqueSuffixes: string[] = [];

    // Validate `create` if exists.
    const createOperations: CreateOperation[] = [];
    if (operations.create !== undefined) {
      if (!Array.isArray(operations.create)) {
        throw new SidetreeError(ErrorCode.AnchorFileCreatePropertyNotArray);
      }

      // Validate every create operation.
      for (const operation of operations.create) {
        const createOperation = await CreateOperation.parseOperationFromAnchorFile(operation);
        createOperations.push(createOperation);
        didUniqueSuffixes.push(createOperation.didUniqueSuffix);
      }
    }

    // Validate `recover` if exists.
    const recoverOperations: RecoverOperation[] = [];
    if (operations.recover !== undefined) {
      if (!Array.isArray(operations.recover)) {
        throw new SidetreeError(ErrorCode.AnchorFileRecoverPropertyNotArray);
      }

      // Validate every recover operation.
      for (const operation of operations.recover) {
        const recoverOperation = await RecoverOperation.parseOperationFromAnchorFile(operation);
        recoverOperations.push(recoverOperation);
        didUniqueSuffixes.push(recoverOperation.didUniqueSuffix);
      }
    }

    // Validate `deactivate` if exists.
    const deactivateOperations: DeactivateOperation[] = [];
    if (operations.deactivate !== undefined) {
      if (!Array.isArray(operations.deactivate)) {
        throw new SidetreeError(ErrorCode.AnchorFileDeactivatePropertyNotArray);
      }

      // Validate every operation.
      for (const operation of operations.deactivate) {
        const deactivateOperation = await DeactivateOperation.parseOperationFromAnchorFile(operation);
        deactivateOperations.push(deactivateOperation);
        didUniqueSuffixes.push(deactivateOperation.didUniqueSuffix);
      }
    }

    if (ArrayMethods.hasDuplicates(didUniqueSuffixes)) {
      throw new SidetreeError(ErrorCode.AnchorFileMultipleOperationsForTheSameDid);
    }

    return new AnchorFile(anchorFileModel, didUniqueSuffixes, createOperations, recoverOperations, deactivateOperations);
  }

  /**
   * Creates an `AnchorFileModel`.
   * @param writerLockId Optional lock identifier; `undefined` is dropped by `JSON.stringify` on serialization.
   * @param mapFileHash CAS hash (encoded multihash) of the corresponding map file.
   */
  public static async createModel (
    writerLockId: string | undefined,
    mapFileHash: string,
    createOperationArray: CreateOperation[],
    recoverOperationArray: RecoverOperation[],
    deactivateOperationArray: DeactivateOperation[]
  ): Promise<AnchorFileModel> {

    // Create operations only anchor the suffix data; delta lives in the chunk file.
    const createOperations = createOperationArray.map(operation => {
      return {
        suffix_data: operation.encodedSuffixData
      };
    });

    const recoverOperations = recoverOperationArray.map(operation => {
      return {
        did_suffix: operation.didUniqueSuffix,
        signed_data: operation.signedDataJws.toCompactJws()
      };
    });

    const deactivateOperations = deactivateOperationArray.map(operation => {
      return {
        did_suffix: operation.didUniqueSuffix,
        signed_data: operation.signedDataJws.toCompactJws()
      };
    });

    const anchorFileModel = {
      writer_lock_id: writerLockId,
      map_file_uri: mapFileHash,
      operations: {
        create: createOperations,
        recover: recoverOperations,
        deactivate: deactivateOperations
      }
    };

    return anchorFileModel;
  }

  /**
   * Creates an anchor file buffer: model -> JSON -> compressed buffer ready for CAS write.
   */
  public static async createBuffer (
    writerLockId: string | undefined,
    mapFileHash: string,
    createOperations: CreateOperation[],
    recoverOperations: RecoverOperation[],
    deactivateOperations: DeactivateOperation[]
  ): Promise<Buffer> {
    const anchorFileModel = await AnchorFile.createModel(writerLockId, mapFileHash, createOperations, recoverOperations, deactivateOperations);
    const anchorFileJson = JSON.stringify(anchorFileModel);
    const anchorFileBuffer = Buffer.from(anchorFileJson);

    return Compressor.compress(anchorFileBuffer);
  }
}
71 changes: 71 additions & 0 deletions lib/core/versions/0.8.0/AnchoredDataSerializer.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import AnchoredData from './models/AnchoredData';
import ErrorCode from './ErrorCode';
import ProtocolParameters from './ProtocolParameters';
import SidetreeError from '../../../common/SidetreeError';

/**
* Encapsulates functionality to serialize/deserialize data that read/write to
* the blockchain.
*/
export default class AnchoredDataSerializer {

  /** Delimiter between logical parts in anchor string. */
  public static readonly delimiter = '.';

  /**
   * Converts the given inputs to the string that is to be written to the blockchain.
   * Format: `<numberOfOperations>.<anchorFileHash>`.
   *
   * @param dataToBeAnchored The data to serialize.
   */
  public static serialize (dataToBeAnchored: AnchoredData): string {
    const { numberOfOperations, anchorFileHash } = dataToBeAnchored;
    return [numberOfOperations, anchorFileHash].join(AnchoredDataSerializer.delimiter);
  }

  /**
   * Deserializes the given string that is read from the blockchain into data.
   *
   * @param serializedData The data to be deserialized.
   * @throws `SidetreeError` if the input is malformed or the operation count exceeds the protocol maximum.
   */
  public static deserialize (serializedData: string): AnchoredData {
    const parts = serializedData.split(AnchoredDataSerializer.delimiter);

    // Exactly two delimiter-separated parts are expected: count and hash.
    if (parts.length !== 2) {
      throw new SidetreeError(ErrorCode.AnchoredDataIncorrectFormat, `Input is not in correct format: ${serializedData}`);
    }

    const [numberOfOperationsString, anchorFileHash] = parts;
    const numberOfOperations = AnchoredDataSerializer.parsePositiveInteger(numberOfOperationsString);

    if (numberOfOperations > ProtocolParameters.maxOperationsPerBatch) {
      throw new SidetreeError(
        ErrorCode.AnchoredDataNumberOfOperationsGreaterThanMax,
        `Number of operations ${numberOfOperations} must be less than or equal to ${ProtocolParameters.maxOperationsPerBatch}`
      );
    }

    return {
      anchorFileHash,
      numberOfOperations
    };
  }

  /**
   * Parses the input as a positive base-10 integer with no leading zeros.
   * @throws `SidetreeError` when the input does not match that form.
   */
  private static parsePositiveInteger (input: string): number {
    // ^[1-9]\d*$ — whole string must be digits, first digit non-zero
    // (so leading zeros, signs, whitespace, and empty strings are all rejected).
    if (!/^[1-9]\d*$/.test(input)) {
      throw new SidetreeError(
        ErrorCode.AnchoredDataNumberOfOperationsNotPositiveInteger,
        `Number of operations '${input}' is not a positive integer without leading zeros.`
      );
    }

    return Number(input);
  }
}
100 changes: 100 additions & 0 deletions lib/core/versions/0.8.0/BatchWriter.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
import AnchoredData from './models/AnchoredData';
import AnchoredDataSerializer from './AnchoredDataSerializer';
import AnchorFile from './AnchorFile';
import ChunkFile from './ChunkFile';
import CreateOperation from './CreateOperation';
import DeactivateOperation from './DeactivateOperation';
import FeeManager from './FeeManager';
import ICas from '../../interfaces/ICas';
import IBatchWriter from '../../interfaces/IBatchWriter';
import IBlockchain from '../../interfaces/IBlockchain';
import IOperationQueue from './interfaces/IOperationQueue';
import LogColor from '../../../common/LogColor';
import MapFile from './MapFile';
import Operation from './Operation';
import OperationType from '../../enums/OperationType';
import ProtocolParameters from './ProtocolParameters';
import RecoverOperation from './RecoverOperation';
import UpdateOperation from './UpdateOperation';
import ValueTimeLockModel from '../../../common/models/ValueTimeLockModel';
import ValueTimeLockVerifier from './ValueTimeLockVerifier';

/**
* Implementation of the `IBatchWriter`.
*/
export default class BatchWriter implements IBatchWriter {
  public constructor (
    private operationQueue: IOperationQueue,
    private blockchain: IBlockchain,
    private cas: ICas) { }

  /**
   * Drains up to one batch of queued operations: builds chunk, map and anchor files,
   * writes them to the content addressable store, anchors the resulting reference on
   * the blockchain, then dequeues the written operations.
   * No-op when the queue is empty.
   */
  public async write () {
    // The normalized fee and the writer's value time lock are independent
    // blockchain reads, so fetch them concurrently.
    const [normalizedFee, currentLock] = await Promise.all([
      this.blockchain.getFee(this.blockchain.approximateTime.time),
      this.blockchain.getWriterValueTimeLock()
    ]);
    const numberOfOpsAllowed = this.getNumberOfOperationsToWrite(currentLock);

    // Get the batch of operations to be anchored on the blockchain.
    const queuedOperations = await this.operationQueue.peek(numberOfOpsAllowed);
    const numberOfOperations = queuedOperations.length;

    // Do nothing if there is nothing to batch together.
    if (queuedOperations.length === 0) {
      console.info(`No queued operations to batch.`);
      return;
    }

    console.info(LogColor.lightBlue(`Batch size = ${LogColor.green(numberOfOperations)}`));

    // `Operation.parse` already returns a promise; no extra `async` wrapper needed.
    const operationModels = await Promise.all(queuedOperations.map((queuedOperation) => Operation.parse(queuedOperation.operationBuffer)));
    const createOperations = operationModels.filter(operation => operation.type === OperationType.Create) as CreateOperation[];
    const recoverOperations = operationModels.filter(operation => operation.type === OperationType.Recover) as RecoverOperation[];
    const updateOperations = operationModels.filter(operation => operation.type === OperationType.Update) as UpdateOperation[];
    const deactivateOperations = operationModels.filter(operation => operation.type === OperationType.Deactivate) as DeactivateOperation[];

    // Create the chunk file buffer from the operation models.
    // NOTE: deactivate operations don't have delta.
    const chunkFileBuffer = await ChunkFile.createBuffer(createOperations, recoverOperations, updateOperations);

    // Write the chunk file to content addressable store.
    const chunkFileHash = await this.cas.write(chunkFileBuffer);
    console.info(LogColor.lightBlue(`Wrote chunk file ${LogColor.green(chunkFileHash)} to content addressable store.`));

    // Write the map file to content addressable store.
    const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, updateOperations);
    const mapFileHash = await this.cas.write(mapFileBuffer);
    console.info(LogColor.lightBlue(`Wrote map file ${LogColor.green(mapFileHash)} to content addressable store.`));

    // Write the anchor file to content addressable store.
    const writerLock = currentLock ? currentLock.identifier : undefined;
    const anchorFileBuffer = await AnchorFile.createBuffer(writerLock, mapFileHash, createOperations, recoverOperations, deactivateOperations);
    const anchorFileHash = await this.cas.write(anchorFileBuffer);
    console.info(LogColor.lightBlue(`Wrote anchor file ${LogColor.green(anchorFileHash)} to content addressable store.`));

    // Anchor the data to the blockchain
    const dataToBeAnchored: AnchoredData = {
      anchorFileHash,
      numberOfOperations
    };

    const stringToWriteToBlockchain = AnchoredDataSerializer.serialize(dataToBeAnchored);
    const fee = FeeManager.computeMinimumTransactionFee(normalizedFee, numberOfOperations);
    console.info(LogColor.lightBlue(`Writing data to blockchain: ${LogColor.green(stringToWriteToBlockchain)} with minimum fee of: ${LogColor.green(fee)}`));

    await this.blockchain.write(stringToWriteToBlockchain, fee);

    // Remove written operations from queue after batch writing has completed successfully.
    await this.operationQueue.dequeue(queuedOperations.length);
  }

  /**
   * Computes the batch size cap as the minimum of the protocol limit and the
   * limit implied by the writer's value time lock (if any).
   */
  private getNumberOfOperationsToWrite (valueTimeLock: ValueTimeLockModel | undefined): number {
    const maxNumberOfOpsAllowedByProtocol = ProtocolParameters.maxOperationsPerBatch;
    const maxNumberOfOpsAllowedByLock = ValueTimeLockVerifier.calculateMaxNumberOfOperationsAllowed(valueTimeLock);

    if (maxNumberOfOpsAllowedByLock > maxNumberOfOpsAllowedByProtocol) {
      // tslint:disable-next-line: max-line-length
      console.info(`Maximum number of operations allowed by value time lock: ${maxNumberOfOpsAllowedByLock}; Maximum number of operations allowed by protocol: ${maxNumberOfOpsAllowedByProtocol}`);
    }

    return Math.min(maxNumberOfOpsAllowedByLock, maxNumberOfOpsAllowedByProtocol);
  }
}

0 comments on commit f6562a2

Please sign in to comment.