Merge pull request #582 from forcedotcom/sm/file-move-composability
refactor: registry from STL
iowillhoit committed May 13, 2024
2 parents 3813a9a + 7e78049 · commit f986a1b
Showing 13 changed files with 134 additions and 99 deletions.
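The central API change: `ShadowRepoOptions` now requires a `registry: RegistryAccess` supplied by the caller, where previously `compareTypes` built its own `new RegistryAccess(undefined, projectPath)` on every move-detection pass. A minimal sketch of the new options shape — the org id and paths below are placeholders, not values from this PR:

```ts
import path from 'node:path';
import { RegistryAccess } from '@salesforce/source-deploy-retrieve';

// Sketch only: mirrors the ShadowRepoOptions shape in the diff below; all concrete values are invented.
const projectPath = '/path/to/project';
const shadowRepoOptions = {
  orgId: 'fakeOrgId',
  projectPath,
  packageDirs: [{ path: 'force-app', name: 'force-app', fullPath: path.join(projectPath, 'force-app') }],
  // New in this PR: the registry is injected by the caller (SourceTracking passes its own instance).
  registry: new RegistryAccess(),
};
```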
165 changes: 72 additions & 93 deletions src/shared/localShadowRepo.ts
@@ -43,11 +43,14 @@ const redirectToCliRepoError = (e: unknown): never => {
};

type FileInfo = { filename: string; hash: string; basename: string };
type StringMap = Map<string, string>;
type AddAndDeleteMaps = { addedMap: StringMap; deletedMap: StringMap };

type ShadowRepoOptions = {
orgId: string;
projectPath: string;
packageDirs: NamedPackageDir[];
registry: RegistryAccess;
};

// https://isomorphic-git.org/docs/en/statusMatrix#docsNav
@@ -76,6 +79,7 @@ export class ShadowRepo {
private status!: StatusRow[];
private logger!: Logger;
private readonly isWindows: boolean;
private readonly registry: RegistryAccess;

/** do not try to add more than this many files at a time through isogit. You'll hit EMFILE: too many open files even with graceful-fs */
private readonly maxFileAdd: number;
@@ -85,6 +89,7 @@ export class ShadowRepo {
this.projectPath = options.projectPath;
this.packageDirs = options.packageDirs;
this.isWindows = os.type() === 'Windows_NT';
this.registry = options.registry;

this.maxFileAdd = env.getNumber(
'SF_SOURCE_TRACKING_BATCH_SIZE',
@@ -356,14 +361,17 @@ export class ShadowRepo {
}

private async detectMovedFiles(): Promise<void> {
const matchingFilenames = getMatches(await this.getStatus());
if (!matchingFilenames) return;
const { deletedFilenamesWithMatches, addedFilenamesWithMatches } = matchingFilenames;
const { deletedFilenamesWithMatches, addedFilenamesWithMatches } = getMatches(await this.getStatus()) ?? {};
if (!deletedFilenamesWithMatches || !addedFilenamesWithMatches) return;

// We found file adds and deletes with the same basename
// They have likely been moved; confirm by comparing their hashes (oids)
const movedFilesMarker = Performance.mark('@salesforce/source-tracking', 'localShadowRepo.detectMovedFiles');

// Track how long it takes to gather the oid information from the git trees
const getInfoMarker = Performance.mark('@salesforce/source-tracking', 'localShadowRepo.detectMovedFiles#getInfo', {
addedFiles: addedFilenamesWithMatches.length,
deletedFiles: deletedFilenamesWithMatches.length,
});

const getInfo = async (targetTree: Walker, filenameSet: Set<string>): Promise<FileInfo[]> =>
// Unable to properly type the return value of git.walk without using "as", ignoring linter
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
@@ -382,38 +390,39 @@
: undefined,
});

// Track how long it takes to gather the oid information from the git trees
const getInfoMarker = Performance.mark('@salesforce/source-tracking', 'localShadowRepo.detectMovedFiles#getInfo', {
addedFiles: addedFilenamesWithMatches.length,
deletedFiles: deletedFilenamesWithMatches.length,
});

// We found file adds and deletes with the same basename
// They have likely been moved; confirm by comparing their hashes (oids)
const [addedInfo, deletedInfo] = await Promise.all([
getInfo(git.WORKDIR(), new Set(addedFilenamesWithMatches)),
getInfo(git.TREE({ ref: 'HEAD' }), new Set(deletedFilenamesWithMatches)),
]);

getInfoMarker?.stop();

const matches = await compareHashes(addedInfo, deletedInfo);
const { moveLogs, addedFilesWithMatchingTypes, deletedFilesWithMatchingTypes } = compareTypes(
matches,
this.projectPath
);

if (addedFilesWithMatchingTypes.length && deletedFilesWithMatchingTypes.length) {
this.logger.debug('Files have moved. Committing moved files:');
this.logger.debug(moveLogs.join('\n'));
movedFilesMarker?.addDetails({ filesMoved: moveLogs.length });
const matchingNameAndHashes = compareHashes(await buildMaps(addedInfo, deletedInfo));
if (matchingNameAndHashes.size === 0) {
return movedFilesMarker?.stop();
}
const matches = removeNonMatches(matchingNameAndHashes, this.registry);

// Commit the moved files and refresh the status
await this.commitChanges({
deletedFiles: deletedFilesWithMatchingTypes,
deployedFiles: addedFilesWithMatchingTypes,
message: 'Committing moved files',
});
if (matches.size === 0) {
return movedFilesMarker?.stop();
}

this.logger.debug(
  [
    'Files have moved. Committing moved files:',
    ...[...matches.entries()].map(([add, del]) => `File ${del} was moved to ${add}`),
  ].join('\n')
);
movedFilesMarker?.addDetails({ filesMoved: matches.size });

// Commit the moved files and refresh the status
await this.commitChanges({
deletedFiles: [...matches.values()],
deployedFiles: [...matches.keys()],
message: 'Committing moved files',
});

movedFilesMarker?.stop();
}
}
@@ -429,9 +438,9 @@ const packageDirToRelativePosixPath =
const normalize = (filepath: string): string => path.normalize(filepath);
const ensurePosix = (filepath: string): string => filepath.split(path.sep).join(path.posix.sep);

const buildMaps = (info: FileInfo[]): Array<Map<string, string>> => {
const map: Map<string, string> = new Map();
const ignore: Map<string, string> = new Map();
const buildMap = (info: FileInfo[]): StringMap[] => {
const map: StringMap = new Map();
const ignore: StringMap = new Map();
info.forEach((i) => {
const key = `${i.hash}#${i.basename}`;
// If we find a duplicate key, we need to remove it and ignore it in the future.
@@ -479,19 +488,10 @@ const getMatches = (
const isDeleted = (status: StatusRow): boolean => status[WORKDIR] === 0;
const isAdded = (status: StatusRow): boolean => status[HEAD] === 0 && status[WORKDIR] === 2;

/** Build maps using the added/deleted filenames and return the matches. Log the non-matches */
const compareHashes = async (addedInfo: FileInfo[], deletedInfo: FileInfo[]): Promise<Map<string, string>> => {
const compareHashesMarker = Performance.mark('@salesforce/source-tracking', 'localShadowRepo.compareHashes');
const matches = new Map();
const [addedMap, addedIgnoredMap] = buildMaps(addedInfo);
const [deletedMap, deletedIgnoredMap] = buildMaps(deletedInfo);

for (const [addedKey, addedValue] of addedMap) {
const deletedValue = deletedMap.get(addedKey);
if (deletedValue) {
matches.set(addedValue, deletedValue);
}
}
/** Build maps of the added/deleted files by filename, returning both maps. Logs and emits telemetry for hash/basename collisions */
const buildMaps = async (addedInfo: FileInfo[], deletedInfo: FileInfo[]): Promise<AddAndDeleteMaps> => {
const [addedMap, addedIgnoredMap] = buildMap(addedInfo);
const [deletedMap, deletedIgnoredMap] = buildMap(deletedInfo);

// If we detected any files that have the same basename and hash, emit a warning and send telemetry
// These files will still show up as expected in the `sf project deploy preview` output
@@ -508,8 +508,20 @@ const compareHashes = async (addedInfo: FileInfo[], deletedInfo: FileInfo[]): Pr
lifecycle.emitTelemetry({ eventName: 'moveFileHashBasenameCollisionsDetected' }),
]);
}
return { addedMap, deletedMap };
};

/** builds a map of the values from both maps */
const compareHashes = ({ addedMap, deletedMap }: AddAndDeleteMaps): StringMap => {
const matches: StringMap = new Map();

for (const [addedKey, addedValue] of addedMap) {
const deletedValue = deletedMap.get(addedKey);
if (deletedValue) {
matches.set(addedValue, deletedValue);
}
}

compareHashesMarker?.stop();
return matches;
};

@@ -526,56 +538,23 @@ const resolveType = (resolver: MetadataResolver, filenames: string[]): SourceCom
})
.filter(sourceComponentGuard);

// Compare the types of the added and deleted files. If they have a parent, the parent name and type must also match
const compareTypes = (
matches: Map<string, string>,
projectPath: string
): { moveLogs: string[]; addedFilesWithMatchingTypes: string[]; deletedFilesWithMatchingTypes: string[] } => {
const compareTypesMarker = Performance.mark('@salesforce/source-tracking', 'localShadowRepo.compareTypes');
const logger = Logger.childFromRoot('ShadowRepo.compareTypes');
const moveLogs = [];
const addedFilesWithMatchingTypes = [];
const deletedFilesWithMatchingTypes = [];
const registry = new RegistryAccess(undefined, projectPath);

const removeNonMatches = (matches: StringMap, registry: RegistryAccess): StringMap => {
const resolverAdded = new MetadataResolver(registry, VirtualTreeContainer.fromFilePaths([...matches.keys()]));
const resolverDeleted = new MetadataResolver(registry, VirtualTreeContainer.fromFilePaths([...matches.values()]));

for (const [addedFile, deletedFile] of matches) {
const resolvedAdded = resolveType(resolverAdded, [addedFile])[0];
const resolvedDeleted = resolveType(resolverDeleted, [deletedFile])[0];

if (!resolvedAdded || !resolvedDeleted) {
logger.warn(`Unable to resolve type for '${addedFile}' or '${deletedFile}'`);
continue;
}

if (resolvedAdded.type.name !== resolvedDeleted.type.name) {
logger.warn(`Type mismatch for '${addedFile}' and '${deletedFile}'`);
continue;
}

logger.warn('resolvedAdded.parent', resolvedAdded.parent);
logger.warn('resolvedDeleted.parent', resolvedDeleted.parent);

// Name of the parent needs to match, we do not want to allow moving a child to a different parent
// For example a custom field from one object to another (Broker__c to Account__c)
if (resolvedAdded.parent?.name !== resolvedDeleted.parent?.name) {
logger.warn(`Parent name mismatch for '${addedFile}' and '${deletedFile}'`);
continue;
}

// The parent type needs to match, we do not want to allow moving a child to a different parent type
if (resolvedAdded.parent?.type.name !== resolvedDeleted.parent?.type.name) {
logger.warn(`Parent type mismatch for '${addedFile}' and '${deletedFile}'`);
continue;
}

addedFilesWithMatchingTypes.push(addedFile);
deletedFilesWithMatchingTypes.push(deletedFile);
moveLogs.push(`File '${deletedFile}' was moved to '${addedFile}'`);
}

compareTypesMarker?.stop();
return { moveLogs, addedFilesWithMatchingTypes, deletedFilesWithMatchingTypes };
return new Map(
[...matches.entries()].filter(([addedFile, deletedFile]) => {
// we're only ever using the first element of the arrays
const [resolvedAdded] = resolveType(resolverAdded, [addedFile]);
const [resolvedDeleted] = resolveType(resolverDeleted, [deletedFile]);
return (
// they could match, or could both be undefined (because unresolved by SDR)
resolvedAdded?.type.name === resolvedDeleted?.type.name &&
// parent names match, if resolved and there are parents
resolvedAdded?.parent?.name === resolvedDeleted?.parent?.name &&
// parent types match, if resolved and there are parents
resolvedAdded?.parent?.type.name === resolvedDeleted?.parent?.type.name
);
})
);
};
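Taken together, move detection is now a chain of small helpers: `getInfo` gathers `{ filename, hash, basename }` for the added and deleted files, `buildMap` keys each side by `hash#basename` (setting aside ambiguous duplicates), `compareHashes` intersects the two maps, and `removeNonMatches` drops pairs whose resolved metadata types or parents differ. A self-contained sketch of the hashing/intersection half — the helper names echo the diff above, but the bodies and sample data are illustrative only:

```ts
// Illustrative re-implementation of the add/delete matching idea. FileInfo mirrors the type
// defined in localShadowRepo.ts; the sample file data is invented.
type FileInfo = { filename: string; hash: string; basename: string };
type StringMap = Map<string, string>;

const buildMap = (info: FileInfo[]): StringMap[] => {
  const map: StringMap = new Map();
  const ignore: StringMap = new Map();
  for (const i of info) {
    const key = `${i.hash}#${i.basename}`;
    // A duplicate hash#basename key is ambiguous: drop it and remember it so later hits stay ignored.
    if (map.has(key) || ignore.has(key)) {
      map.delete(key);
      ignore.set(key, i.filename);
    } else {
      map.set(key, i.filename);
    }
  }
  return [map, ignore];
};

// Intersect the two sides on the shared hash#basename key: added filename -> deleted filename.
const compareHashes = (addedMap: StringMap, deletedMap: StringMap): StringMap => {
  const matches: StringMap = new Map();
  for (const [key, addedFile] of addedMap) {
    const deletedFile = deletedMap.get(key);
    if (deletedFile) matches.set(addedFile, deletedFile);
  }
  return matches;
};

// Example: same content hash and basename at a new path pairs up as a move.
const added: FileInfo[] = [{ filename: 'force-app/new/Foo.cls', hash: 'abc123', basename: 'Foo.cls' }];
const deleted: FileInfo[] = [{ filename: 'force-app/old/Foo.cls', hash: 'abc123', basename: 'Foo.cls' }];
const [addedMap] = buildMap(added);
const [deletedMap] = buildMap(deleted);
console.log(compareHashes(addedMap, deletedMap)); // Map(1) { 'force-app/new/Foo.cls' => 'force-app/old/Foo.cls' }
```

Keying on both hash and basename means a file only counts as moved when its name and content are unchanged; colliding keys are set aside (and reported via the warning/telemetry path above) rather than guessed at.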
1 change: 1 addition & 0 deletions src/sourceTracking.ts
@@ -512,6 +512,7 @@ export class SourceTracking extends AsyncCreatable {
orgId: this.orgId,
projectPath: normalize(this.projectPath),
packageDirs: this.packagesDirs,
registry: this.registry,
});
// loads the status from file so that it's cached
await this.localRepo.getStatus();
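This one-line change is the other half of the refactor: `SourceTracking` already holds a `RegistryAccess`, so it hands that instance to `ShadowRepo` rather than letting the shadow repo rebuild one from the project path during every comparison. Downstream, that injected registry is what `removeNonMatches` uses to resolve types. A hedged sketch of that consumption, assuming `getComponentsFromPath` throws for paths SDR cannot resolve (which is why the lookup is guarded); `resolveFirst` and `keepMatchingTypes` are illustrative names, not the repo's own helpers:

```ts
import {
  MetadataResolver,
  RegistryAccess,
  SourceComponent,
  VirtualTreeContainer,
} from '@salesforce/source-deploy-retrieve';

// Resolve the first component for a path, treating resolution failures as "unresolved".
const resolveFirst = (resolver: MetadataResolver, filename: string): SourceComponent | undefined => {
  try {
    return resolver.getComponentsFromPath(filename)[0];
  } catch {
    return undefined;
  }
};

// Keep only add/delete pairs whose type, parent name, and parent type all agree
// (e.g. a CustomField moved from Broker__c to Account__c is rejected).
const keepMatchingTypes = (matches: Map<string, string>, registry: RegistryAccess): Map<string, string> => {
  const addedResolver = new MetadataResolver(registry, VirtualTreeContainer.fromFilePaths([...matches.keys()]));
  const deletedResolver = new MetadataResolver(registry, VirtualTreeContainer.fromFilePaths([...matches.values()]));
  return new Map(
    [...matches.entries()].filter(([addedFile, deletedFile]) => {
      const added = resolveFirst(addedResolver, addedFile);
      const deleted = resolveFirst(deletedResolver, deletedFile);
      return (
        added?.type.name === deleted?.type.name &&
        added?.parent?.name === deleted?.parent?.name &&
        added?.parent?.type.name === deleted?.parent?.type.name
      );
    })
  );
};
```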
2 changes: 2 additions & 0 deletions test/nuts/local/commitPerf.nut.ts
@@ -7,6 +7,7 @@
import path from 'node:path';
import { TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import { RegistryAccess } from '@salesforce/source-deploy-retrieve';
import { ShadowRepo } from '../../../src/shared/localShadowRepo';

describe('perf testing for big commits', () => {
@@ -32,6 +33,7 @@ describe('perf testing for big commits', () => {
orgId: 'fakeOrgId',
projectPath: session.project.dir,
packageDirs: [{ path: 'force-app', name: 'force-app', fullPath: path.join(session.project.dir, 'force-app') }],
registry: new RegistryAccess(),
});
});

2 changes: 2 additions & 0 deletions test/nuts/local/localTrackingFileMovesDecomposedChild.nut.ts
@@ -9,6 +9,7 @@ import * as path from 'node:path';
import { TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import * as fs from 'graceful-fs';
import { RegistryAccess } from '@salesforce/source-deploy-retrieve';
import { ShadowRepo } from '../../../src/shared/localShadowRepo';

describe('ignores moved files that are children of a decomposed metadata type', () => {
@@ -34,6 +35,7 @@ describe('ignores moved files that are children of a decomposed metadata type',
orgId: 'fakeOrgId',
projectPath: session.project.dir,
packageDirs: [{ path: 'force-app', name: 'force-app', fullPath: path.join(session.project.dir, 'force-app') }],
registry: new RegistryAccess(),
});
});

29 changes: 25 additions & 4 deletions test/nuts/local/localTrackingFileMovesImage.nut.ts
@@ -9,9 +9,11 @@ import * as path from 'node:path';
import { TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import * as fs from 'graceful-fs';
import { RegistryAccess } from '@salesforce/source-deploy-retrieve';
import { ShadowRepo } from '../../../src/shared/localShadowRepo';

describe('it detects image file moves ', () => {
const registry = new RegistryAccess();
let session: TestSession;
let repo: ShadowRepo;
let filesToSync: string[];
@@ -34,19 +36,38 @@ describe('it detects image file moves ', () => {
orgId: 'fakeOrgId',
projectPath: session.project.dir,
packageDirs: [{ path: 'force-app', name: 'force-app', fullPath: path.join(session.project.dir, 'force-app') }],
registry,
});
});

it('should show 0 files (images) in git status after moving them', async () => {
// Commit the existing class files
filesToSync = await repo.getChangedFilenames();
await repo.commitChanges({ deployedFiles: filesToSync })
await repo.commitChanges({ deployedFiles: filesToSync });

// move all the classes to the new folder
fs.mkdirSync(path.join(session.project.dir, 'force-app', 'main', 'default', 'staticresources', 'bike_assets_new'), { recursive: true });
fs.mkdirSync(path.join(session.project.dir, 'force-app', 'main', 'default', 'staticresources', 'bike_assets_new'), {
recursive: true,
});
fs.renameSync(
path.join(session.project.dir, 'force-app', 'main', 'default', 'staticresources', 'bike_assets', 'CyclingGrass.jpg'),
path.join(session.project.dir, 'force-app', 'main', 'default', 'staticresources', 'bike_assets_new', 'CyclingGrass.jpg'),
path.join(
session.project.dir,
'force-app',
'main',
'default',
'staticresources',
'bike_assets',
'CyclingGrass.jpg'
),
path.join(
session.project.dir,
'force-app',
'main',
'default',
'staticresources',
'bike_assets_new',
'CyclingGrass.jpg'
)
);

await repo.getStatus(true);
4 changes: 3 additions & 1 deletion test/nuts/local/localTrackingFileMovesScale.nut.ts
@@ -9,6 +9,7 @@ import * as path from 'node:path';
import { TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import * as fs from 'graceful-fs';
import { RegistryAccess } from '@salesforce/source-deploy-retrieve';
import { ShadowRepo } from '../../../src/shared/localShadowRepo';

const dirCount = 20;
@@ -61,13 +62,14 @@ describe(`handles local files moves of ${classCount.toLocaleString()} classes ($
orgId: 'fakeOrgId',
projectPath: session.project.dir,
packageDirs: [{ path: 'force-app', name: 'force-app', fullPath: path.join(session.project.dir, 'force-app') }],
registry: new RegistryAccess(),
});
});

it('should show 0 files in git status after moving them', async () => {
// Commit the existing class files
filesToSync = await repo.getChangedFilenames();
await repo.commitChanges({ deployedFiles: filesToSync })
await repo.commitChanges({ deployedFiles: filesToSync });

// move all the classes to the new folder
fs.mkdirSync(path.join(session.project.dir, 'force-app', 'main', 'foo'), { recursive: true });
3 changes: 3 additions & 0 deletions test/nuts/local/localTrackingScale.nut.ts
@@ -9,6 +9,7 @@ import path from 'node:path';
import { TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import * as fs from 'graceful-fs';
import { RegistryAccess } from '@salesforce/source-deploy-retrieve';
import { ShadowRepo } from '../../../src/shared/localShadowRepo';

const dirCount = 200;
@@ -18,6 +19,7 @@ const classCount = dirCount * classesPerDir;
describe(`verify tracking handles an add of ${classCount.toLocaleString()} classes (${(
classCount * 2
).toLocaleString()} files across ${dirCount.toLocaleString()} folders)`, () => {
const registry = new RegistryAccess();
let session: TestSession;
let repo: ShadowRepo;
let filesToSync: string[];
@@ -61,6 +63,7 @@ describe(`verify tracking handles an add of ${classCount.toLocaleString()} class
orgId: 'fakeOrgId',
projectPath: session.project.dir,
packageDirs: [{ path: 'force-app', name: 'force-app', fullPath: path.join(session.project.dir, 'force-app') }],
registry,
});
});

