refactor: better monorepo structure
ethansnow2012 committed May 1, 2024
1 parent 2ca2fd4 commit fcab7e8
Showing 37 changed files with 833 additions and 15 deletions.
62 changes: 62 additions & 0 deletions packages/core/cbj_representation.json
@@ -0,0 +1,62 @@
[
{
"path": "~/package.json",
"content": "{\n \"name\": \"core\",\n \"version\": \"0.1.0\",\n \"private\": true,\n \"description\": \"Core functionality for the project.\",\n \"main\": \"dist/index.js\",\n \"scripts\": {\n \"dev\": \"ts-node ./index.ts\",\n \"test\": \"mocha -r ts-node/register -r tsconfig-paths/register './**/*.spec.ts'\",\n \"build\": \"npm run test && tsc\"\n },\n \"dependencies\": {\n \"ignore\": \"^5.3.1\"\n },\n \"devDependencies\": {\n \"@types/node\": \"^14.0.0\",\n \"typescript\": \"^4.0.0\",\n \"mocha\": \"^8.0.0\",\n \"@types/mocha\": \"^8.0.0\"\n }\n}\n"
},
{
"path": "~/src/__test__/basic.spec.ts",
"content": "import { expect } from 'chai';\nimport { execSync } from 'child_process';\nimport compress from '../compress';\nimport decompress from '../decompress';\n\ndescribe('E2E Test for basic functionality.', () => {\n it('Go through compress/decompress without change anything.', () => {\n compress()\n decompress()\n const diff = execSync('git status --porcelain').toString()\n expect(diff).to.be.eq('');\n });\n});\n\n\n\n\n"
},
{
"path": "~/src/__test__/init.spec.ts",
"content": "import { expect } from 'chai';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport init from '../init';\n\ndescribe('init function', () => {\n \n const destPath = path.join('./', 'cbj_representation.json');\n\n // Clean up before and after tests\n beforeEach(() => {\n if (fs.existsSync(destPath)) {\n fs.unlinkSync(destPath);\n }\n });\n\n afterEach(() => {\n if (fs.existsSync(destPath)) {\n fs.unlinkSync(destPath);\n }\n });\n\n it('should copy cbj_representation.json from ./packages/core/src/assets to ./', () => {\n const srcPath = path.join('./packages/core/src/assets', 'cbj_representation.json');\n // Ensure the source file exists for the test\n if (!fs.existsSync(srcPath)) {\n fs.writeFileSync(srcPath, '{}'); // Create a dummy file\n }\n\n init();\n\n expect(fs.existsSync(destPath)).to.be.true;\n expect(fs.readFileSync(destPath, 'utf8')).to.equal(JSON.stringify(JSON.parse(fs.readFileSync(srcPath, 'utf8'))) );\n });\n\n it('should handle the absence of cbj_representation.json in ./packages/core/src/assets', () => {\n const srcPath = path.join('./', 'cbj_representation.json');\n \n init();\n\n expect(fs.existsSync(destPath)).to.be.true;\n });\n});"
},
{
"path": "~/src/__test__/mutation.spec.ts",
"content": "import { expect } from 'chai';\r\nimport fs from 'fs';\r\nimport path from 'path';\r\nimport compress from '../compress'\r\nimport decompress from '../decompress';\r\nimport { FileData } from '../type';\r\n\r\n\r\ndescribe('E2E Test for File Operations', () => {\r\n const tempFileName = 'tempFile.js';\r\n const tempFilePath = path.join(process.cwd(), tempFileName);\r\n const cbjRepresentationPath = path.join(process.cwd(), 'cbj_representation.json');\r\n\r\n it('should add, compress, check, remove, decompress, and recover a file', () => {\r\n // Step 1: Add a random file \r\n fs.writeFileSync(tempFilePath, 'console.log(\"This is a temporary file.\");');\r\n\r\n // Step 2: Compress\r\n compress();\r\n\r\n // Step 3: Check cbj_representation has the content\r\n const cbjRepresentation = fs.readFileSync(cbjRepresentationPath, 'utf8');\r\n expect(cbjRepresentation.includes(`\"path\":`+' ' +`\"~/tempFile.js\",`)).to.be.true;\r\n\r\n // Step 4: Remove the random file\r\n fs.unlinkSync(tempFilePath);\r\n expect(fs.existsSync(tempFilePath)).to.be.false;\r\n\r\n // // Step 5: Decompress\r\n decompress();\r\n \r\n // Step 6: Check the file recovered\r\n const fileRecovered = fs.existsSync(tempFilePath);\r\n\r\n // Clean up (remove the temporary file after test)\r\n fs.unlinkSync(tempFilePath);\r\n });\r\n \r\n const testFilePath = path.join('./packages/core', 'testFile.txt');\r\n const originalContent = 'Original content';\r\n const modifiedContent = 'Modified content';\r\n it('should detect and include changes in an existing file', () => {\r\n // before\r\n // Create a test file with original content\r\n fs.writeFileSync(testFilePath, originalContent);\r\n // Run compress to include the original file\r\n compress();\r\n // before end\r\n\r\n // Modify the test file\r\n fs.writeFileSync(testFilePath, modifiedContent);\r\n \r\n // Run compress again\r\n compress();\r\n\r\n // Read the cbj_representation.json and check if it includes the modified content\r\n const cbjRepresentation: FileData[] = JSON.parse(fs.readFileSync('./cbj_representation.json', 'utf8'));\r\n const testFileData = cbjRepresentation.find(fileData => fileData.path.includes('testFile.txt'));\r\n\r\n expect(testFileData).to.exist;\r\n expect(testFileData!.content).to.equal(modifiedContent);\r\n\r\n // after \r\n if (fs.existsSync(testFilePath)) {\r\n fs.unlinkSync(testFilePath);\r\n }\r\n if (fs.existsSync('./cbj_representation.json')) {\r\n fs.unlinkSync('./cbj_representation.json');\r\n }\r\n // after end\r\n });\r\n});"
},
{
"path": "~/src/assets/cbj_representation.json",
"content": "[\r\n {\r\n \"path\": \"C:\\\\Users\\\\ethan\\\\Desktop\\\\code\\\\my\\\\gg\\\\package.json\",\r\n \"content\": \"{\\n \\\"name\\\": \\\"gg\\\",\\n \\\"version\\\": \\\"1.0.0\\\",\\n \\\"description\\\": \\\"\\\",\\n \\\"main\\\": \\\"index.js\\\",\\n \\\"scripts\\\": {\\n \\\"test\\\": \\\"echo \\\\\\\"Error: no test specified\\\\\\\" && exit 1\\\"\\n },\\n \\\"author\\\": \\\"\\\",\\n \\\"license\\\": \\\"ISC\\\",\\n \\\"dependencies\\\": {\\n \\\"cbj\\\": \\\"^0.0.8\\\"\\n }\\n}\\n\"\r\n },\r\n {\r\n \"path\": \"C:\\\\Users\\\\ethan\\\\Desktop\\\\code\\\\my\\\\gg\\\\src\\\\index.ts\",\r\n \"content\": \"console.log('Hello, world!')\"\r\n }\r\n]\r\n "
},
{
"path": "~/src/compress.ts",
"content": "\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport ignore from 'ignore';\nimport cbjConfig, { readConfig } from './defaultConfig'\nimport { FileData } from './type'\nimport { homeChar } from './constants'\n\ntype Ignore = ReturnType<typeof ignore>;\n\n\nfunction readGitignore(): string[] {\n const forceIgnorePath = ['.git', '.gitignore', 'node_modules', 'dist', 'build', 'out', 'coverage', '/cbj_representation.json', 'cbj.config.js', '*-lock.yaml']\n const gitignorePath = '.gitignore';\n console.log('reading gitignorePath:', gitignorePath);\n if (fs.existsSync(gitignorePath)) {\n return forceIgnorePath.concat(fs.readFileSync(gitignorePath, 'utf8').split('\\n'));\n }\n return forceIgnorePath;\n}\n\nfunction isImageFile(file: string): boolean {\n const imageExtensions = ['.jpg', '.jpeg', '.png', '.gif', '.bmp', '.tiff', 'ico'];\n return imageExtensions.some(ext => file.endsWith(ext));\n}\nfunction readFilesRecursively(dir: string, ig: Ignore, _cbjConfig: typeof cbjConfig): FileData[] {\n let results: FileData[] = [];\n\n fs.readdirSync(dir).forEach(file => {\n file = path.resolve(dir, file);\n if (ig.ignores(path.relative(_cbjConfig.dirPath, file)) || isImageFile(file)) {\n return;\n }\n const stat = fs.statSync(file);\n\n if (stat && stat.isDirectory()) {\n results = results.concat(readFilesRecursively(file, ig, _cbjConfig));\n } else {\n results.push({\n path: file,\n content: fs.readFileSync(file, 'utf8')\n });\n }\n });\n\n return results;\n}\n\nfunction generateBatchJsonFiles(dir: string, ig: Ignore, batchCount: number): void {\n const filesData = readFilesRecursively(dir, ig, cbjConfig);\n const batchSize = Math.ceil(filesData.length / batchCount);\n const homeDir = process.cwd()\n for (let i = 0; i < filesData.length; i += batchSize) {\n const batchFiles = filesData.slice(i, i + batchSize);\n const fileDataBatch = batchFiles.map((file) => {\n return {\n path: file.path.replace(homeDir, homeChar).replace(/\\\\/g, '/'),\n content: file.content\n }\n });\n const jsonContent = JSON.stringify(fileDataBatch, null, 2);\n let outputFileName = ''\n if (batchCount === 1) {\n outputFileName = `cbj_representation.json`;\n }else{\n outputFileName = `cbj_representation_${Math.floor(i / batchSize) + 1}.json`;\n }\n \n fs.writeFileSync(outputFileName, jsonContent);\n console.log(`Batch(with batchCount ${batchCount}) file data written to ${outputFileName}`);\n }\n}\n\nexport default function compress() {\n console.log('compressing...');\n let cbjConfig = readConfig();\n const ig = ignore();\n const gitignoreRules = readGitignore();\n ig.add(gitignoreRules);\n const dirPath = cbjConfig.dirPath;\n const batchCount = cbjConfig.batchCount;\n\n generateBatchJsonFiles(dirPath, ig, batchCount);\n}\n"
},
{
"path": "~/src/constants.ts",
"content": "export const homeChar = '~';"
},
{
"path": "~/src/decompress.ts",
"content": "\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { readConfig } from './defaultConfig'\nimport { FileData } from './type'\nimport { execSync } from 'child_process';\nimport { homeChar } from './constants'\n\nfunction processBatchJsonFiles(jsonFilePath: string): void {\n try {\n const fileDataBatch: FileData[] = JSON.parse(fs.readFileSync(jsonFilePath, { encoding: 'utf8' }));\n \n fileDataBatch.forEach(fileData => {\n const normalizedPath = path.normalize(fileData.path).replace(/^([A-Z]:\\\\|\\\\\\\\)/, '/');\n const homeDir = process.cwd()\n const absPath = normalizedPath.replace(homeChar, homeDir)\n if (!fs.existsSync(absPath)) {\n fs.mkdirSync(path.dirname(absPath), { recursive: true });\n }\n fs.writeFileSync(absPath, fileData.content, { encoding: 'utf8' });\n console.log(`${fs.existsSync(absPath) ? 'Updated' : 'Created'}: ${normalizedPath}`);\n });\n } catch (error) {\n console.error(`Error processing ${jsonFilePath}:`, error);\n }\n}\n\n// Function to check for uncommitted changes using Git\nfunction checkForUncommittedChanges(): boolean {\n try {\n const result = execSync('git status --porcelain').toString();\n return result !== '';\n } catch (error) {\n console.error('Error checking for uncommitted changes:', error);\n return false;\n }\n}\n\nexport default function decompress() {\n if (checkForUncommittedChanges()) {\n console.log('Warning: There are uncommitted changes. Please commit or stash them before running decompress.');\n return;\n }\n let cbjConfig = readConfig();\n const intputFileName = cbjConfig.intputFileName || 'cbj_representation';\n if (intputFileName.includes('.')){\n throw new Error('intputFileName: ' + intputFileName + 'should not include file extension');\n }\n fs.readdirSync('./').forEach(file => {\n if (file === `${intputFileName}.json` || (file.startsWith(`${intputFileName}_`) && path.extname(file) === '.json')) {\n processBatchJsonFiles(path.resolve('./', file));\n }\n });\n}\n"
},
{
"path": "~/src/defaultConfig.ts",
"content": "const _cbjConfig = {\r\n dirPath: './',\r\n outputFileName: 'cbj_representation',\r\n intputFileName: 'cbj_representation',\r\n batchCount: 1\r\n};\r\nexport default _cbjConfig\r\n\r\nexport function readConfig() {\r\n try {\r\n const configPath = process.cwd() + '\\\\cbj.config.js'\r\n console.log('trying to read configModule:', configPath);\r\n const configModule = require(configPath);\r\n if (configModule) {\r\n console.log('Config module read.');\r\n }\r\n return (configModule || _cbjConfig) as typeof _cbjConfig;\r\n\r\n } catch (error) {\r\n console.log('No cbj.config.ts found, using default configuration.');\r\n return _cbjConfig\r\n }\r\n}"
},
{
"path": "~/src/index.ts",
"content": "#!/usr/bin/env node\r\nimport * as process from 'process';\r\nimport init from \"./init\";\r\nimport compress from \"./compress\";\r\nimport decompress from \"./decompress\";\r\n\r\n\r\n// Main logic to parse command line arguments\r\nconst args = process.argv.slice(2);\r\nswitch (args[0]) {\r\n case 'init':\r\n init();\r\n break;\r\n case 'compress':\r\n compress();\r\n break;\r\n case 'decompress':\r\n decompress();\r\n break;\r\n default:\r\n console.log('Unknown command. Use \"compress\" or \"decompress\".');\r\n}"
},
{
"path": "~/src/init.ts",
"content": "import * as fs from 'fs';\nimport * as path from 'path';\nimport cbj_representation from './assets/cbj_representation.json';\n\nexport default function init() {\n const destPath = path.join('./', 'cbj_representation.json');\n\n if (cbj_representation) {\n fs.writeFileSync(destPath, JSON.stringify(cbj_representation));\n console.log('cbj_representation.json has been copied to the project root.');\n } else {\n console.log('cbj_representation.json does not exist in ./packages/core/src/assets');\n }\n}"
},
{
"path": "~/src/test/mocha.opts",
"content": "--require ts-node/register\n--timeout 5000\n--exit\n"
},
{
"path": "~/src/type.ts",
"content": "export interface FileData {\r\n path: string;\r\n content: string;\r\n}"
},
{
"path": "~/tempFile.js",
"content": "console.log(\"This is a temporary file.\");"
},
{
"path": "~/tsconfig.json",
"content": "{\n \"extends\": \"../../tsconfig.json\",\n \"compilerOptions\": {\n \"outDir\": \"./dist\",\n \"rootDir\": \"./src\",\n \"baseUrl\": \".\",\n \"paths\": {\n \"~/*\": [\"packages/core/src/*\"]\n }\n \n },\n \"exclude\": [\"node_modules\", \"dist\"]\n }"
}
]
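
Every path recorded in cbj_representation.json is anchored at the '~' home character (see ~/src/constants.ts): compress() substitutes it for the current working directory, and decompress() expands it back. The sketch below illustrates that translation; it is a minimal reconstruction based on the replace calls in compress.ts and decompress.ts above, and the helper names are hypothetical, not part of the package.

import * as path from 'path';

const homeChar = '~';

// compress(): make an absolute path portable by anchoring it at '~'
// and normalizing Windows backslashes to forward slashes.
function toPortablePath(absPath: string, homeDir: string = process.cwd()): string {
  return absPath.replace(homeDir, homeChar).replace(/\\/g, '/');
}

// decompress(): expand a portable path back to an absolute path on this machine.
function toAbsolutePath(portablePath: string, homeDir: string = process.cwd()): string {
  return path.normalize(portablePath).replace(homeChar, homeDir);
}

// Round trip: <cwd>/src/index.ts -> ~/src/index.ts -> <cwd>/src/index.ts
const portable = toPortablePath(path.join(process.cwd(), 'src', 'index.ts'));
console.log(portable, '->', toAbsolutePath(portable));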
17 changes: 17 additions & 0 deletions packages/core/dist/__test__/basic.spec.js
@@ -0,0 +1,17 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const chai_1 = require("chai");
const child_process_1 = require("child_process");
const compress_1 = __importDefault(require("../compress"));
const decompress_1 = __importDefault(require("../decompress"));
describe('E2E Test for basic functionality.', () => {
it('Go through compress/decompress without change anything.', () => {
(0, compress_1.default)();
(0, decompress_1.default)();
const diff = (0, child_process_1.execSync)('git status --porcelain').toString();
(0, chai_1.expect)(diff).to.be.eq('');
});
});
61 changes: 61 additions & 0 deletions packages/core/dist/__test__/init.spec.js
@@ -0,0 +1,61 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const chai_1 = require("chai");
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const init_1 = __importDefault(require("../init"));
describe('init function', () => {
const destPath = path.join('./', 'cbj_representation.json');
// Clean up before and after tests
beforeEach(() => {
if (fs.existsSync(destPath)) {
fs.unlinkSync(destPath);
}
});
afterEach(() => {
if (fs.existsSync(destPath)) {
fs.unlinkSync(destPath);
}
});
it('should copy cbj_representation.json from ./packages/core/src/assets to ./', () => {
const srcPath = path.join('./packages/core/src/assets', 'cbj_representation.json');
// Ensure the source file exists for the test
if (!fs.existsSync(srcPath)) {
fs.writeFileSync(srcPath, '{}'); // Create a dummy file
}
(0, init_1.default)();
(0, chai_1.expect)(fs.existsSync(destPath)).to.be.true;
(0, chai_1.expect)(fs.readFileSync(destPath, 'utf8')).to.equal(JSON.stringify(JSON.parse(fs.readFileSync(srcPath, 'utf8'))));
});
it('should handle the absence of cbj_representation.json in ./packages/core/src/assets', () => {
const srcPath = path.join('./', 'cbj_representation.json');
(0, init_1.default)();
(0, chai_1.expect)(fs.existsSync(destPath)).to.be.true;
});
});
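
For reference, compress() and decompress() resolve their settings through readConfig() in ~/src/defaultConfig.ts, which require()s a cbj.config.js from the project root and falls back to the built-in defaults when no such file exists. A hypothetical cbj.config.js mirroring those defaults (the values are illustrative only and not part of this commit) could look like:

// cbj.config.js at the project root; field names follow defaultConfig.ts,
// including the package's existing 'intputFileName' spelling.
module.exports = {
  dirPath: './',                        // directory tree to walk when compressing
  outputFileName: 'cbj_representation', // base name (no extension) for the generated JSON
  intputFileName: 'cbj_representation', // base name (no extension) that decompress() looks for
  batchCount: 1                         // 1 => single cbj_representation.json; >1 => numbered batch files
};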
