diff --git a/Readme.md b/Readme.md index 611a505f49..6356507648 100644 --- a/Readme.md +++ b/Readme.md @@ -1,3 +1,116 @@ -# Node.js basics +# ๐Ÿง  Node.js Basics โ€“ Review Branch -## !!! Please don't submit Pull Requests to this repository !!! +This repository is a **review-enhanced fork** of [AlreadyBored/node-nodejs-basics](https://github.com/AlreadyBored/node-nodejs-basics), structured to align with the [RS School Node.js 2025 curriculum](https://github.com/AlreadyBored/nodejs-assignments). It includes modular implementations of core Node.js concepts, with a focus on clarity, testability, and assignment compliance. + +> ๐Ÿ“Œ **Branch:** `review` โ€” 28 commits ahead, 1 behind upstream `main` + +--- + +## ๐Ÿ“š Project Goals + +- Build a solid foundation in Node.js core modules and APIs +- Practice asynchronous programming and stream handling +- Implement CLI tools, file operations, and compression +- Explore advanced topics like child processes and worker threads +- Prepare for horizontal scaling with the Cluster API (Section 8) + +--- + +## ๐Ÿ“ Folder Structure + +``` +src/ +โ”œโ”€โ”€ cli/ # Command-line interface logic and argument parsing +โ”œโ”€โ”€ cp/ # Child process operations +โ”œโ”€โ”€ fs/ # File system utilities +โ”œโ”€โ”€ hash/ # Hashing functionality +โ”œโ”€โ”€ modules/ # Node.js core modules +โ”œโ”€โ”€ streams/ # Stream-based operations +โ”œโ”€โ”€ wt/ # Worker threads +โ”œโ”€โ”€ zip/ # Compression and decompression logic +``` + +Each folder corresponds to a specific RS School assignment section. All modules are self-contained and can be executed independently. + +--- + +## โš™๏ธ Setup Instructions + +1. **Clone the repository** + ```bash + git clone https://github.com/toby-28/node-nodejs-basics.git + cd node-nodejs-basics + ``` + +2. **Install dependencies** + ```bash + npm install + ``` + +3. **Run a specific module** + ```bash + node src//.js + ``` + + Replace `` and `` with the appropriate path. 
For example: + ```bash + node src/fs/files/create.js + ``` + +--- + +## ๐Ÿงช Testing + +Some modules include test scripts or validation logic. To run tests (if available): + +```bash +npm test +``` + +> โœ… Ensure Node.js v18+ is installed to support modern APIs like `stream/promises` and `worker_threads`. + +--- + +## ๐Ÿงต Section 8: Horizontal Scaling (Coming Soon) + +The `review` branch is being actively updated to include: +- A custom load balancer using the **Cluster API** +- Round-robin request distribution +- Graceful worker lifecycle management + +Once implemented, this logic will live in a dedicated file (e.g., `src/cluster/multi.ts`) and be documented here. + +--- + +## ๐Ÿง  Learning Outcomes + +By completing this project, youโ€™ll gain hands-on experience with: + +- Node.js runtime and architecture +- CLI tool development +- File system and stream manipulation +- Cryptographic hashing +- Child processes and worker threads +- Compression and decompression +- Modular code organization +- Cluster-based horizontal scaling + +--- + +## ๐Ÿค Contributing + +This branch is maintained for review and educational purposes. Contributions are welcome via: + +- Pull requests +- Issue reports +- Suggestions for clarity or structure + +--- + +## ๐Ÿ“„ License + +This project inherits the license from the original repository. See [LICENSE](https://github.com/AlreadyBored/node-nodejs-basics/blob/main/LICENSE) for details. 
// src/cli/args.js
// Parses CLI arguments supplied as `--propName value` pairs and prints
// each pair to stdout as "propName is value".
const parseArgs = () => {
  const args = process.argv.slice(2);
  for (let i = 0; i < args.length; i += 2) {
    const key = args[i].replace(/^--/, "");
    const value = args[i + 1];
    // Guard against a trailing flag with no value (odd argument count)
    // instead of printing "key is undefined".
    if (value === undefined) continue;
    console.log(`${key} is ${value}`);
  }
};

parseArgs();
// src/fs/copy.js
import { access, cp } from "fs/promises";
import { constants } from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

// true if `p` exists, false otherwise — never throws.
const exists = async (p) => {
  try {
    await access(p, constants.F_OK);
    return true;
  } catch {
    return false;
  }
};

// Copies ./files to ./files_copy recursively.
// Throws "FS operation failed" when ./files is missing OR ./files_copy
// already exists. (Previously a missing source fell into the ENOENT
// branch and cp() then surfaced a raw ENOENT instead of the required
// error message.)
const copy = async () => {
  const source = path.join(__dirname, "files");
  const destination = path.join(__dirname, "files_copy");

  if (!(await exists(source)) || (await exists(destination))) {
    throw new Error("FS operation failed");
  }
  await cp(source, destination, { recursive: true });
};

await copy();
// src/fs/read.js
import { readFile } from "fs/promises";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Prints the contents of ./files/fileToRead.txt to stdout.
// Throws "FS operation failed" when the file cannot be read.
const read = async () => {
  const filePath = path.join(__dirname, "files", "fileToRead.txt");

  try {
    // Read directly: the former access() + readFile() pair was a
    // TOCTOU race — readFile already fails with ENOENT when missing.
    const content = await readFile(filePath, "utf-8");
    console.log(content);
  } catch (err) {
    // Keep the underlying error attached for debugging.
    throw new Error("FS operation failed", { cause: err });
  }
};

await read();
// src/hash/calcHash.js
import { createHash } from "crypto";
import { createReadStream } from "fs";
import path from "path";
import { fileURLToPath } from "url";

// Streams ./files/fileToCalculateHashFor.txt through SHA-256 and logs
// the hex digest. Resolves with the digest only once hashing completes:
// the previous version returned before the stream finished, so
// `await calcHash()` awaited nothing, and a stream error never settled.
const calcHash = async () => {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const filePath = path.join(__dirname, "files", "fileToCalculateHashFor.txt");

  return new Promise((resolve, reject) => {
    const hash = createHash("sha256");
    const stream = createReadStream(filePath);

    stream.on("error", (err) =>
      reject(new Error("FS operation failed", { cause: err }))
    );
    stream.on("data", (chunk) => hash.update(chunk));
    stream.on("end", () => {
      const digest = hash.digest("hex");
      console.log(digest);
      resolve(digest);
    });
  });
};

await calcHash();
// src/modules/esm.mjs — ESM port of the former cjsToEsm.cjs module.
import path from "node:path";
import { release, version } from "node:os";
import { createServer as createServerHttp } from "node:http";
import { fileURLToPath, pathToFileURL } from "node:url";
import { readFile } from "node:fs/promises";

// CommonJS-style __filename/__dirname, rebuilt for ESM.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Absolute path of an entry in the local "files" directory.
const filesPath = (name) => path.join(__dirname, "files", name);

// Execute the CommonJS module purely for its side effects.
await import(pathToFileURL(filesPath("c.cjs")).href);

// Read and parse a local JSON fixture.
const loadJson = async (name) =>
  JSON.parse(await readFile(filesPath(name), "utf-8"));

const aJson = await loadJson("a.json");
const bJson = await loadJson("b.json");

// Coin flip decides which fixture the module exposes.
const unknownObject = Math.random() > 0.5 ? aJson : bJson;

// System information.
console.log(`Release ${release()}`);
console.log(`Version ${version()}`);
console.log(`Path segment separator is "${path.sep}"`);
console.log(`Path to current file is ${__filename}`);
console.log(`Path to current directory is ${__dirname}`);

// Minimal HTTP server that answers every request identically.
const myServer = createServerHttp((_, res) => {
  res.end("Request accepted");
});

const PORT = 3000;

console.log(unknownObject);

myServer.listen(PORT, () => {
  console.log(`Server is listening on port ${PORT}`);
  console.log("To terminate it, use Ctrl+C combination");
});

export { unknownObject, myServer };
// src/wt/main.js
import { Worker } from 'worker_threads';
import os from 'os';
import path from 'path';
import { fileURLToPath } from 'url';

// Spawns one worker per CPU core, sends each an incrementing value
// starting at 10, and prints the collected results in spawn order.
const main = async () => {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const workerPath = path.join(__dirname, 'worker.js');

  const numCores = os.cpus().length;
  const startValue = 10;

  // Run a single worker and settle with a { status, data } record.
  const runWorker = (value) =>
    new Promise((resolve) => {
      const worker = new Worker(workerPath);
      const settle = (result) => {
        // worker.js keeps a persistent parentPort 'message' listener,
        // so without terminate() the workers stay alive and the
        // process never exits after results are printed.
        worker.terminate();
        resolve(result);
      };

      worker.once('message', (data) => settle({ status: 'resolved', data }));
      worker.once('error', () => settle({ status: 'error', data: null }));
      worker.once('exit', (code) => {
        if (code !== 0) settle({ status: 'error', data: null });
      });

      worker.postMessage(value);
    });

  const results = await Promise.all(
    Array.from({ length: numCores }, (_, i) => runWorker(startValue + i))
  );
  console.log(results);
};

await main();
// src/zip/decompress.js
import { createReadStream, createWriteStream } from 'fs';
import { createGunzip } from 'zlib';
import path from 'path';
import { fileURLToPath } from 'url';

// Decompresses ./files/archive.gz into ./files/fileToCompress.txt.
// Resolves only when the write stream finishes; rejects with
// "FS operation failed" on any stream error.
const decompress = async () => {
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const sourcePath = path.join(__dirname, 'files', 'archive.gz');
  const destinationPath = path.join(__dirname, 'files', 'fileToCompress.txt');

  return new Promise((resolve, reject) => {
    const readable = createReadStream(sourcePath);
    const gunzip = createGunzip();
    const writable = createWriteStream(destinationPath);

    const fail = (err) =>
      reject(new Error('FS operation failed', { cause: err }));

    readable.on('error', fail);
    // Previously the gunzip stream had no error handler: a corrupt
    // archive raised an uncaught 'error' event and crashed the process.
    gunzip.on('error', fail);
    writable.on('error', fail);
    writable.on('finish', resolve);

    readable.pipe(gunzip).pipe(writable);
  });
};

await decompress();