ETE Practice Questions
INT222
(Unit I - Handling Data I/O in Node.js):
Create a robust Node.js script for a log management system. Your script must perform the following tasks sequentially:
A. Create a readable stream to read a large text file named 'system.log'.
B. Create a writable stream to a new file named 'archive.log.gz'.
C. Use the Zlib module to compress the data as it flows from the readable stream to the writable stream (piping).
D. Implement a 'finish' event listener to print "Log archiving completed" to the console.
E. Use the 'fs' module to delete the original 'system.log' only after the compression is successfully finished.
const fs = require('fs');
const zlib = require('zlib');
const path = require('path');
const inputFile = path.join(__dirname, 'system.log');
const outputFile = path.join(__dirname, 'archive.log.gz');
// Step A: Create Readable Stream
const readStream = fs.createReadStream(inputFile);
// Step B: Create Writable Stream
const writeStream = fs.createWriteStream(outputFile);
// Step C: Create Gzip object for compression
const gzip = zlib.createGzip();
console.log('Starting compression...');
// Pipe: Read -> Compress -> Write
readStream.pipe(gzip).pipe(writeStream);
// Step D: Handle finish event
writeStream.on('finish', () => {
console.log('Log archiving completed.');
// Step E: Delete original file after success
fs.unlink(inputFile, (err) => {
if (err) {
console.error('Error deleting original file:', err);
} else {
console.log('Original log file deleted successfully.');
}
});
});
// Error handling
readStream.on('error', (err) => console.error('Read Error:', err));
writeStream.on('error', (err) => console.error('Write Error:', err));
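Note: as an alternative to chaining .pipe() manually, the built-in stream.pipeline helper connects the same three streams and reports a failure from any stage (read, gzip, or write) through a single callback. A minimal sketch reusing the streams created above, in place of the .pipe() chain and the separate error listeners:
const { pipeline } = require('stream');
pipeline(readStream, gzip, writeStream, (err) => {
  if (err) {
    console.error('Pipeline failed:', err);
  } else {
    console.log('Log archiving completed.');
    // The fs.unlink cleanup from Step E could run here as well
  }
});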
(Unit I - Getting Started with Node.js):
Demonstrate the use of EventEmitter and async/await to simulate an order processing system. Create a Node.js script that:
A. Imports the events module and initializes an EventEmitter instance.
B. Defines an async function processOrder(orderId) that simulates a database delay (2 seconds) using setTimeout wrapped in a Promise.
C. Emits an event named 'orderReceived' before processing starts, and 'orderShipped' after processing ends.
D. Registers listeners for these events that log messages to the console with timestamps.
E. Triggers the flow by calling processOrder for three different order IDs.
const EventEmitter = require('events');
// Step A: Initialize EventEmitter
class OrderEmitter extends EventEmitter {}
const myEmitter = new OrderEmitter();
// Step D: Register Listeners
myEmitter.on('orderReceived', (id) => {
console.log(`[${new Date().toISOString()}] Order ${id} received. Starting processing...`);
});
myEmitter.on('orderShipped', (id) => {
console.log(`[${new Date().toISOString()}] Order ${id} has been processed and shipped.`);
});
// Step B: Async function with simulated delay
async function processOrder(orderId) {
// Step C: Emit start event
myEmitter.emit('orderReceived', orderId);
// Simulate 2 second delay
await new Promise((resolve) => setTimeout(resolve, 2000));
// Step C: Emit end event
myEmitter.emit('orderShipped', orderId);
}
// Step E: Trigger flow
async function runOrders() {
await processOrder(101);
await processOrder(102);
await processOrder(103);
}
runOrders();
(Unit II - Implementing HTTP Services):
Build a pure Node.js HTTP server (without Express) to handle basic routing and JSON responses. The server should:
A. Listen on port 3000.
B. Handle a GET request to /api/users that returns a JSON object containing a list of dummy users.
C. Handle a URL containing a query parameter (e.g., /api/users?id=1) to filter the result (manual parsing required).
D. Return a 404 Status Code and a plain text message "Route not found" for any other URL.
E. Ensure correct Content-Type headers are set for both JSON and Text responses.
const http = require('http');
const url = require('url');
const users = [
{ id: 1, name: 'Alice' },
{ id: 2, name: 'Bob' },
{ id: 3, name: 'Charlie' }
];
const server = http.createServer((req, res) => {
// Parse URL and Query parameters
const parsedUrl = url.parse(req.url, true);
const path = parsedUrl.pathname;
const query = parsedUrl.query;
// Step B & C: Handle /api/users with optional filtering
if (path === '/api/users' && req.method === 'GET') {
res.setHeader('Content-Type', 'application/json');
if (query.id) {
const user = users.find(u => u.id == query.id);
if (user) {
res.writeHead(200);
res.end(JSON.stringify(user));
} else {
res.writeHead(404);
res.end(JSON.stringify({ error: 'User not found' }));
}
} else {
res.writeHead(200);
res.end(JSON.stringify(users));
}
}
// Step D: Handle 404
else {
res.setHeader('Content-Type', 'text/plain');
res.writeHead(404);
res.end('Route not found');
}
});
// Step A: Listen on port 3000
server.listen(3000, () => {
console.log('Server running on port 3000');
});
(Unit II - Basic Websites With Node.JS):
Create an Express.js application that handles form submissions securely using body-parser and express-validator.
A. Set up an Express server running on port 5000.
B. Middleware: Use body-parser to parse JSON bodies.
C. Create a POST route /register.
D. Inside the route, use express-validator to ensure the 'email' field is a valid email and 'password' is at least 6 characters long.
E. If validation fails, return a 400 status with errors. If it passes, return a 200 status with the message "User registered".
const express = require('express');
const bodyParser = require('body-parser');
const { body, validationResult } = require('express-validator');
const app = express();
// Step B: Middleware
app.use(bodyParser.json());
// Step C: POST Route
app.post('/register', [
// Step D: Validation Rules
body('email').isEmail().withMessage('Must be a valid email'),
body('password').isLength({ min: 6 }).withMessage('Password must be at least 6 chars long')
], (req, res) => {
// Step E: Check validation results
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json({ errors: errors.array() });
}
// Success scenario
res.status(200).json({ message: 'User registered successfully', data: req.body });
});
// Step A: Server setup
app.listen(5000, () => {
console.log('Server running on port 5000');
});
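For reference, the route above can be exercised from another Node.js process with the native fetch API (Node 18+); the payload below is deliberately invalid, so a 400 response with the validation errors array is expected:
// Hypothetical test request against the running server on port 5000
fetch('http://localhost:5000/register', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ email: 'not-an-email', password: '123' })
})
  .then((res) => res.json())
  .then((data) => console.log(data)) // Expect { errors: [ ... ] } describing both fields
  .catch((err) => console.error('Request failed:', err));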
(Unit II - Basic Websites With Node.JS):
Implement a modular routing system using express.Router. Assume a two-file structure: a main app file and a separate router file for 'products'.
A. In the main app file, import Express and your custom router.
B. In the router file, define three routes: GET / (list all products), POST / (add product), and GET /:id (get product by ID).
C. Mount the router in the main app at the path /api/products.
D. Ensure the POST route accepts data using body-parser.
E. Write the code for both files (assuming they are in the same directory).
// --- products.js (The Router File) ---
const express = require('express');
const router = express.Router();
// Step B: Define routes
// GET / (Matches /api/products/)
router.get('/', (req, res) => {
res.send('List of all products');
});
// GET /:id (Matches /api/products/:id)
router.get('/:id', (req, res) => {
res.send(`Details for product ID: ${req.params.id}`);
});
// POST / (Matches /api/products/)
router.post('/', (req, res) => {
res.send(`Product added: ${req.body.name}`);
});
module.exports = router;
// --- app.js (The Main App File) ---
const express = require('express');
const bodyParser = require('body-parser');
const productRouter = require('./products'); // Step A: Import Router
const app = express();
// Step D: Body Parser
app.use(bodyParser.json());
// Step C: Mount Router
app.use('/api/products', productRouter);
app.listen(3000, () => {
console.log('Server running on port 3000');
});
(Unit III - Socket Services in Node.js):
Create a basic real-time chat server using Socket.IO. Your solution must include the server-side code to handle the following logic:
A. Initialize an HTTP server and upgrade it with Socket.IO.
B. Listen for a 'connection' event and log "A user connected".
C. Listen for a custom event 'chatMessage' sent by a client.
D. Broadcast the received message to ALL connected clients (including the sender) using io.emit.
E. Handle the 'disconnect' event to log "A user disconnected".
const express = require('express');
const http = require('http');
const { Server } = require('socket.io');
const app = express();
const server = http.createServer(app);
// Step A: Initialize Socket.IO
const io = new Server(server);
io.on('connection', (socket) => {
// Step B: Connection log
console.log('A user connected:', socket.id);
// Step C: Listen for 'chatMessage'
socket.on('chatMessage', (msg) => {
console.log('Message received:', msg);
// Step D: Broadcast to all clients
io.emit('message', msg);
});
// Step E: Disconnect log
socket.on('disconnect', () => {
console.log('A user disconnected');
});
});
server.listen(3000, () => {
console.log('Socket.IO server running on port 3000');
});
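A conceptual client-side snippet (not required by the question) that works with this server; it assumes the Socket.IO client script is loaded from the default /socket.io/socket.io.js path served by the server above:
// --- CLIENT SIDE SNIPPET (Conceptual) ---
/*
<script src="/socket.io/socket.io.js"></script>
<script>
  const socket = io();
  // Send a chat message to the server (Step C on the server)
  socket.emit('chatMessage', 'Hello everyone!');
  // Receive messages broadcast by the server (Step D on the server)
  socket.on('message', (msg) => console.log('New message:', msg));
</script>
*/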
(Unit III - Creating Middlewares):
Develop an Express application that implements session management and custom middleware.
A. Install and setup express-session.
B. Create a custom middleware function named checkAuth that checks if req.session.isLoggedIn is true. If not, respond with 403 Forbidden.
C. Create a route /login that sets req.session.isLoggedIn = true and req.session.user = 'Admin'.
D. Create a route /dashboard that uses the checkAuth middleware. If accessed, return "Welcome to Dashboard".
E. Create a route /logout that destroys the session.
const express = require('express');
const session = require('express-session');
const app = express();
// Step A: Setup Session
app.use(session({
secret: 'mySecretKey',
resave: false,
saveUninitialized: true,
cookie: { secure: false } // Set true if using HTTPS
}));
// Step B: Custom Middleware
const checkAuth = (req, res, next) => {
if (req.session.isLoggedIn) {
next();
} else {
res.status(403).send('Forbidden: Please login first');
}
};
// Step C: Login Route
app.get('/login', (req, res) => {
req.session.isLoggedIn = true;
req.session.user = 'Admin';
res.send('Logged in successfully');
});
// Step D: Protected Route
app.get('/dashboard', checkAuth, (req, res) => {
res.send(`Welcome to Dashboard, ${req.session.user}`);
});
// Step E: Logout Route
app.get('/logout', (req, res) => {
req.session.destroy((err) => {
if (err) return res.status(500).send('Error logging out');
res.send('Logged out');
});
});
app.listen(3000, () => console.log('Server started on port 3000'));
(Unit I - Handling Data I/O in Node.js):
Write a Node.js script that uses the fs module to perform a series of synchronous and asynchronous file operations.
A. Check if a directory named 'data' exists. If not, create it synchronously.
B. Asynchronously write a JSON object { "status": "active" } to data/config.json.
C. Upon successful write, read the file back asynchronously.
D. Parse the JSON content and log the 'status' property to the console.
E. Handle any errors that might occur during the read/write process.
const fs = require('fs');
const path = require('path');
const dirPath = path.join(__dirname, 'data');
const filePath = path.join(dirPath, 'config.json');
const dataObj = { status: "active" };
// Step A: Check and create directory synchronously
if (!fs.existsSync(dirPath)) {
try {
fs.mkdirSync(dirPath);
console.log('Directory created.');
} catch (err) {
console.error('Error creating directory:', err);
}
}
// Step B: Write file asynchronously
fs.writeFile(filePath, JSON.stringify(dataObj), (err) => {
if (err) {
return console.error('Write Error:', err);
}
console.log('File written successfully.');
// Step C: Read file asynchronously
fs.readFile(filePath, 'utf8', (readErr, content) => {
if (readErr) {
return console.error('Read Error:', readErr);
}
// Step D: Parse and Log
try {
const parsedData = JSON.parse(content);
console.log('Status property:', parsedData.status);
} catch (parseErr) {
console.error('JSON Parse Error:', parseErr);
}
});
});
(Unit IV - Getting Started with MongoDB):
Using the native MongoDB driver (not Mongoose), write a Node.js script to perform CRUD operations.
A. Connect to a local MongoDB instance at mongodb://localhost:27017.
B. Select the database inventoryDB and collection items.
C. Insert a new document: { name: "Laptop", qty: 10 }.
D. Find the document where name is "Laptop" and log it.
E. Update that document to set qty to 20.
F. Close the connection when done.
const { MongoClient } = require('mongodb');
const url = 'mongodb://localhost:27017';
const client = new MongoClient(url);
const dbName = 'inventoryDB';
async function main() {
try {
// Step A: Connect
await client.connect();
console.log('Connected successfully to server');
const db = client.db(dbName);
const collection = db.collection('items');
// Step C: Insert
const insertResult = await collection.insertOne({ name: "Laptop", qty: 10 });
console.log('Inserted document:', insertResult.insertedId);
// Step D: Find
const foundDoc = await collection.findOne({ name: "Laptop" });
console.log('Found document:', foundDoc);
// Step E: Update
const updateResult = await collection.updateOne(
{ name: "Laptop" },
{ $set: { qty: 20 } }
);
console.log('Updated count:', updateResult.modifiedCount);
} catch (err) {
console.error(err);
} finally {
// Step F: Close
await client.close();
}
}
main();
(Unit IV - Introduction to Mongoose):
Create a Node.js script using Mongoose to define a schema and perform database operations.
A. Connect to MongoDB using Mongoose.
B. Define a Schema for a Student with fields: name (String, required), age (Number), and major (String).
C. Create a Model from the schema.
D. Create an async function that creates a new student named "John Doe" with age 22.
E. Within the same function, find the student by name and log the result.
const mongoose = require('mongoose');
// Step A: Connect
mongoose.connect('mongodb://localhost:27017/universityDB')
.then(() => console.log('Connected to MongoDB'))
.catch(err => console.error('Connection error:', err));
// Step B: Define Schema
const studentSchema = new mongoose.Schema({
name: { type: String, required: true },
age: Number,
major: String
});
// Step C: Create Model
const Student = mongoose.model('Student', studentSchema);
// Step D & E: CRUD Operations
async function runDBOperations() {
try {
// Create
const newStudent = new Student({ name: 'John Doe', age: 22, major: 'CS' });
await newStudent.save();
console.log('Student Saved');
// Find
const foundStudent = await Student.findOne({ name: 'John Doe' });
console.log('Found Student:', foundStudent);
} catch (err) {
console.error(err);
} finally {
mongoose.connection.close();
}
}
runDBOperations();
(Unit V - Introduction to PostgreSQL):
Write a Node.js script using the pg (node-postgres) library to interact with a PostgreSQL database.
A. Configure a Client with connection details (user, host, database, password, port).
B. Connect to the database.
C. Execute a SQL query to CREATE a table named courses (columns: id SERIAL PRIMARY KEY, title TEXT, credits INT) if it doesn't exist.
D. Execute a SQL query to INSERT a record: title='Web Dev', credits=3.
E. Execute a SQL query to SELECT all rows from courses and log the output.
const { Client } = require('pg');
// Step A: Configuration
const client = new Client({
user: 'postgres',
host: 'localhost',
database: 'university',
password: 'password123',
port: 5432,
});
async function runPostgres() {
try {
// Step B: Connect
await client.connect();
console.log('Connected to PostgreSQL');
// Step C: Create Table
const createTableQuery = `
CREATE TABLE IF NOT EXISTS courses (
id SERIAL PRIMARY KEY,
title TEXT,
credits INT
);
`;
await client.query(createTableQuery);
console.log('Table created or already exists');
// Step D: Insert
const insertQuery = 'INSERT INTO courses(title, credits) VALUES($1, $2) RETURNING *';
const insertRes = await client.query(insertQuery, ['Web Dev', 3]);
console.log('Inserted:', insertRes.rows[0]);
// Step E: Select
const selectRes = await client.query('SELECT * FROM courses');
console.log('All Courses:', selectRes.rows);
} catch (err) {
console.error('Error:', err);
} finally {
await client.end();
}
}
runPostgres();
(Unit III - Socket Services in Node.js):
Implement a Socket.IO feature specifically for 'Rooms'.
A. Server: Allow a client to emit a 'joinRoom' event with a room name. Join the socket to that room.
B. Server: Listen for 'roomMessage' (containing roomName and message). Broadcast this message ONLY to sockets in that specific room using .to(roomName).emit(...).
C. Client (Simulated code snippet): Show how a client would emit 'joinRoom' for room "TechSupport" and then send a message to it.
D. Ensure you handle the case where the user hasn't joined the room yet (basic error logging).
// --- SERVER SIDE ---
const io = require('socket.io')(3000);
io.on('connection', (socket) => {
console.log('User connected:', socket.id);
// Step A: Join Room
socket.on('joinRoom', (roomName) => {
socket.join(roomName);
console.log(`Socket ${socket.id} joined room ${roomName}`);
socket.emit('notification', `You joined ${roomName}`);
});
// Step B: Room Message
socket.on('roomMessage', ({ roomName, message }) => {
if (socket.rooms.has(roomName)) {
// Send to everyone in room EXCEPT sender (or use io.to for everyone)
socket.to(roomName).emit('message', message);
console.log(`Message sent to ${roomName}: ${message}`);
} else {
// Step D: Error handling
console.log('User tried to send message to a room they are not in.');
socket.emit('error', 'You must join the room first.');
}
});
});
// --- CLIENT SIDE SNIPPET (Conceptual) ---
/*
const socket = io('http://localhost:3000');
// Step C: Join and Send
socket.emit('joinRoom', 'TechSupport');
setTimeout(() => {
socket.emit('roomMessage', {
roomName: 'TechSupport',
message: 'Hello, I need help!'
});
}, 1000);
*/
(Unit VI - Testing and Deployment):
Create a Node.js script to perform a basic Integration Test on a REST API endpoint.
A. Assume an API exists at http://localhost:3000/api/status.
B. Use the native fetch API (available in Node 18+) or http module to send a GET request to this endpoint.
C. Check if the response status code is 200.
D. Check if the response body contains {"status":"ok"}.
E. Log "Test Passed" or "Test Failed" based on the results.
// Note: This requires Node.js v18+ for native fetch, or a polyfill
const API_URL = 'http://localhost:3000/api/status';
async function testApi() {
console.log(`Running test against ${API_URL}...`);
try {
// Step B: Send Request
const response = await fetch(API_URL);
// Step C: Check Status
if (response.status !== 200) {
throw new Error(`Expected status 200, got ${response.status}`);
}
// Step D: Check Body
const data = await response.json();
if (data.status === 'ok') {
// Step E: Success
console.log('Test Passed: API is healthy.');
} else {
throw new Error(`Expected {status: "ok"}, got ${JSON.stringify(data)}`);
}
} catch (error) {
// Step E: Failure
console.error('Test Failed:', error.message);
process.exit(1);
}
}
testApi();
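To run this test end to end locally, a minimal stub of the assumed endpoint can be started first; the stub below is an assumption for practice purposes, not part of the required answer:
// --- Stub API server (assumed endpoint under test) ---
const express = require('express');
const app = express();
app.get('/api/status', (req, res) => res.json({ status: 'ok' }));
app.listen(3000, () => console.log('Stub API listening on port 3000'));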
(Unit II & IV - Express + Mongoose):
Create a complete Express route that performs an Update operation (CRUD) using Mongoose.
A. Assume a Mongoose model Product is already imported.
B. Create a PUT route /products/:id.
C. Use req.params.id to identify the document.
D. Use Product.findByIdAndUpdate to update the document with data from req.body.
E. Ensure the new: true option is used so the response contains the updated document, and handle errors (e.g., ID not found).
const express = require('express');
const router = express.Router();
// Assuming Product model is defined in models/Product.js
const Product = require('./models/Product');
// Step B: PUT Route
router.put('/products/:id', async (req, res) => {
const productId = req.params.id;
const updateData = req.body;
try {
// Step C & D: Update Operation
const updatedProduct = await Product.findByIdAndUpdate(
productId,
updateData,
{ new: true, runValidators: true } // Step E: Options
);
if (!updatedProduct) {
return res.status(404).json({ error: 'Product not found' });
}
res.status(200).json(updatedProduct);
} catch (err) {
res.status(500).json({ error: err.message });
}
});
module.exports = router;
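The question assumes the Product model is already defined; a minimal sketch of models/Product.js consistent with the route above (the exact fields are an assumption) might look like this:
// --- models/Product.js (assumed, minimal sketch) ---
const mongoose = require('mongoose');
const productSchema = new mongoose.Schema({
  name: { type: String, required: true }, // hypothetical fields for illustration
  price: Number,
  stock: Number
});
module.exports = mongoose.model('Product', productSchema);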
(Unit I - Handling Data I/O in Node.js):
Demonstrate the use of the fs module to watch a directory for changes.
A. Create a script that uses fs.watch on a folder named 'uploads'.
B. When a file is renamed or added (eventType 'rename'), log the filename.
C. Only if the filename ends with '.txt', read the file content asynchronously.
D. Print the content to the console.
E. Ensure the script handles the case where the file might be deleted immediately (error handling on read).
const fs = require('fs');
const path = require('path');
const watchDir = path.join(__dirname, 'uploads');
// Ensure directory exists for demo
if (!fs.existsSync(watchDir)) fs.mkdirSync(watchDir);
console.log(`Watching for changes in ${watchDir}...`);
// Step A: Watch
fs.watch(watchDir, (eventType, filename) => {
if (filename) {
// Step B: Log change
console.log(`Event: ${eventType}, File: ${filename}`);
// Step C: Filter for .txt
if (path.extname(filename) === '.txt') {
const fullPath = path.join(watchDir, filename);
// Check if file still exists (handling deletion)
fs.access(fullPath, fs.constants.F_OK, (err) => {
if (!err) {
// Read content
fs.readFile(fullPath, 'utf8', (readErr, data) => {
if (readErr) console.error('Error reading file:', readErr);
else console.log('File Content:', data); // Step D
});
}
});
}
}
});
(Unit III - Creating Middlewares):
Implement a global error-handling middleware in Express.
A. Create a basic Express server with one route /error that deliberately throws a new Error("Something went wrong").
B. Create an application-level middleware using app.use defined after your routes.
C. The middleware function must accept 4 arguments: (err, req, res, next).
D. Inside the middleware, log the error stack.
E. Send a JSON response with status 500 and the error message.
const express = require('express');
const app = express();
// Step A: Route that throws error
app.get('/error', (req, res, next) => {
try {
throw new Error('Something went wrong intentionally!');
} catch (err) {
next(err); // Pass to error handler
}
});
// Normal route
app.get('/', (req, res) => res.send('Hello World'));
// Step B & C: Error Handling Middleware (Must be last)
app.use((err, req, res, next) => {
// Step D: Log Stack
console.error('Error Stack:', err.stack);
// Step E: Send Response
res.status(500).json({
status: 'error',
message: err.message
});
});
app.listen(3000, () => console.log('Server running on port 3000'));
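Note that in Express 4 a rejected promise inside an async route handler is not passed to this error middleware automatically (Express 5 does forward it); the rejection has to be caught and handed to next(). A minimal sketch of an async route, registered before the error middleware, that cooperates with the handler above:
// Async route: forward the rejection to the error-handling middleware
app.get('/async-error', async (req, res, next) => {
  try {
    await Promise.reject(new Error('Async failure'));
  } catch (err) {
    next(err); // without this, Express 4 would leave the request unanswered
  }
});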
(Unit V - Introduction to PostgreSQL):
Create a transactional operation script using pg (PostgreSQL) and async/await.
A. Connect to the database.
B. Start a transaction using BEGIN.
C. Insert a user into a users table.
D. Insert a profile into a profiles table using the ID returned from the user insertion.
E. If both succeed, COMMIT. If any error occurs, ROLLBACK the transaction and log the error.
const { Client } = require('pg');
const client = new Client({ /* config */ });
async function createProfileTransaction(username, bio) {
try {
await client.connect();
// Step B: Start Transaction
await client.query('BEGIN');
// Step C: Insert User
const userRes = await client.query(
'INSERT INTO users(username) VALUES($1) RETURNING id',
[username]
);
const userId = userRes.rows[0].id;
// Step D: Insert Profile using userId
await client.query(
'INSERT INTO profiles(user_id, bio) VALUES($1, $2)',
[userId, bio]
);
// Step E: Commit
await client.query('COMMIT');
console.log('Transaction Completed Successfully');
} catch (e) {
// Step E: Rollback on error
await client.query('ROLLBACK');
console.error('Transaction Failed, Rolled back.', e);
} finally {
await client.end();
}
}
createProfileTransaction('janedoe', 'Software Developer');
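If a connection Pool were used instead of a single Client, the whole transaction would need to run on one checked-out client, because BEGIN/COMMIT are scoped to a single connection. A sketch of the same flow under that assumption:
// Variant (assumption): the same transaction using a pg Pool
const { Pool } = require('pg');
const pool = new Pool({ /* config */ });
async function createProfileWithPool(username, bio) {
  const poolClient = await pool.connect(); // dedicate one connection to the transaction
  try {
    await poolClient.query('BEGIN');
    const userRes = await poolClient.query(
      'INSERT INTO users(username) VALUES($1) RETURNING id',
      [username]
    );
    await poolClient.query(
      'INSERT INTO profiles(user_id, bio) VALUES($1, $2)',
      [userRes.rows[0].id, bio]
    );
    await poolClient.query('COMMIT');
    console.log('Transaction Completed Successfully');
  } catch (e) {
    await poolClient.query('ROLLBACK');
    console.error('Transaction Failed, Rolled back.', e);
  } finally {
    poolClient.release(); // return the connection to the pool
  }
}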
(Unit II & III - HTTP & Middleware):
Create an Express application that parses Cookies.
A. Install and use cookie-parser middleware.
B. Create a route /set-cookie that sets a cookie named 'user_prefs' with value 'dark_mode' that expires in 1 hour.
C. Create a route /get-cookie that reads the 'user_prefs' cookie.
D. If the cookie exists, return "Theme: [value]". If not, return "No preferences set".
const express = require('express');
const cookieParser = require('cookie-parser');
const app = express();
// Step A: Use Middleware
app.use(cookieParser());
// Step B: Set Cookie
app.get('/set-cookie', (req, res) => {
// 3600000 ms = 1 hour
res.cookie('user_prefs', 'dark_mode', { maxAge: 3600000, httpOnly: true });
res.send('Cookie has been set');
});
// Step C: Get Cookie
app.get('/get-cookie', (req, res) => {
const prefs = req.cookies.user_prefs;
// Step D: Conditional Return
if (prefs) {
res.send(`Theme: ${prefs}`);
} else {
res.send('No preferences set');
}
});
app.listen(3000);
(Unit VI - Testing and Deployment):
Simulate a deployment configuration check script.
A. Create a script that checks if NODE_ENV environment variable is set to 'production'.
B. If not, log a warning.
C. Check if a PORT environment variable is set; if not, default to 8080.
D. Print the final configuration object that would be used to start the server.
E. Explain in a comment (within the code) why checking NODE_ENV is important for Express apps (hint: performance/error messages).
// Step A: Access Environment Variables
const env = process.env.NODE_ENV;
// Step C: Default Port Logic
const port = process.env.PORT || 8080;
console.log('--- Deployment Config Check ---');
// Step B: Production Check
if (env !== 'production') {
console.warn('WARNING: NODE_ENV is not set to "production". Application is running in development mode.');
} else {
console.log('Success: Running in Production mode.');
}
// Step D: Print Config
const config = {
environment: env || 'development',
port: port,
databaseUrl: process.env.DATABASE_URL || 'localhost'
};
console.log('Final Configuration:', config);
/*
Step E Explanation:
Checking NODE_ENV is crucial because Express acts differently in 'production'.
In production, view caching is enabled, and error messages sent to the client
are less verbose (stack traces are hidden) for security and performance.
*/
(Unit I - Stream Module):
Implement a file copy mechanism using only Streams (no fs.copyFile allowed) to handle large files efficiently.
A. Define a function copyLargeFile(source, destination).
B. Inside, create a Read Stream from source.
C. Create a Write Stream to destination.
D. Pipe the read stream to the write stream.
E. Listen for the 'data' event on the read stream to calculate and log the total bytes processed so far (progress indicator).
const fs = require('fs');
const path = require('path');
function copyLargeFile(source, destination) {
// Step B & C: Create Streams
const readStream = fs.createReadStream(source);
const writeStream = fs.createWriteStream(destination);
let totalBytes = 0;
// Step E: Progress Indicator
readStream.on('data', (chunk) => {
totalBytes += chunk.length;
console.log(`Processed ${totalBytes} bytes...`);
});
// Completion: 'finish' fires on the write stream once all data has been flushed to the destination
writeStream.on('finish', () => {
console.log('File copy finished successfully.');
});
readStream.on('error', (err) => {
console.error('Error reading file:', err);
});
writeStream.on('error', (err) => {
console.error('Error writing file:', err);
});
// Step D: Pipe
readStream.pipe(writeStream);
}
// Usage Example
const src = path.join(__dirname, 'largefile.txt');
const dest = path.join(__dirname, 'copy_largefile.txt');
// Create a dummy file first to make the script testable
fs.writeFileSync(src, 'A'.repeat(100000));
copyLargeFile(src, dest);
(Unit I - Getting Started with Node.js):
Create a Node.js CLI (Command Line Interface) script that interacts with the process object.
A. Accept a user argument from the command line (e.g., node script.js --user=Alice).
B. Parse this argument to extract the name.
C. Check if the environment variable GREETING_TYPE is set. If 'formal', log "Good day, [Name]". If 'casual', log "Hey, [Name]". Default to 'Hello' if unset.
D. Measure the execution time of the script using console.time and console.timeEnd.
E. Exit the process with code 0 for success or 1 if no name argument was provided.
// Run with: GREETING_TYPE=formal node script.js --user=Alice
console.time('ExecutionTime');
const args = process.argv.slice(2);
const userArg = args.find(arg => arg.startsWith('--user='));
if (!userArg) {
console.error('Error: Please provide a user argument like --user=Name');
process.exit(1);
}
// Step B: Parse argument
const name = userArg.split('=')[1];
// Step C: Check Environment Variable
const greetingType = process.env.GREETING_TYPE;
let greeting = 'Hello';
if (greetingType === 'formal') {
greeting = 'Good day';
} else if (greetingType === 'casual') {
greeting = 'Hey';
}
console.log(`${greeting}, ${name}!`);
// Step D: End Timer
console.timeEnd('ExecutionTime');
// Step E: Exit Success
process.exit(0);
(Unit I - Handling Data I/O in Node.js):
Write a script using the path and fs modules to organize a directory of mixed files.
A. Assume a folder downloads exists containing .txt, .json, and .jpg files.
B. Create subdirectories text, data, and images inside downloads if they don't exist.
C. Read the directory contents.
D. Iterate through the files, detect the extension using path.extname(), and move (rename) the file into the corresponding subdirectory.
E. Log each move operation.
const fs = require('fs');
const path = require('path');
const baseDir = path.join(__dirname, 'downloads');
// Ensure base dir exists for demo purposes
if (!fs.existsSync(baseDir)) fs.mkdirSync(baseDir);
// Step B: Create subdirectories
['text', 'data', 'images'].forEach(sub => {
const subPath = path.join(baseDir, sub);
if (!fs.existsSync(subPath)) fs.mkdirSync(subPath);
});
// Step C: Read directory
fs.readdir(baseDir, (err, files) => {
if (err) throw err;
files.forEach(file => {
const oldPath = path.join(baseDir, file);
// Skip directories
if (fs.lstatSync(oldPath).isDirectory()) return;
const ext = path.extname(file).toLowerCase();
let folder = '';
// Step D: Detect and assign folder
if (ext === '.txt') folder = 'text';
else if (ext === '.json') folder = 'data';
else if (ext === '.jpg') folder = 'images';
if (folder) {
const newPath = path.join(baseDir, folder, file);
// Step D: Move file
fs.rename(oldPath, newPath, (err) => {
if (err) console.error(`Failed to move ${file}`);
else console.log(`Moved ${file} to ${folder}/`);
});
}
});
});
(Unit II - Implementing HTTP Services):
Implement a Node.js HTTP server that serves a static HTML file.
A. Create an index.html file programmatically (using fs) with basic content "<h1>Hello Node</h1>".
B. Create an HTTP server listening on port 8000.
C. When the server receives a request for /, read the HTML file.
D. Serve the file with a status code of 200 and Content-Type text/html.
E. Handle the error case where the file might be missing by returning a 500 status.
const http = require('http');
const fs = require('fs');
const path = require('path');
const filePath = path.join(__dirname, 'index.html');
// Step A: Create dummy HTML
fs.writeFileSync(filePath, '<h1>Hello Node</h1>');
const server = http.createServer((req, res) => {
if (req.url === '/') {
// Step C: Read File
fs.readFile(filePath, (err, content) => {
if (err) {
// Step E: Error Handling
res.writeHead(500, { 'Content-Type': 'text/plain' });
res.end('Server Error: File not found');
} else {
// Step D: Serve HTML
res.writeHead(200, { 'Content-Type': 'text/html' });
res.end(content);
}
});
} else {
res.writeHead(404);
res.end('Not Found');
}
});
// Step B: Listen
server.listen(8000, () => {
console.log('Server running on port 8000');
});
(Unit II - Basic Websites With Node.JS):
Demonstrate the use of app.all() and regex route paths in Express.
A. Create an Express route using app.all('/secret', ...).
B. Inside this route, log the HTTP method used (GET, POST, PUT, etc.).
C. If the method is GET, send "Access Granted". For any other method, send "Method Not Allowed" with status 405.
D. Create another route with a regex path to match any URL starting with /ab and ending with cd (e.g., /abcd, /ab123cd).
E. Respond with the captured path.
const express = require('express');
const app = express();
// Step A: app.all()
app.all('/secret', (req, res) => {
// Step B: Log Method
console.log(`Request Method: ${req.method}`);
// Step C: Conditional Logic
if (req.method === 'GET') {
res.send('Access Granted');
} else {
res.status(405).send('Method Not Allowed');
}
});
// Step D: Regex Routing
// Anchored so it only matches paths that start with /ab and end with cd, e.g. /abcd, /ab123cd
app.get(/^\/ab.*cd$/, (req, res) => {
// Step E: Respond with path
res.send(`Matched Regex Route: ${req.path}`);
});
app.listen(3000, () => console.log('Server on 3000'));
(Unit III - Creating Middlewares):
Implement cookie-session (as specified in the syllabus) for a shopping cart feature.
A. Configure cookie-session with a secret key and a 24-hour expiry.
B. Create a route /add-to-cart/:item.
C. Inside the route, check if req.session.cart exists. If not, initialize it as an empty array.
D. Push the item from the URL parameter into the cart array.
E. Create a route /my-cart that displays the JSON contents of the cart.
const express = require('express');
const cookieSession = require('cookie-session');
const app = express();
// Step A: Configure cookie-session
app.use(cookieSession({
name: 'session',
keys: ['key1', 'key2'],
maxAge: 24 * 60 * 60 * 1000 // 24 hours
}));
// Step B: Add to Cart
app.get('/add-to-cart/:item', (req, res) => {
const item = req.params.item;
// Step C: Initialize if needed
if (!req.session.cart) {
req.session.cart = [];
}
// Step D: Update Session
req.session.cart.push(item);
res.send(`Added ${item} to cart.`);
});
// Step E: View Cart
app.get('/my-cart', (req, res) => {
const cart = req.session.cart || [];
res.json({ cartItems: cart });
});
app.listen(3000, () => console.log('Shopping Cart App Running'));
(Unit IV - Getting Started with MongoDB):
Write a Node.js script to perform database management operations (Drop and Create) using the native MongoDB driver.
A. Connect to MongoDB.
B. Check if a collection named logs exists.
C. If it exists, drop the logs collection and log "Collection Dropped".
D. Create a new collection logs.
E. Insert a default document { created: new Date(), msg: "Init" }.
const { MongoClient } = require('mongodb');
const url = 'mongodb://localhost:27017';
const client = new MongoClient(url);
const dbName = 'adminTools';
async function manageDB() {
try {
await client.connect();
const db = client.db(dbName);
// Step B: Check collections
const collections = await db.listCollections({ name: 'logs' }).toArray();
// Step C: Drop if exists
if (collections.length > 0) {
await db.collection('logs').drop();
console.log('Collection Dropped');
}
// Step D: Create Collection (explicitly or via insert)
// Creating explicitly to match requirement
await db.createCollection('logs');
console.log('Collection Created');
// Step E: Insert Default
await db.collection('logs').insertOne({
created: new Date(),
msg: 'Init'
});
console.log('Default log inserted');
} catch (err) {
console.error(err);
} finally {
await client.close();
}
}
manageDB();
(Unit IV - Introduction to Mongoose):
Implement Schema Custom Validators in Mongoose.
A. Define a User schema.
B. Add a field username (String).
C. Add a field website (String). Implement a custom validator for website that returns false if the string does not start with "http".
D. Add a custom error message "Website must start with http".
E. Try to save a user with an invalid website and catch/log the specific validation error.
const mongoose = require('mongoose');
mongoose.connect('mongodb://localhost:27017/validationDemo');
// Step A: Schema
const userSchema = new mongoose.Schema({
username: String,
website: {
type: String,
// Step C & D: Custom Validator
validate: {
validator: function(v) {
return v.startsWith('http');
},
message: props => `${props.value} is not a valid website URL! Website must start with http`
}
}
});
const User = mongoose.model('User', userSchema);
async function testValidation() {
const user = new User({
username: 'Dave',
website: 'www.dave.com' // Invalid: missing http
});
try {
// Step E: Save and Catch
await user.save();
} catch (err) {
console.error('Validation Error Caught:');
console.error(err.message);
} finally {
mongoose.connection.close();
}
}
testValidation();
(Unit V - Introduction to PostgreSQL):
Write a Node.js script using pg to perform a bulk insert and a parameterized query search.
A. Connect to PostgreSQL.
B. Prepare an array of arrays representing 3 students: [['Alice', 90], ['Bob', 85], ['Charlie', 92]].
C. Use a loop or a dynamic query string to INSERT all students into a grades table (name, score) in one go (or sequential awaits).
D. Perform a SELECT query using a parameter ($1) to find students with a score greater than 88.
E. Log the names of the high-scoring students.
const { Client } = require('pg');
const client = new Client({ user: 'postgres', host: 'localhost', database: 'school', port: 5432 });
async function bulkOperations() {
try {
await client.connect();
// Setup table
await client.query('CREATE TABLE IF NOT EXISTS grades (id SERIAL, name TEXT, score INT)');
// Step B: Data
const students = [['Alice', 90], ['Bob', 85], ['Charlie', 92]];
// Step C: Insert (Sequential for simplicity in Node, though bulk SQL is better)
for (const student of students) {
await client.query('INSERT INTO grades(name, score) VALUES($1, $2)', student);
}
console.log('Bulk insert complete.');
// Step D: Parameterized Search
const cutoff = 88;
const res = await client.query('SELECT name FROM grades WHERE score > $1', [cutoff]);
// Step E: Log Results
console.log(`Students with score > ${cutoff}:`);
res.rows.forEach(row => console.log(row.name));
} catch (err) {
console.error(err);
} finally {
await client.end();
}
}
bulkOperations();
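As the comment in Step C suggests, the three rows can also be sent as one multi-row INSERT by building the placeholder list dynamically; the fragment below (a sketch) would replace the for loop inside bulkOperations():
// Build VALUES ($1, $2), ($3, $4), ($5, $6) and flatten the data to match
const placeholders = students
  .map((_, i) => `($${i * 2 + 1}, $${i * 2 + 2})`)
  .join(', ');
await client.query(
  `INSERT INTO grades(name, score) VALUES ${placeholders}`,
  students.flat() // ['Alice', 90, 'Bob', 85, 'Charlie', 92]
);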
(Unit VI - Testing and Deployment):
Create a unit test script using Node.js's built-in assert module.
A. Define a function add(a, b) that returns a + b.
B. Define a function fetchData() that returns a Promise resolving to "data".
C. Write a test case using assert.strictEqual to verify add(2, 3) equals 5.
D. Write an async test case to verify fetchData() resolves to "data", using assert.match or strict equality.
E. Wrap these in a try-catch block to print "All tests passed" or the specific error.
const assert = require('assert');
// Step A: Function to test
function add(a, b) {
return a + b;
}
// Step B: Async function
async function fetchData() {
return "data";
}
async function runTests() {
try {
console.log('Running tests...');
// Step C: Sync Test
const result1 = add(2, 3);
assert.strictEqual(result1, 5, 'Add(2,3) should be 5');
// Step D: Async Test
const result2 = await fetchData();
assert.strictEqual(result2, 'data', 'fetchData should return "data"');
// Success
console.log('All tests passed successfully.');
} catch (err) {
// Step E: Failure
console.error('Test Failed!');
console.error(err.message);
process.exit(1);
}
}
runTests();
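On Node 18+ the same checks can also be written with the built-in node:test runner, which pairs naturally with assert; a minimal sketch, assuming the add and fetchData functions (and the assert import) above are in scope:
// Optional alternative: built-in node:test runner (tests run when the file is executed with node)
const { test } = require('node:test');
test('add(2, 3) returns 5', () => {
  assert.strictEqual(add(2, 3), 5);
});
test('fetchData() resolves to "data"', async () => {
  assert.strictEqual(await fetchData(), 'data');
});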
(Unit II - Basic Websites With Node.JS):
Create a robust Express setup using express-validator specifically for sanitization.
A. Create a POST route /comment.
B. Use body('comment') to select the input.
C. Chain sanitizers: .trim() to remove whitespace and .escape() to convert HTML characters (prevent XSS).
D. Inside the route handler, retrieve the sanitized value.
E. Return the sanitized comment back to the user to demonstrate the change.
const express = require('express');
const bodyParser = require('body-parser');
const { body, validationResult } = require('express-validator');
const app = express();
app.use(bodyParser.json());
// Step A: Route
app.post('/comment', [
// Step B & C: Sanitization
body('comment')
.trim()
.escape()
], (req, res) => {
// Check for validation errors (optional, but good practice)
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json({ errors: errors.array() });
}
// Step D: Retrieve the sanitized value
// Note: express-validator writes the sanitized value back onto req.body (matchedData() is an alternative way to read it)
const sanitizedComment = req.body.comment;
// Step E: Return the sanitized comment to demonstrate the change
res.json({
original: "(Not shown securely)",
sanitized: sanitizedComment,
message: "Script tags should be escaped now."
});
});
app.listen(3000, () => console.log('Sanitizer running'));
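For reference, a request whose comment field contains markup comes back trimmed and HTML-escaped; a quick check with the native fetch API (Node 18+), assuming the server above is running:
// Hypothetical request to demonstrate the sanitizers
fetch('http://localhost:3000/comment', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ comment: '   <Hello>   ' })
})
  .then((res) => res.json())
  .then((data) => console.log(data.sanitized)); // Expect "&lt;Hello&gt;" – trimmed, angle brackets escaped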
(Unit III - Creating Middlewares):
Create a custom "Maintenance Mode" middleware.
A. Define a variable isMaintenanceMode = true.
B. Create a middleware function that checks this variable.
C. If true, send a 503 Service Unavailable status with a JSON message "Site under maintenance".
D. If false, allow the request to proceed to the next handler.
E. Create a test route /status that returns "System Operational" and apply the middleware globally.
const express = require('express');
const app = express();
// Step A: Config
let isMaintenanceMode = true;
// Step B: Middleware Definition
const maintenanceMiddleware = (req, res, next) => {
// Step C: Check Mode
if (isMaintenanceMode) {
res.status(503).json({
error: 'Service Unavailable',
message: 'Site under maintenance. Please try again later.'
});
} else {
// Step D: Proceed
next();
}
};
// Apply globally
app.use(maintenanceMiddleware);
// Step E: Test Route
app.get('/status', (req, res) => {
res.send('System Operational');
});
app.listen(3000, () => {
console.log('Server running. Try accessing /status');
});
(Unit I - Node.js Modules):
Create a custom local module and use it.
A. Create a file mathUtils.js.
B. In mathUtils.js, export an object containing two functions: square(n) and random(min, max).
C. Create a main file app.js.
D. Import the local module.
E. Use the imported functions to generate a random number between 1 and 10, square it, and log the result.
// --- mathUtils.js ---
const square = (n) => n * n;
const random = (min, max) => {
return Math.floor(Math.random() * (max - min + 1)) + min;
};
module.exports = {
square,
random
};
// --- app.js ---
// Step D: Import
const math = require('./mathUtils');
// Step E: Use functions
const randNum = math.random(1, 10);
const squared = math.square(randNum);
console.log(`Random Number: ${randNum}`);
console.log(`Squared: ${squared}`);
(Unit III - Socket Services in Node.js):
Create a Socket.IO server that handles user typing indicators.
A. Server: Listen for a 'typing' event from a client.
B. Server: Broadcast a 'userTyping' event to all other connected clients (excluding the sender).
C. Server: Listen for 'stopTyping' and broadcast 'userStoppedTyping'.
D. Use socket.broadcast.emit specifically for this task.
E. Log to the server console whenever a typing event starts or stops.
const { Server } = require('socket.io');
const io = new Server(3000);
io.on('connection', (socket) => {
console.log(`User connected: ${socket.id}`);
// Step A: Listen for typing
socket.on('typing', () => {
console.log(`${socket.id} is typing...`);
// Step B & D: Broadcast to others only
socket.broadcast.emit('userTyping', { userId: socket.id });
});
// Step C: Listen for stopTyping
socket.on('stopTyping', () => {
console.log(`${socket.id} stopped typing.`);
// Broadcast stop
socket.broadcast.emit('userStoppedTyping', { userId: socket.id });
});
});
console.log('Socket server running on 3000');
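A conceptual client-side snippet (in the style of the earlier room example) showing how a browser client might drive these events from a text input; the #message selector is hypothetical:
// --- CLIENT SIDE SNIPPET (Conceptual) ---
/*
const socket = io('http://localhost:3000');
const input = document.querySelector('#message');
// Notify the server when this user starts or stops typing
input.addEventListener('input', () => socket.emit('typing'));
input.addEventListener('blur', () => socket.emit('stopTyping'));
// React to other users' typing indicators
socket.on('userTyping', ({ userId }) => console.log(`${userId} is typing...`));
socket.on('userStoppedTyping', ({ userId }) => console.log(`${userId} stopped typing.`));
*/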
(Unit IV - Introduction to Mongoose):
Demonstrate how to query specific fields (Projection) and sort results in Mongoose.
A. Assume a Product model exists with fields: name, price, category, stock.
B. Create an async function getCheapProducts().
C. Find all products where price is less than 50.
D. Select ONLY the name and price fields (exclude _id if possible, or just include specific fields).
E. Sort the results by price in ascending order and log them.
const mongoose = require('mongoose');
const Product = require('./models/Product'); // Assumed path
async function getCheapProducts() {
try {
await mongoose.connect('mongodb://localhost:27017/shop');
// Step C: Find filter
const products = await Product.find({ price: { $lt: 50 } })
// Step D: Projection (select name and price, exclude _id)
.select('name price -_id')
// Step E: Sort (ascending price)
.sort({ price: 1 });
console.log('Cheap Products:', products);
} catch (err) {
console.error(err);
} finally {
mongoose.connection.close();
}
}
getCheapProducts();
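Design note: the same projection can be supplied as the second argument to find() instead of chaining select(); inside the same async function the two forms are equivalent:
// Equivalent query using find()'s projection argument
const cheap = await Product.find(
  { price: { $lt: 50 } },        // filter: price below 50
  { name: 1, price: 1, _id: 0 }  // projection: only name and price, hide _id
).sort({ price: 1 });            // ascending by price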
(Unit V - Introduction to PostgreSQL):
Write a script to handle SQL constraints/errors gracefully.
A. Create a table users with email as a UNIQUE column.
B. Insert a user with email "test@example.com".
C. Immediately try to insert another user with the exact same email.
D. Catch the error.
E. Check if the error code matches the PostgreSQL unique violation code ('23505') and log "Email already exists". For any other error, log "Unknown Error".
const { Client } = require('pg');
const client = new Client({ /* config */ });
async function testConstraint() {
try {
await client.connect();
await client.query('CREATE TABLE IF NOT EXISTS users (email TEXT UNIQUE)');
// Step B: First Insert
await client.query("INSERT INTO users (email) VALUES ('test@example.com')");
console.log('First insert success');
// Step C: Duplicate Insert
await client.query("INSERT INTO users (email) VALUES ('test@example.com')");
} catch (err) {
// Step D & E: Error Handling
if (err.code === '23505') {
console.error('Error: Email already exists (Unique Violation)');
} else {
console.error('Unknown Error:', err);
}
} finally {
// Cleanup for re-running test
await client.query('DROP TABLE users');
await client.end();
}
}
testConstraint();
(Unit VI - Testing and Deployment):
Simulate a third-party API rendering scenario.
A. Create an Express route /weather.
B. Inside the route, simulate a fetch to a third-party Weather API (mock the data: { temp: 25, city: 'London' }).
C. Construct an HTML string (Server Side Rendering) that embeds this data: <h1>Weather in London</h1><p>Temperature: 25C</p>.
D. Send this HTML string as the response.
E. Ensure the content type is set to text/html.
const express = require('express');
const app = express();
// Step A: Route
app.get('/weather', async (req, res) => {
// Step B: Mock Third Party Data
const mockApiCall = () => Promise.resolve({ temp: 25, city: 'London' });
const weatherData = await mockApiCall();
// Step C: Render HTML String
const html = `
<!DOCTYPE html>
<html>
<head><title>Weather Report</title></head>
<body>
<h1>Weather in ${weatherData.city}</h1>
<p>Temperature: ${weatherData.temp}°C</p>
</body>
</html>
`;
// Step D & E: Send Response
res.setHeader('Content-Type', 'text/html');
res.send(html);
});
app.listen(3000);
(Unit I - Handling Data I/O in Node.js):
Work with the Node.js Buffer class.
A. Create a Buffer from the string "Hello World".
B. Convert the buffer to Base64 format and log it.
C. Modify the first byte of the original buffer to match the ASCII code for 'Y'.
D. Convert the modified buffer back to a string.
E. Log the final string (Should read "Yello World").
// Step A: Create Buffer
const buf = Buffer.from('Hello World', 'utf8');
// Step B: Base64
console.log('Base64:', buf.toString('base64'));
// Step C: Modify first byte (ASCII for 'Y' is 89)
buf[0] = 89;
// Step D: Convert to String
const modifiedStr = buf.toString('utf8');
// Step E: Log result
console.log('Modified String:', modifiedStr);
(Unit III - Creating Middlewares):
Implement a middleware that validates a custom API Key header.
A. Create a middleware validateApiKey.
B. Retrieve the value of the header x-api-key.
C. Compare it against a hardcoded secret "12345".
D. If it matches, call next().
E. If it fails, return status 401 and JSON { error: "Unauthorized" }.
F. Mount this middleware only on routes starting with /api.
const express = require('express');
const app = express();
// Step A: Middleware
const validateApiKey = (req, res, next) => {
// Step B: Get Header
const apiKey = req.headers['x-api-key'];
// Step C: Validate
if (apiKey === '12345') {
// Step D: Success
next();
} else {
// Step E: Fail
res.status(401).json({ error: 'Unauthorized' });
}
};
// Step F: Mount on /api
app.use('/api', validateApiKey);
// Test routes
app.get('/api/data', (req, res) => res.send('Secure Data'));
app.get('/public', (req, res) => res.send('Public Data'));
app.listen(3000);
(Unit VI - Testing and Deployment):
Simulate GitHub Actions workflow YAML generation (Concept Check).
A. You are required to deploy a Node.js app using GitHub.
B. Write a Javascript string that contains the content of a basic .github/workflows/node.js.yml file.
C. The YAML content must include: Trigger on push to 'main', runs-on 'ubuntu-latest', steps to checkout code, setup Node.js version 16, install dependencies (npm ci), and run tests (npm test).
D. Log this string to the console.
// Step B: Define YAML content
const workflowYaml = `
name: Node.js CI
on:
push:
branches: [ "main" ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [16.x]
steps:
- uses: actions/checkout@v3
- name: Use Node.js
uses: actions/setup-node@v3
with:
node-version: \${{ matrix.node-version }}
- name: Install Dependencies
run: npm ci
- name: Run Tests
run: npm test
`;
// Step D: Log it
console.log(workflowYaml);
(Unit II - Implementing HTTP Services):
Manually parse a JSON Request Body in a native HTTP server (No Express).
A. Create a server.
B. Listen for POST requests.
C. Collect data chunks from req into an array.
D. On req 'end', concatenate chunks and convert to string.
E. Parse the string as JSON. If successful, respond with the parsed object's message property in uppercase. If parsing fails, return 400.
const http = require('http');
const server = http.createServer((req, res) => {
if (req.method === 'POST') {
let body = [];
// Step C: Collect chunks
req.on('data', (chunk) => {
body.push(chunk);
});
req.on('end', () => {
// Step D: Concat and Stringify
const parsedBody = Buffer.concat(body).toString();
try {
// Step E: Parse JSON
const json = JSON.parse(parsedBody);
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ response: json.message.toUpperCase() }));
} catch (e) {
// Handle Parse Error
res.writeHead(400);
res.end('Invalid JSON');
}
});
} else {
// Respond to non-POST requests instead of leaving them without a response
res.writeHead(405, { 'Content-Type': 'text/plain' });
res.end('Method Not Allowed');
}
});
server.listen(3000);