Data Scientist with a designer's instinct — I build dashboards, train ML models, design products, and bridge the gap between raw data and human experience.
I don't just analyse data — I find the human story inside it and make it impossible to ignore.
CS Engineering from JNTUK (GPA 3.5) · MS Computer Science from Montclair State University (GPA 3.7). Built self-driving cars, educational analytics platforms, highway detection systems, and full AI products — while developing a deep passion for human-centred design.
Currently a Data Scientist at Savi Technologies, New Jersey, teaching students programming and data analysis — where making complex things simple and beautiful became my calling.
Each project has three tabs: Preview, actual Code with syntax highlighting, and a simulated Output panel. Run buttons open the real platform.
# Autonomous Self-Driving Car — Lane Following
# Author: Your Name | Montclair State Robotics
import cv2
import numpy as np
import time


class PIDController:
    """PID controller whose output is clamped to the steering range [-1, 1]."""

    def __init__(self, Kp=0.45, Ki=0.001, Kd=0.25):
        self.Kp, self.Ki, self.Kd = Kp, Ki, Kd
        self._integral = self._prev_error = 0.0
        self._last_time = time.time()

    def compute(self, error):
        """Return the clamped PID output for *error* using wall-clock dt."""
        now = time.time()
        dt = max(now - self._last_time, 1e-6)  # guard against dt == 0
        # FIX: the original never updated _last_time, so dt grew without
        # bound and the integral/derivative terms were wrong after frame 1.
        self._last_time = now
        self._integral += error * dt
        deriv = (error - self._prev_error) / dt
        out = self.Kp * error + self.Ki * self._integral + self.Kd * deriv
        self._prev_error = error
        return float(np.clip(out, -1.0, 1.0))


def detect_lanes(frame):
    """Return Canny edges restricted to the lower (road) part of *frame*."""
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    blur = cv2.GaussianBlur(gray, (5, 5), 0)
    edges = cv2.Canny(blur, 50, 150)
    h, w = frame.shape[:2]
    mask = np.zeros_like(edges)
    # Keep only the trapezoid below 55% of the frame height.
    cv2.fillPoly(mask, np.array([[(0, h), (0, int(h * .55)),
                                  (w, int(h * .55)), (w, h)]], np.int32), 255)
    return cv2.bitwise_and(edges, mask)


def split_lanes(lines):
    """Partition HoughLinesP segments into mean (left, right) lane lines.

    Segments are assigned by slope sign (negative slope → left lane in
    image coordinates).  Returns two (x1, y1, x2, y2) tuples, either of
    which is None when that side produced no segments.
    """
    left, right = [], []
    if lines is not None:
        for seg in lines:
            x1, y1, x2, y2 = seg[0]
            if x2 == x1:
                continue  # vertical segment: slope undefined
            slope = (y2 - y1) / (x2 - x1)
            (left if slope < 0 else right).append((x1, y1, x2, y2))

    def mean_line(group):
        return tuple(int(v) for v in np.mean(group, axis=0)) if group else None

    return mean_line(left), mean_line(right)


def compute_steering(frame, left, right):
    """Map detected lane lines to a normalized deviation in [-1, 1].

    When only one lane is visible, the lane centre is assumed to sit
    150 px from it; with no lanes the deviation is 0.0 (drive straight).
    """
    w = frame.shape[1]
    if left and right:
        mid = (left[2] + right[2]) // 2
    elif left:
        mid = left[2] + 150
    elif right:
        mid = right[2] - 150
    else:
        return 0.0
    return float(np.clip((mid - w // 2) / (w / 2), -1.0, 1.0))


def run(source=0):
    """Main loop: read frames, detect lanes, compute steering commands."""
    cap = cv2.VideoCapture(source)
    pid = PIDController(Kp=0.45, Ki=0.001, Kd=0.25)
    try:
        while True:
            ret, frame = cap.read()
            if not ret:
                break
            roi = detect_lanes(frame)
            # FIX: minLineLength/maxLineGap must be keyword arguments —
            # passed positionally they fill the unused `lines` output slot.
            lines = cv2.HoughLinesP(roi, 1, np.pi / 180, 30,
                                    minLineLength=40, maxLineGap=120)
            # FIX: left_line/right_line were referenced but never defined.
            left_line, right_line = split_lanes(lines)
            dev = compute_steering(frame, left_line, right_line)
            steer = pid.compute(dev)
            # motor.set_steering(steer) ← hardware call
    finally:
        cap.release()  # FIX: release the capture even if the loop raises
# Object Detection — YOLOv3 + OpenCV
# Author: Your Name | JNTUK Bachelor's Project
import cv2
import numpy as np

CONF = 0.50   # confidence threshold
NMS = 0.40    # non-max suppression IoU threshold
SIZE = 416    # YOLO square input size
# FIX: the original list literal was syntactically broken ("... 71 more
# COCO classes" inside the brackets).  These are the first 9 of the 80
# COCO class names; append the remaining 71 for full coverage.
CLASSES = ["person", "bicycle", "car", "motorbike",
           "cat", "dog", "chair", "book", "laptop"]


def get_output_layers(net):
    """Return the names of the network's unconnected (YOLO output) layers.

    FIX: this helper was called by detect() but never defined.
    """
    names = net.getLayerNames()
    return [names[i - 1] for i in net.getUnconnectedOutLayers().flatten()]


def detect(image_path):
    """Run YOLOv3 on *image_path* and write annotated boxes to result.jpg."""
    net = cv2.dnn.readNetFromDarknet("yolov3.cfg", "yolov3.weights")
    img = cv2.imread(image_path)
    h, w = img.shape[:2]
    # Scale pixels to [0, 1] and swap BGR→RGB as the Darknet model expects.
    blob = cv2.dnn.blobFromImage(img, 1 / 255.0, (SIZE, SIZE), swapRB=True)
    net.setInput(blob)
    outputs = net.forward(get_output_layers(net))

    boxes, confs, ids = [], [], []
    for out in outputs:
        for det in out:
            scores = det[5:]
            cid = int(np.argmax(scores))
            conf = float(scores[cid])
            if conf > CONF:
                # Detections are centre/size, normalized to image dims.
                cx, cy = int(det[0] * w), int(det[1] * h)
                bw, bh = int(det[2] * w), int(det[3] * h)
                boxes.append([cx - bw // 2, cy - bh // 2, bw, bh])
                confs.append(conf)
                ids.append(cid)

    nms = cv2.dnn.NMSBoxes(boxes, confs, CONF, NMS)
    color = (0, 230, 80)  # FIX: `color` was referenced but never defined
    if len(nms) > 0:      # FIX: NMSBoxes returns () when nothing survives
        for i in np.asarray(nms).flatten():
            x, y, bw, bh = boxes[i]
            # Guard: class ids may exceed the truncated CLASSES list above.
            name = CLASSES[ids[i]] if ids[i] < len(CLASSES) else str(ids[i])
            label = f"{name} {confs[i]:.0%}"
            cv2.rectangle(img, (x, y), (x + bw, y + bh), color, 2)
            # FIX: the original putText call had a literal `...` placeholder.
            cv2.putText(img, label, (x, y - 5),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 1)
    cv2.imwrite("result.jpg", img)
# Highway Vehicle Detection & Speed Estimation
# Author: Your Name | NJIT Image Processing Course
import cv2
import numpy as np
from collections import defaultdict

MIN_AREA = 1200   # minimum contour area (px²)
SL1_Y = 0.45      # speed line 1 position (fraction of frame height)
SL2_Y = 0.65      # speed line 2 position (fraction of frame height)
DIST_M = 5.0      # real-world distance between the two lines, metres


def classify_vehicle(w, h):
    """Classify a bounding box by area/aspect ratio → (label, BGR colour)."""
    if w * h > 25000 and w / h > 1.8:
        return "TRUCK", (0, 180, 255)
    elif w * h > 8000:
        return "CAR", (0, 230, 80)
    else:
        return "MOTO", (255, 140, 0)


def process_video(path):
    """Count vehicles crossing speed line 1 and estimate their speeds.

    Returns the number of vehicles counted.  Vehicles are bucketed by a
    coarse horizontal position in lieu of a real tracker; speed is the
    line-to-line distance over the crossing-time delta, in km/h.
    """
    cap = cv2.VideoCapture(path)
    bg = cv2.createBackgroundSubtractorMOG2(history=200, varThreshold=50)
    # FIX: `kernel` was used by morphologyEx but never defined.
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))
    fps = cap.get(cv2.CAP_PROP_FPS) or 30.0  # fall back when metadata missing
    count = 0
    frame_idx = 0
    cross_t = {}   # x-bucket → frame index when the centroid crossed line 1
    speeds = {}    # x-bucket → estimated speed, km/h
    while True:
        ret, frame = cap.read()
        if not ret:
            break
        frame_idx += 1
        H, W = frame.shape[:2]
        l1, l2 = int(H * SL1_Y), int(H * SL2_Y)
        # Background subtraction → contours
        mask = bg.apply(frame)
        mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)
        cnts, _ = cv2.findContours(
            mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        for c in cnts:
            if cv2.contourArea(c) < MIN_AREA:
                continue
            x, y, w, h = cv2.boundingRect(c)
            vtype, col = classify_vehicle(w, h)
            cv2.rectangle(frame, (x, y), (x + w, y + h), col, 2)
            cx, cy = x + w // 2, y + h // 2
            bucket = cx // 80  # coarse lane bucket (no per-object tracker)
            if abs(cy - l1) < 8 and bucket not in cross_t:
                cross_t[bucket] = frame_idx
                count += 1  # FIX: the counter was never incremented
            elif abs(cy - l2) < 8 and bucket in cross_t and bucket not in speeds:
                # FIX: speeds/cross_t were declared but never populated.
                dt = (frame_idx - cross_t[bucket]) / fps
                if dt > 0:
                    speeds[bucket] = DIST_M / dt * 3.6  # m/s → km/h
    cap.release()  # FIX: the capture was never released
    return count
-- Educational Analytics Platform Schema
-- Author: Your Name | HCI Course — Montclair

-- One row per enrolled student.
CREATE TABLE students (
    student_id SERIAL PRIMARY KEY,
    name VARCHAR(120) NOT NULL,
    email VARCHAR(200) UNIQUE,
    grade_level SMALLINT
);

-- FIX: submissions references quizzes, but the table was never created.
CREATE TABLE quizzes (
    quiz_id SERIAL PRIMARY KEY,
    title VARCHAR(200) NOT NULL,
    max_score NUMERIC(5,2) DEFAULT 100
);

-- One row per quiz attempt.
CREATE TABLE submissions (
    submission_id SERIAL PRIMARY KEY,
    student_id INT REFERENCES students(student_id),
    quiz_id INT REFERENCES quizzes(quiz_id),
    total_score NUMERIC(5,2),
    submitted_at TIMESTAMPTZ DEFAULT NOW()
);

-- Average score per student
-- FIX: group by student_id as well as name so two students who share a
-- name are not merged into one row (matches the second query's grouping).
SELECT s.name,
       ROUND(AVG(sub.total_score), 1) AS avg_score,
       MIN(sub.total_score) AS lowest,
       MAX(sub.total_score) AS highest
FROM students s
JOIN submissions sub ON s.student_id = sub.student_id
GROUP BY s.student_id, s.name
ORDER BY avg_score DESC;

-- Students needing support (avg < 70)
SELECT s.name,
       ROUND(AVG(sub.total_score), 1) AS avg_score,
       'Needs Support' AS status
FROM students s
JOIN submissions sub ON s.student_id = sub.student_id
GROUP BY s.student_id, s.name
HAVING AVG(sub.total_score) < 70
ORDER BY avg_score;
-- Cloud Connector — Multi-Tenant Schema
-- Author: Your Name | Arete IT Services
-- Impact: +35% sales after deployment

-- One row per customer organisation.
CREATE TABLE tenants (
    tenant_id INT IDENTITY(1,1) PRIMARY KEY,
    tenant_code VARCHAR(50) UNIQUE NOT NULL,
    plan_tier VARCHAR(20) DEFAULT 'starter',
    is_active BIT DEFAULT 1
);

-- Cloud resources owned by a tenant; protected by row-level security below.
CREATE TABLE cloud_resources (
    resource_id INT IDENTITY(1,1) PRIMARY KEY,
    tenant_id INT NOT NULL REFERENCES tenants(tenant_id),
    resource_type VARCHAR(50),
    status VARCHAR(20) DEFAULT 'running',
    cost_per_hour DECIMAL(10,6)
);

-- Row-Level Security: tenants only see their rows
-- The predicate yields a row only when the row's tenant_id equals the
-- value stored in SESSION_CONTEXT by sp_TenantLogin (SCHEMABINDING is
-- required for a security-policy predicate function).
CREATE FUNCTION dbo.fn_TenantPredicate (@tenant_id INT)
RETURNS TABLE
WITH SCHEMABINDING
AS RETURN
    SELECT 1 AS result
    WHERE @tenant_id = CAST(SESSION_CONTEXT(N'tenant_id') AS INT);

-- Attach the predicate as a FILTER: non-matching rows become invisible
-- to SELECT/UPDATE/DELETE (a FILTER predicate does not block INSERTs).
CREATE SECURITY POLICY CloudResourcesPolicy
    ADD FILTER PREDICATE dbo.fn_TenantPredicate(tenant_id)
    ON dbo.cloud_resources;

-- Stored proc: set tenant context on login
-- NOTE(review): an unknown @tenant_code leaves @tid NULL, so the session
-- matches no rows — confirm this fail-closed behaviour is intended.
CREATE PROCEDURE dbo.sp_TenantLogin @tenant_code VARCHAR(50)
AS
BEGIN
    DECLARE @tid INT
    SELECT @tid = tenant_id FROM tenants
    WHERE tenant_code = @tenant_code
    EXEC sp_set_session_context N'tenant_id', @tid
END
// YuktiAI — Agent Configuration
// 6 AI agents running the entire business
const agents = [
  {
    id: "vikram-bda",
    name: "Vikram — Business Development",
    model: "deepseek/deepseek-r1:free",
    tools: ["apollo_scrape", "lead_score", "supabase_insert"],
    schedule: "0 9 * * 1-5" // 9am weekdays
  },
  {
    id: "arya-soa",
    name: "Arya — Sales Outreach",
    model: "google/gemini-flash-1.5:free",
    tools: ["gmail_send", "calendly_book", "ab_test"],
    sequence: [0, 3, 7, 10, 14] // email days
  },
  {
    id: "priya-rtsa",
    name: "Priya — Support Agent",
    model: "anthropic/claude-haiku-4-5",
    tools: ["crisp_chat", "vector_search"],
    sla_hours: 2
  }
];

/**
 * Dispatch a single task to the named agent's model via OpenRouter.
 *
 * @param {string} agentId - id field of an entry in `agents`
 * @param {string} task - user-role message content sent to the model
 * @returns {Promise<string>} the first choice's message content
 * @throws {Error} for an unknown agentId or a non-2xx API response
 */
async function runAgent(agentId, task) {
  const agent = agents.find(a => a.id === agentId);
  // FIX: an unknown id previously crashed later on `agent.model`.
  if (!agent) throw new Error(`Unknown agent: ${agentId}`);
  const res = await fetch("https://openrouter.ai/api/v1/chat/completions", {
    method: "POST",
    // FIX: OpenRouter rejects requests without these headers.
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}`
    },
    body: JSON.stringify({
      model: agent.model,
      messages: [{ role: "user", content: task }]
    })
  });
  // FIX: indexing choices[0] on an error payload threw an opaque TypeError.
  if (!res.ok) throw new Error(`OpenRouter ${res.status}: ${await res.text()}`);
  const data = await res.json();
  return data.choices[0].message.content;
}
💡 Replace all GitHub/Colab/Netlify URLs above with your real links after uploading your code files
Whether it's a job opportunity, a collaboration, or a conversation about data and design — I'd love to hear from you.
I reply within 24 hours