Diffstat (limited to 'backend/src/controllers')
 backend/src/controllers/getRobots.ts | 56 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 56 insertions(+), 0 deletions(-)
diff --git a/backend/src/controllers/getRobots.ts b/backend/src/controllers/getRobots.ts
new file mode 100644
index 0000000..b634306
--- /dev/null
+++ b/backend/src/controllers/getRobots.ts
@@ -0,0 +1,56 @@
+import { Request, Response } from "express";
+import { QueryResult } from "pg";
+import db from "../database/postgres.js";
+import redisClient from "../database/redis.js";
+import { ErrorResponse } from "../types/error.js";
+import { Robot, RobotsResponse } from "../types/robot.js";
+
+const CACHE_TTL = 10; // cache lifetime in seconds (used as the Redis EX option)
+const ROBOTS_CACHE_KEY = "allMyRobots";
+
+async function getRobots(_req: Request, res: Response) {
+  try {
+    // Check the Redis cache first and serve the cached robots if present
+    const cachedData = await redisClient.get(ROBOTS_CACHE_KEY);
+
+    if (cachedData) {
+      console.log("Data served from Redis cache.");
+      const robots: Robot[] = JSON.parse(cachedData);
+
+      const response: RobotsResponse = {
+        source: "cache",
+        data: robots,
+      };
+
+      return res.status(200).json(response);
+    }
+
+    // Otherwise query the database, cache the result in Redis, and return it
+    const robotsQuery: QueryResult<Robot> = await db.query(
+      "SELECT * FROM robots ORDER BY id;"
+    );
+    const robots = robotsQuery.rows;
+
+    await redisClient.set(ROBOTS_CACHE_KEY, JSON.stringify(robots), {
+      EX: CACHE_TTL,
+    });
+
+    console.log("Cache miss: data queried from PostgreSQL.");
+    const response: RobotsResponse = {
+      source: "database",
+      data: robots,
+    };
+
+    return res.status(200).json(response);
+  } catch (error) {
+    console.error("Failed to load the robots:", error);
+    const errorResponse: ErrorResponse = {
+      message: "Internal server error while loading the robots.",
+      error,
+    };
+
+    return res.status(500).json(errorResponse);
+  }
+}
+
+export default getRobots;
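
For context, a minimal sketch of how this controller might be mounted follows; the entry-point file, the "/robots" route path, and the port are assumptions and are not part of this commit:

// Hypothetical app entry point (not in this diff): mounts the cached
// getRobots controller on GET /robots. Route path and port are assumed.
import express from "express";
import getRobots from "./controllers/getRobots.js";

const app = express();
app.get("/robots", getRobots);

app.listen(3000, () => {
  console.log("API listening on port 3000");
});

With this wiring, the first GET /robots in a 10-second window queries PostgreSQL and warms the "allMyRobots" key in Redis; requests arriving before the TTL expires are served from the cache.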