Compare commits


66 commits

Author SHA1 Message Date
smfahim25
7103e802b0 update 2025-03-20 15:59:17 +06:00
smfahim25
27339ba1bd update 2025-03-20 14:33:41 +06:00
smfahim25
f3416f86f6 added schema 2025-03-20 12:12:06 +06:00
smfahim25
38b514f3a9 update cors 2025-03-19 14:58:24 +06:00
smfahim25
657a8341c6 try to 2025-03-19 14:48:48 +06:00
smfahim25
a525e9388d try to connect minio 2025-03-19 14:19:00 +06:00
smfahim25
e75ec62fa9 h 2025-03-19 14:12:17 +06:00
smfahim25
1e8ce2bb8a f 2025-03-19 14:10:23 +06:00
smfahim25
048f9bc9a7 try to 2025-03-19 14:09:42 +06:00
smfahim25
f3aa472e7e update 2025-03-19 14:07:46 +06:00
smfahim25
bfa9fee43f minio setup 2025-03-19 14:04:54 +06:00
smfahim25
c1bd49a018 try to solve swagger issue 2025-03-19 14:00:02 +06:00
smfahim25
d68db66cc4 try to solve swagger issue 2025-03-19 13:02:19 +06:00
smfahim25
1f57c95a4f update 2025-03-19 12:56:43 +06:00
smfahim25
31e9508dc2 try to solve swagger issue 2025-03-19 12:55:04 +06:00
smfahim25
38fe798582 try to solve swagger issue 2025-03-19 12:54:37 +06:00
smfahim25
75ad9eeceb Merge branch 'canvas_updated_be' 2025-03-19 12:51:45 +06:00
smfahim25
0ec6755c97 update 2025-03-19 12:34:43 +06:00
smfahim25
81332dff5f update 2025-03-19 12:29:40 +06:00
smfahim25
25fa736170 update 2025-03-19 12:23:58 +06:00
smfahim25
420d4cb5f8 swagger issue 2025-03-19 12:22:13 +06:00
smfahim25
f4c1c9fcb0 try to solve swagger issue 2025-03-19 12:18:57 +06:00
smfahim25
994c9fcb02 try to solve swagger issue 2025-03-19 12:17:22 +06:00
smfahim25
eab5cf7e05 remove prefix 2025-03-19 12:14:55 +06:00
smfahim25
9550fc151b remove prefix 2025-03-19 11:59:06 +06:00
smfahim25
d4a1cd4369 remove prefix 2025-03-19 11:53:31 +06:00
smfahim25
dee2bf2fc0 remove prefix 2025-03-19 11:48:32 +06:00
smfahim25
be29aed2a2 remove prefix 2025-03-19 11:45:27 +06:00
smfahim25
e5132b2270 remove prefix 2025-03-19 11:43:12 +06:00
smfahim25
6f6a9562ee remove prefix 2025-03-19 11:38:42 +06:00
smfahim25
dac2478198 remove prefix 2025-03-19 11:30:52 +06:00
smfahim25
c20bf7a178 remove prefix 2025-03-19 11:23:47 +06:00
smfahim25
d58e78bb0e remove prefix 2025-03-19 11:19:36 +06:00
smfahim25
9ed91b4a06 update 00000 2025-03-19 10:54:15 +06:00
smfahim25
6bbf2137b5 update 00000 2025-03-19 10:50:33 +06:00
smfahim25
40d600ab5b update 00000 2025-03-19 10:45:42 +06:00
smfahim25
f9da806ec9 update 00000 2025-03-19 10:39:39 +06:00
smfahim25
2f80b6dc56 update 00000 2025-03-19 10:37:06 +06:00
smfahim25
e13b787836 update 00000 2025-03-19 10:29:15 +06:00
smfahim25
a7f383a0c8 update 00000 2025-03-19 10:26:34 +06:00
smfahim25
17b33bc81a update 00000 2025-03-19 10:23:59 +06:00
smfahim25
48f8013dfe update 00000 2025-03-19 10:17:54 +06:00
smfahim25
af0d91ed94 update 00000 2025-03-19 10:12:05 +06:00
smfahim25
c562a82d70 update 00000 2025-03-19 10:06:43 +06:00
smfahim25
c194ca1495 update 00000 2025-03-19 09:50:44 +06:00
smfahim25
e1be753ce2 update 00000 2025-03-19 09:45:05 +06:00
smfahim25
b9f369353d update 2025-03-19 09:40:39 +06:00
7954b53c00 cors remove 2025-02-22 08:32:32 +00:00
55548f7388 no expose for coolify again 2025-02-22 08:31:40 +00:00
d621437a4d revert ac5e68536f
revert Merge branch 'canvas_be_v1'
2025-02-22 08:26:26 +00:00
5373ad9dba port exposed 2025-02-22 08:11:42 +00:00
94830af20d cors added for dev dashboard 2025-02-22 07:12:46 +00:00
Sanjib Kumar Sen
ac5e68536f Merge branch 'canvas_be_v1' 2025-02-20 15:31:26 +06:00
Sanjib Kumar Sen
3f979870d6 Merge branch 'main' into canvas_be_v1 2025-02-20 15:29:20 +06:00
9ebea74b45 Merge pull request 'dev' (#3) from dev into main
Reviewed-on: #3
2025-02-19 06:31:24 +00:00
Sanjib Kumar Sen
411e48f10c api root 2025-02-19 12:29:38 +06:00
Sanjib Kumar Sen
f91ab7749a no expose for coolify 2025-02-19 12:23:19 +06:00
e11a051730 Merge pull request 'dev' (#2) from dev into main
Reviewed-on: #2
2025-02-19 05:36:36 +00:00
Sanjib Kumar Sen
712c4ca029 db port changed 2025-02-19 11:35:37 +06:00
Sanjib Kumar Sen
74f00d4ba5 minio endpoint 2025-02-19 11:35:11 +06:00
Sanjib Kumar Sen
341234dfa2 port changed 2025-02-19 11:34:18 +06:00
b1e8b7a664 Merge pull request 'added nginx' (#1) from dev into main
Reviewed-on: #1
2025-02-19 05:27:46 +00:00
Sanjib Kumar Sen
53d2c7b46a coolify needs no port 2025-02-19 11:26:00 +06:00
Sanjib Kumar Sen
b247985a93 done 2025-02-19 10:57:22 +06:00
Sanjib Kumar Sen
0a2494cd7e added prod 2025-02-16 12:27:04 +06:00
247e520900 Delete .env 2025-02-13 05:16:40 +00:00
16 changed files with 743 additions and 462 deletions

Dockerfile (new file, 37 lines added)

@@ -0,0 +1,37 @@
# Build stage
FROM oven/bun:1 AS builder
# Copy package files
COPY package.json .
COPY bun.lockb .
# Install dependencies
RUN bun install --frozen-lockfile
# Copy source code
COPY . .
# Build the application
RUN bun run build
EXPOSE 5005
# Production stage
# FROM debian:bookworm-slim
# WORKDIR /app
# # Copy only the compiled binary from builder
# COPY --from=builder /app/server .
# # Expose the port your app runs on
# EXPOSE 3000
# # Copy the entrypoint script
# COPY entrypoint.sh .
# Make the entrypoint script executable
RUN chmod +x ./entrypoint.sh
# Set the entrypoint
ENTRYPOINT ["./entrypoint.sh"]
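
Note: the production stage is commented out, so the image runs straight from the build stage. The build script in package.json (shown further down) presumably compiles the app into the server binary that entrypoint.sh starts, and entrypoint.sh itself is already in the image through COPY . ., which is why the chmod still succeeds even though the dedicated COPY entrypoint.sh line is commented out (assuming it is not excluded by a .dockerignore).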

docker-compose.yml (new file, 53 lines added)

@@ -0,0 +1,53 @@
services:
api:
build:
context: .
dockerfile: Dockerfile
ports:
- "5005:5005"
depends_on:
db:
condition: service_healthy
minio:
condition: service_healthy
environment:
NODE_ENV: production
DATABASE_URL: ${DATABASE_URL}
db:
image: postgres:latest
environment:
POSTGRES_USER: ${DB_USER}
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_DB: ${DB_NAME}
# ports:
# - "${DB_PORT}:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${DB_USER} -d ${DB_NAME}"]
interval: 10s
timeout: 5s
retries: 5
start_period: 10s
minio:
image: minio/minio:latest
# ports:
# - "9000:9000"
# - "9001:9001"
environment:
MINIO_ROOT_USER: ${MINIO_ROOT_USER}
MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD}
command: server /data --console-address ":9001"
volumes:
- minio_data:/data
healthcheck:
test: ["CMD", "mc", "ready", "local"]
interval: 30s
timeout: 20s
retries: 3
volumes:
postgres_data:
minio_data:
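
Compose resolves DB_USER, DB_PASSWORD, DB_NAME, DATABASE_URL and the MinIO credentials from the deployment environment (or an .env file next to the compose file). Because the host port mappings for db and minio are commented out, the API only reaches them over the internal compose network, so DATABASE_URL is presumably of the form postgres://<DB_USER>:<DB_PASSWORD>@db:5432/<DB_NAME>.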

drizzle.config.ts

@@ -1,10 +1,10 @@
import { defineConfig } from 'drizzle-kit';
import { ENV } from './src/config/env';
import { defineConfig } from "drizzle-kit";
import { ENV } from "./src/config/env";
export default defineConfig({
out: './drizzle',
schema: './src/db/schema.ts',
dialect: 'postgresql',
out: "./drizzle",
schema: "./src/db/schema.ts",
dialect: "postgresql",
dbCredentials: {
url: ENV.DATABASE_URL!,
},

entrypoint.sh (new file, 9 lines added)

@@ -0,0 +1,9 @@
#!/bin/sh
set -e
# Run migrations
bun run db:migrate
# Start the application
echo "Starting the application..."
./server

package.json

@@ -3,10 +3,11 @@
"version": "1.0.50",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"db:studio": "drizzle-kit studio --port=3000",
"db:studio": "drizzle-kit studio",
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
"db:push": "drizzle-kit push:pg",
"build": "bun build --compile --minify-whitespace --minify-syntax --target bun --outfile server ./src/app.ts",
"dev": "bun run --watch src/app.ts"
},
"dependencies": {
@@ -14,6 +15,7 @@
"@elysiajs/cookie": "^0.8.0",
"@elysiajs/cors": "^1.2.0",
"@elysiajs/swagger": "^1.2.0",
"drizzle-kit": "^0.30.2",
"dotenv": "^16.4.7",
"drizzle-orm": "^0.38.4",
"elysia": "latest",
@@ -26,7 +28,6 @@
"devDependencies": {
"@types/pg": "^8.11.10",
"bun-types": "latest",
"drizzle-kit": "^0.30.2",
"tsx": "^4.19.2"
},
"module": "src/app.js"

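drizzle-kit moves from devDependencies into dependencies here, presumably because entrypoint.sh runs bun run db:migrate (drizzle-kit migrate) inside the production image, where it has to be available at runtime rather than only during development.
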
auth.controller.ts

@@ -1,5 +1,5 @@
import { createClerkClient } from "@clerk/backend";
import { ENV } from "../../config/env"
import { ENV } from "../../config/env";
import { users } from "../../db/schema";
import { db } from "../../db";
import { eq } from "drizzle-orm";
@@ -7,7 +7,11 @@ import { eq } from "drizzle-orm";
// @ts-ignore
import jwt from "jsonwebtoken";
import { checkUserInDB, createUser, storeRefreshToken } from "../../helper/auth/auth.helper";
import {
checkUserInDB,
createUser,
storeRefreshToken,
} from "../../helper/auth/auth.helper";
import { verifyAuth } from "../../middlewares/auth.middlewares";
// Initialize Clerk with your API key
@@ -17,11 +21,10 @@ export const getUserData = async (userId: string) => {
try {
const [user, checkInDB] = await Promise.all([
clerk.users.getUser(userId),
checkUserInDB(userId)
checkUserInDB(userId),
]);
if (user && !checkInDB.found) {
// Validate and transform user data
const userDBData = {
id: user.id,
@@ -33,10 +36,18 @@ export const getUserData = async (userId: string) => {
const userData = await createUser(userDBData);
return { status: 200, message: "User retrieved successfully", data: userData };
return {
status: 200,
message: "User retrieved successfully",
data: userData,
};
}
if (user && checkInDB.found) {
return { status: 200, message: "User retrieved successfully", data: checkInDB };
return {
status: 200,
message: "User retrieved successfully",
data: checkInDB,
};
}
if (!user) {
return { status: 404, message: "User not found" };
@@ -47,20 +58,36 @@ export const getUserData = async (userId: string) => {
}
};
export const updateUser = async (id: string, body: {
paid_status: string,
package_expire_date: string,
}) => {
export const updateUser = async (
id: string,
body: {
paid_status: string;
package_expire_date: string;
}
) => {
try {
const updateUserData = await db.update(users).set({ paid_status: body?.paid_status, expires_in: body?.package_expire_date }).where(eq(users.id, id)).returning({ updatedId: users.id });
return { status: 200, message: "User updated successfully", updateUserData };
const updateUserData = await db
.update(users)
.set({
paid_status: body?.paid_status,
expires_in: body?.package_expire_date,
})
.where(eq(users.id, id))
.returning({ updatedId: users.id });
return {
status: 200,
message: "User updated successfully",
updateUserData,
};
} catch (error: any) {
console.error("Error in updateUser:", error.message || error.toString());
return { status: 500, message: `An error occurred while updating the user` };
}
return {
status: 500,
message: `An error occurred while updating the user`,
};
}
};
export const generateToken = async (context: any) => {
try {
@@ -71,16 +98,24 @@ export const generateToken = async (context: any) => {
if (access_cookie !== undefined || refresh_cookie !== undefined) {
const verify = await verifyAuth(context?.cookie);
return verify;
}
else if (access_cookie === undefined && refresh_cookie === undefined && userId !== undefined) {
} else if (
access_cookie === undefined &&
refresh_cookie === undefined &&
userId !== undefined
) {
const user = await checkUserInDB(userId);
if (user?.found === true) {
// generate access token
const accessToken = jwt.sign({ userId }, ENV.JWT_ACCESS_TOKEN_SECRET, { expiresIn: '3h' });
const accessToken = jwt.sign({ userId }, ENV.JWT_ACCESS_TOKEN_SECRET, {
expiresIn: "3h",
});
// generate refresh token
const refreshToken = jwt.sign({ userId }, ENV.JWT_REFRESH_TOKEN_SECRET, { expiresIn: '7d' });
const refreshToken = jwt.sign(
{ userId },
ENV.JWT_REFRESH_TOKEN_SECRET,
{ expiresIn: "7d" }
);
// store refresh token in db
const storeRToken = await storeRefreshToken(userId, refreshToken);
@@ -90,7 +125,7 @@ export const generateToken = async (context: any) => {
value: accessToken,
httpOnly: true,
secure: true, // Set to true in production
sameSite: 'none', // Adjust based on your needs
sameSite: "none", // Adjust based on your needs
path: "/",
maxAge: 3 * 60 * 60, // 3 hours in seconds
});
@@ -99,27 +134,33 @@ export const generateToken = async (context: any) => {
value: refreshToken,
httpOnly: true,
secure: true, // Set to true in production
sameSite: 'none', // Adjust based on your needs
sameSite: "none", // Adjust based on your needs
path: "/",
maxAge: 7 * 24 * 60 * 60, // 7 days in seconds
});
return { status: 201, message: "Token generated successfully", token: accessToken, user: user.user };
return {
status: 201,
message: "Token generated successfully",
token: accessToken,
user: user.user,
};
}
return { status: 500, message: "An error occurred while storing the refresh token" };
}
else {
return {
status: 500,
message: "An error occurred while storing the refresh token",
};
} else {
return { status: 404, message: "User not found" };
}
}
else {
} else {
return { status: 404, message: "Unauthorized!!!" };
}
} catch (error: any) {
console.error("Error in generateToken:", error.message || error.toString());
return { status: 500, message: `An error occurred while generating the token` };
return {
status: 500,
message: `An error occurred while generating the token`,
};
}
}
};
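
The verifyAuth middleware that generateToken delegates to is not part of this diff. Purely as an illustration of the verification half of the flow, a minimal sketch could use the same jsonwebtoken secrets; the cookie names access_token and refresh_token are assumptions, since they are not visible in this diff:

import jwt from "jsonwebtoken";
import { ENV } from "../../config/env";

// Hypothetical sketch, not the repo's verifyAuth: try the access token first,
// then fall back to the refresh token, using the secrets generateToken signs with.
export const verifyAccessOrRefresh = (cookie: {
  access_token?: { value?: string }; // cookie name is an assumption
  refresh_token?: { value?: string }; // cookie name is an assumption
}) => {
  try {
    const access = cookie.access_token?.value;
    if (access) {
      const payload = jwt.verify(access, ENV.JWT_ACCESS_TOKEN_SECRET!) as { userId: string };
      return { status: 200, userId: payload.userId, token: access };
    }
    const refresh = cookie.refresh_token?.value;
    if (refresh) {
      // a full implementation would also compare this against the refresh token stored in the DB
      const payload = jwt.verify(refresh, ENV.JWT_REFRESH_TOKEN_SECRET!) as { userId: string };
      return { status: 200, userId: payload.userId, token: refresh };
    }
    return { status: 401, message: "Unauthorized" };
  } catch {
    return { status: 401, message: "Invalid or expired token" };
  }
};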

auth.route.ts

@@ -1,47 +1,80 @@
import Elysia, { t } from "elysia";
import { Elysia, t } from "elysia";
import { generateToken, getUserData, updateUser } from "./auth.controller";
import { verifyAuth } from "../../middlewares/auth.middlewares";
export const authRoute = new Elysia({
prefix: "/auth",
tags: ["Auth"],
export const authRoute = new Elysia({ prefix: "/auth" });
authRoute.get(
"/user/:userId",
async ({ params: { userId } }) => await getUserData(userId),
{
detail: {
description: "Routes for managing users",
tags: ["Auth"],
summary: "Get user data",
},
params: t.Object({
userId: t.String(),
}),
}
})
);
authRoute.get("/user/:userId", async ({ params: { userId } }) => await getUserData(userId), {
authRoute.post(
"/user/update/:userId",
async ({ params: { userId }, body }) => await updateUser(userId, body),
{
detail: {
tags: ["Auth"],
summary: "Update user",
},
params: t.Object({
userId: t.String()
})
});
authRoute.post("/user/update/:userId", async ({ params: { userId }, body }) => await updateUser(userId, body), {
params: t.Object({
userId: t.String()
userId: t.String(),
}),
body: t.Object({
paid_status: t.String(),
package_expire_date: t.String(),
})
});
}),
}
);
authRoute.get("/generate-token/:userId", async (context) => await generateToken(context));
authRoute.get(
"/generate-token/:userId",
async (context) => await generateToken(context),
{
detail: {
tags: ["Auth"],
summary: "Generate token",
},
params: t.Object({
userId: t.String(),
}),
}
);
authRoute.get("/user/me", async ({ cookie }) => {
authRoute.get(
"/user/me",
async ({ cookie }) => {
const authData = await verifyAuth(cookie);
if (authData.status !== 200) {
return authData;
}
else {
const userId: string | any = authData.userId;
} else {
const userId = authData.userId;
const response = await getUserData(userId);
if (response?.status === 200) {
return { ...response.data, token: authData.token, status: 200, message: "User data fetched successfully" };
}
else {
return {
...response.data,
token: authData.token,
status: 200,
message: "User data fetched successfully",
};
} else {
return response;
}
}
})
},
{
detail: {
tags: ["Auth"],
summary: "Get current user",
},
}
);
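
Because the tokens are stored in httpOnly cookies with secure and sameSite "none", a browser client has to opt into sending them. A usage sketch (the base URL is a placeholder, not taken from this repo):

// Hypothetical client-side call; https://api.example.com is a placeholder base URL.
const res = await fetch("https://api.example.com/auth/user/me", {
  credentials: "include", // required so the httpOnly access/refresh cookies are sent
});
const me = await res.json();
console.log(me.status, me.message);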

design.controller.ts

@@ -7,12 +7,13 @@ export const getAllDesign = async (token: string) => {
headers: {
Authorization: `Bearer ${token}`,
"Content-Type": "application/json",
}
},
});
const data = await response.json();
console.log(response);
return data;
} catch (error: any) {
console.log(error);
return { status: 500, message: error.message, token };
}
}
};

src/api/index.ts

@@ -1,4 +1,4 @@
import Elysia from "elysia";
import { Elysia } from "elysia";
import { projectRoutes } from "./project/project.route";
import { uploadRoutes } from "./upload/upload.route";
import { authRoute } from "./auth/auth.route";
@@ -6,17 +6,22 @@ import { downloadRoute } from "./downloadCount/download.count.route";
import { photoLibraryRoutes } from "./photoLibrary/photo.library.route";
import { designRoute } from "./design/design.route";
export const api = new Elysia({
prefix: "/api",
});
api.get("/", () => {
export const api = new Elysia({ prefix: "" })
.get("/", () => {
console.log("Root endpoint accessed");
return "Hello from PlanPostAI Canvas API";
})
.use(authRoute)
.use(projectRoutes)
.use(uploadRoutes)
.use(downloadRoute)
.use(photoLibraryRoutes)
.use(designRoute)
.onError(({ code, error, set }) => {
console.error(`API Error: ${code}`, error);
if (code === "NOT_FOUND") {
set.status = 404;
return "API Endpoint Not Found";
}
return "API Error Occurred";
});
api.use(authRoute);
api.use(projectRoutes);
api.use(uploadRoutes);
api.use(downloadRoute);
api.use(photoLibraryRoutes);
api.use(designRoute);
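
Dropping the "/api" prefix means the mounted routes now live at the root, e.g. /auth/user/me rather than /api/auth/user/me, and any unknown path is answered by the new onError handler with a 404 and "API Endpoint Not Found".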

project.controller.ts

@@ -8,18 +8,26 @@ import { removeBucket } from "../../helper/upload/removeBucket";
export const getAllProjects = async (userId: string, token: string) => {
try {
// Fetch all projects for the given user
const allProjects = await db.select({
const allProjects = await db
.select({
id: projects.id,
name: projects.name,
description: projects.description,
preview_url: projects.preview_url,
object: projects.object,
}).from(projects).where(eq(projects.userId, userId));
})
.from(projects)
.where(eq(projects.userId, userId));
// Identify projects where 'object' is empty or 'object.objects' is empty
const projectsToDelete = allProjects.filter(proj =>
(proj.object && typeof proj.object === "object" && Object.keys(proj.object).length === 0) ||
(proj.object?.objects && Array.isArray(proj.object.objects) && proj.object.objects.length === 0)
const projectsToDelete = allProjects.filter(
(proj) =>
(proj.object &&
typeof proj.object === "object" &&
Object.keys(proj.object).length === 0) ||
(proj.object?.objects &&
Array.isArray(proj.object.objects) &&
proj.object.objects.length === 0)
);
// Delete projects with empty 'object' or empty 'object.objects'
@@ -37,10 +45,15 @@ export const getAllProjects = async (userId: string, token: string) => {
);
// Get remaining projects
const remainingProjects = allProjects.filter(proj =>
const remainingProjects = allProjects.filter(
(proj) =>
!(
(proj.object && typeof proj.object === "object" && Object.keys(proj.object).length === 0) ||
(proj.object?.objects && Array.isArray(proj.object.objects) && proj.object.objects.length === 0)
(proj.object &&
typeof proj.object === "object" &&
Object.keys(proj.object).length === 0) ||
(proj.object?.objects &&
Array.isArray(proj.object.objects) &&
proj.object.objects.length === 0)
)
);
@@ -48,29 +61,51 @@ export const getAllProjects = async (userId: string, token: string) => {
return { status: 404, message: "No projects found", token };
}
return { status: 200, message: "Projects fetched successfully", data: remainingProjects, token };
return {
status: 200,
message: "Projects fetched successfully",
data: remainingProjects,
token,
};
} catch (error: any) {
console.log(error.message);
return { status: 500, message: "An error occurred while fetching projects", token };
return {
status: 500,
message: "An error occurred while fetching projects",
token,
};
}
};
export const getEachProjects = async (id: string, token: string) => {
try {
const project = await db.select({
const project = await db
.select({
id: projects.id,
name: projects.name,
description: projects.description,
preview_url: projects.preview_url,
object: projects.object,
}).from(projects).where(eq(projects.id, id)).limit(1);
})
.from(projects)
.where(eq(projects.id, id))
.limit(1);
if (project.length === 0) {
return { status: 404, message: "Project not found", token };
}
return { status: 200, message: "Project fetched successfully", data: project[0], token };
return {
status: 200,
message: "Project fetched successfully",
data: project[0],
token,
};
} catch (error: any) {
console.log(error.message);
return { status: 500, message: "An error occurred while fetching projects", token };
return {
status: 500,
message: "An error occurred while fetching projects",
token,
};
}
};
@@ -78,18 +113,34 @@ export const createProject = async (userId: string, token: string) => {
try {
const { id } = await createEmptyProject(userId);
const bucket = await createBucket(id);
return { status: 200, message: "New project created successfully", data: { id, bucketName: bucket }, token };
return {
status: 200,
message: "New project created successfully",
data: { id, bucketName: bucket },
token,
};
} catch (error: any) {
console.log(error.message);
return { status: 500, message: "An error occurred while creating projects", token }
return {
status: 500,
message: "An error occurred while creating projects",
token,
};
}
};
export const updateProject = async (id: string, body: any, token: string, user_id: string) => {
export const updateProject = async (
id: string,
body: any,
token: string,
user_id: string
) => {
try {
// 1. Validate if project exists
const existingProject = await db.select().from(projects).where(eq(projects.id, id));
const existingProject = await db
.select()
.from(projects)
.where(eq(projects.id, id));
if (existingProject.length === 0) {
return { status: 404, message: "Project not found", token };
}
@@ -97,27 +148,40 @@ export const updateProject = async (id: string, body: any, token: string, user_i
const { object, name, description, preview_url } = body;
// The preview_url also comes from the client side: before the project is updated, a capture of the project is taken and uploaded to the bucket, then that URL is sent to the server. The rest follows the normal update process.
const updatedProject = await db.update(projects).set({
const updatedProject = await db
.update(projects)
.set({
object,
name,
description,
preview_url,
userId: user_id,
}).where(eq(projects.id, id)).returning({
})
.where(eq(projects.id, id))
.returning({
id: projects.id,
object: projects.object,
name: projects.name,
description: projects.description,
preview_url: projects.preview_url
preview_url: projects.preview_url,
});
if (updatedProject.length === 0) {
return { status: 500, message: "Failed to update the project", token };
}
return { status: 200, message: "Project updated successfully", data: updatedProject[0], token };
return {
status: 200,
message: "Project updated successfully",
data: updatedProject[0],
token,
};
} catch (error: any) {
console.log("Error updating project:", error.message || error.toString());
return { status: 500, message: "An error occurred while updating the project", token };
return {
status: 500,
message: "An error occurred while updating the project",
token,
};
}
};
@@ -146,16 +210,21 @@ export const deleteProject = async (id: string, token: string) => {
return {
status: bucketDeletionResult.status,
message: `Error deleting bucket: ${bucketDeletionResult.message}`,
token
token,
};
}
return { status: 200, message: "Project and associated bucket deleted successfully", token };
return {
status: 200,
message: "Project and associated bucket deleted successfully",
token,
};
}
} catch (error: any) {
console.log("Error in deleteProject:", error.message || error.toString());
return { status: 500, message: "An error occurred while deleting the project", token };
return {
status: 500,
message: "An error occurred while deleting the project",
token,
};
}
};

project.route.ts

@@ -1,5 +1,11 @@
import { Elysia, t } from "elysia";
import { createProject, deleteProject, getAllProjects, getEachProjects, updateProject } from "./project.controller";
import {
createProject,
deleteProject,
getAllProjects,
getEachProjects,
updateProject,
} from "./project.controller";
import { verifyAuth } from "../../middlewares/auth.middlewares";
export const projectRoutes = new Elysia({
@@ -7,29 +13,31 @@ export const projectRoutes = new Elysia({
tags: ["Projects"],
detail: {
description: "Routes for managing projects",
}
},
}).derive(async ({ cookie }) => {
const authData = await verifyAuth(cookie);
return { authData }; // Inject into context
});
projectRoutes.get("/each/:project_id", async ({ params: { project_id }, authData }) => {
if (authData.status !== 200)
return authData;
projectRoutes.get(
"/each/:project_id",
async ({ params: { project_id }, authData }) => {
if (authData.status !== 200) return authData;
else {
const token = authData.token;
const response = await getEachProjects(project_id, token);
return response;
}
}, {
},
{
params: t.Object({
project_id: t.String()
})
});
project_id: t.String(),
}),
}
);
projectRoutes.get("/", async ({ authData }: any) => {
if (authData.status !== 200)
return authData;
if (authData.status !== 200) return authData;
else {
const userId = authData.userId;
const token = authData.token;
@@ -39,8 +47,7 @@ projectRoutes.get("/", async ({ authData }: any) => {
});
projectRoutes.post("/create", async ({ authData }: any) => {
if (authData.status !== 200)
return authData;
if (authData.status !== 200) return authData;
else {
const userId = authData.userId;
const token = authData.token;
@@ -49,39 +56,49 @@ projectRoutes.post("/create", async ({ authData }: any) => {
}
});
projectRoutes.put("/update/:project_id", async ({ body, params: { project_id }, authData }) => {
if (authData.status !== 200)
return authData;
projectRoutes.put(
"/update/:project_id",
async ({ body, params: { project_id }, authData }) => {
if (authData.status !== 200) return authData;
else {
const token = authData.token;
const user_id = authData?.userId;
// sending user_id to the controller so the project is updated with the user_id when a user designs an existing project from the design project panel
const response = await updateProject(project_id, body, token, user_id as string);
const response = await updateProject(
project_id,
body,
token,
user_id as string
);
return response;
}
}, {
},
{
params: t.Object({
project_id: t.String()
project_id: t.String(),
}),
body: t.Object({
object: t.Record(t.String(), t.Any()), // Allows any JSON object
name: t.String(),
description: t.String(),
preview_url: t.String(),
})
});
}),
}
);
projectRoutes.delete("/delete/:project_id", async ({ params: { project_id }, authData }) => {
if (authData.status !== 200)
return authData;
projectRoutes.delete(
"/delete/:project_id",
async ({ params: { project_id }, authData }) => {
if (authData.status !== 200) return authData;
else {
const token = authData.token;
const response = await deleteProject(project_id, token);
return response;
}
}, {
},
{
params: t.Object({
project_id: t.String()
})
});
project_id: t.String(),
}),
}
);

src/app.ts

@@ -1,60 +1,52 @@
import { Elysia } from "elysia";
import swagger from '@elysiajs/swagger';
import { ENV } from "./config/env";
import { Elysia, t } from "elysia";
import swagger from "@elysiajs/swagger";
import cors from "@elysiajs/cors";
import { ENV } from "./config/env";
import { api } from "./api";
const allowedOrigins = [
const app = new Elysia()
.use(
cors({
origin: [
"http://localhost:5175",
"http://localhost:5174",
"http://localhost:5173",
"https://dashboard.planpostai.com",
"https://dev.dashboard.planpostai.com",
"https://canvas.planpostai.com",
];
const app = new Elysia({
prefix: "",
tags: ["Default"],
})
.use(cors({
origin: allowedOrigins,
"https://canvasdev.planpostai.com",
],
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"],
allowedHeaders: ["Content-Type", "Authorization", "X-Requested-With", "Accept", "Origin", "Access-Control-Allow-Origin"],
allowedHeaders: [
"Content-Type",
"Authorization",
"X-Requested-With",
"Accept",
"Origin",
"Access-Control-Allow-Origin",
],
credentials: true,
}))
.use(swagger({
path: "/api/docs",
})
)
.get("/test", () => "Hello World", {})
.use(api)
.use(
swagger({
path: "/swagger",
documentation: {
openapi: "3.1.0",
info: {
title: "Canvas API",
version: "1.0.0",
description: "Canvas API Documentation",
},
tags: [
{
name: "Projects",
description: "All APIs related to Projects",
},
{
name: "Uploads",
description: "All APIs related to Uploads"
}
],
}
}))
.onError(({ code, error }) => {
if (code === 'NOT_FOUND')
return 'Not Found :(';
console.log("hello from app.ts under error");
console.error(error)
})
)
.listen(ENV.SERVER_PORT);
app.routes.forEach((route) => {
console.log(`Route: ${route.method} ${route.path}`);
});
// all routes here
app.use(api);
app.listen(ENV.SERVER_PORT, () => {
console.log(`🦊 Elysia is running at ${ENV.SERVER_URL}:${ENV.SERVER_PORT}`)
})
console.log(`🦊 Elysia is running at ${ENV.SERVER_URL}`);
console.log(`Swagger docs available at ${ENV.SERVER_URL}/swagger`);

src/config/env.ts

@@ -1,4 +1,4 @@
import 'dotenv/config'
import "dotenv/config";
export const ENV = {
SERVER_URL: process.env.SERVER_URL,
@@ -7,11 +7,11 @@ export const ENV = {
DATABASE_URL: process.env.DATABASE_URL,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
MINIO_ENDPOINT: process.env.MINIO_ENDPOINT,
MINIO_ENDPOINT: process.env.MINIO_URL,
MINIO_PORT: process.env.MINIO_PORT,
CLERK_SECRET_KEY: process.env.CLERK_SECRET_KEY,
JWT_ACCESS_TOKEN_SECRET: process.env.JWT_ACCESS_TOKEN_SECRET,
JWT_REFRESH_TOKEN_SECRET: process.env.JWT_REFRESH_TOKEN_SECRET,
PEXELS_URL: process.env.PEXELS_URL,
PEXELS_ACCESS_KEY: process.env.PEXELS_ACCESS_KEY,
}
};
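
MINIO_ENDPOINT is now populated from process.env.MINIO_URL, so deployments have to define MINIO_URL rather than MINIO_ENDPOINT; judging by the client change below, the value is expected to be a full URL including its http:// or https:// scheme.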

MinIO client

@@ -2,9 +2,8 @@ import { Client } from "minio";
import { ENV } from "../config/env";
export const minioClient = new Client({
endPoint: ENV.MINIO_ENDPOINT!,
port: ENV.MINIO_PORT,
useSSL: false,
endPoint: ENV.MINIO_ENDPOINT!.replace("http://", "").replace("https://", ""),
useSSL: ENV.MINIO_ENDPOINT!.startsWith("https"),
accessKey: ENV.MINIO_ACCESS_KEY,
secretKey: ENV.MINIO_SECRET_KEY,
})
});
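
The new client config derives the hostname by stripping the scheme, infers SSL from the prefix, and no longer passes an explicit port. A slightly more defensive variant (a sketch, not what this commit does) could parse the value with the standard URL class, assuming MINIO_ENDPOINT is a full URL such as https://minio.example.com:9000:

import { Client } from "minio";
import { ENV } from "../config/env";

// Sketch: parse the endpoint as a URL instead of stripping the scheme by hand.
// Assumes ENV.MINIO_ENDPOINT is a full URL, e.g. "https://minio.example.com:9000".
const endpoint = new URL(ENV.MINIO_ENDPOINT!);

export const minioClient = new Client({
  endPoint: endpoint.hostname,
  // URL.port is "" when no port is given; fall back to the protocol default.
  port: endpoint.port ? Number(endpoint.port) : endpoint.protocol === "https:" ? 443 : 80,
  useSSL: endpoint.protocol === "https:",
  accessKey: ENV.MINIO_ACCESS_KEY!,
  secretKey: ENV.MINIO_SECRET_KEY!,
});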

src/db/schema.ts

@@ -1,4 +1,13 @@
import { boolean, integer, json, pgTable, text, timestamp, uuid, jsonb } from "drizzle-orm/pg-core";
import {
boolean,
integer,
json,
pgTable,
text,
timestamp,
uuid,
jsonb,
} from "drizzle-orm/pg-core";
export const users = pgTable("users", {
id: text("user_id").primaryKey().notNull(),
@@ -33,3 +42,18 @@ export const uploads = pgTable("uploads", {
created_at: timestamp("created_at").defaultNow(),
updated_at: timestamp("updated_at").defaultNow(),
});
export const shapes = pgTable("shapes", {
id: uuid("shape_id").defaultRandom().primaryKey(),
shapes: text("shapes").notNull(),
created_at: timestamp("created_at").defaultNow(),
updated_at: timestamp("updated_at").defaultNow(),
});
export const category = pgTable("project_category", {
id: uuid("category_id").defaultRandom().primaryKey(),
user_id: uuid().references(() => users.id),
category: text("category").notNull(),
created_at: timestamp("created_at").defaultNow(),
updated_at: timestamp("updated_at").defaultNow(),
});
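
A usage sketch for the new shapes table with drizzle-orm; the import paths mirror the controllers above but are assumptions, and the shape payload is only an example:

import { eq } from "drizzle-orm";
import { db } from "../../db"; // relative path is an assumption
import { shapes } from "../../db/schema";

// Example only: store a serialized shape and read it back by id.
const [created] = await db
  .insert(shapes)
  .values({ shapes: JSON.stringify({ type: "rect", width: 100, height: 50 }) })
  .returning({ id: shapes.id });

const stored = await db
  .select()
  .from(shapes)
  .where(eq(shapes.id, created.id));

console.log(stored[0]);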