diff --git a/backend/eng.traineddata b/backend/eng.traineddata
new file mode 100644
index 0000000..6d11002
Binary files /dev/null and b/backend/eng.traineddata differ
diff --git a/backend/models/chat.model.js b/backend/models/chat.model.js
index dda14aa..daff858 100644
--- a/backend/models/chat.model.js
+++ b/backend/models/chat.model.js
@@ -3,18 +3,15 @@ import mongoose, {Schema} from 'mongoose';
const chatSchema = new Schema({
seller_id:{
type: Schema.Types.ObjectId,
- ref: 'User',
required: true
},
buyer_id:{
type: Schema.Types.ObjectId,
- ref: 'User',
required: true
},
messages:[{
sender:{
type: Schema.Types.ObjectId,
- ref: 'User',
required: true
},
text:{
diff --git a/backend/models/logistics.model.js b/backend/models/logistics.model.js
new file mode 100644
index 0000000..8cae437
--- /dev/null
+++ b/backend/models/logistics.model.js
@@ -0,0 +1,130 @@
+import mongoose, { Schema } from "mongoose";
+
+const logisticsSchema = new Schema(
+ {
+ trackingNumber: {
+ type: String,
+ required: true,
+ },
+ shipmentDocuments: {
+ type: [String],
+ required: true,
+ },
+ carrierCode: {
+ type: String,
+ required: true,
+ },
+ serviceId: {
+ type: String,
+ required: true,
+ },
+ serviceType: {
+ type: String,
+ required: true,
+ },
+ serviceCategory: {
+ type: String,
+ required: true,
+ },
+ totalBillingWeight: {
+ units: {
+ type: String,
+ enum: ["LB", "KG"],
+ required: true,
+ },
+ value: {
+ type: Number,
+ required: true,
+ },
+ },
+ surcharges: [
+ {
+ surchargeType: {
+ type: String,
+ required: true,
+ },
+ level: {
+ type: String,
+ required: true,
+ },
+ amount: {
+ type: Number,
+ required: true,
+ },
+ description: {
+ type: String,
+ // required: true,
+ },
+ },
+ ],
+ totalBaseCharge: {
+ type: Number,
+ required: true,
+ },
+ totalFreightDiscounts: {
+ type: Number,
+ required: true,
+ },
+ totalNetFreight: {
+ type: Number,
+ required: true,
+ },
+ totalSurcharges: {
+ type: Number,
+ required: true,
+ },
+ totalNetFedExCharge: {
+ type: Number,
+ required: true,
+ },
+ totalTaxes: {
+ type: Number,
+ required: true,
+ },
+ totalNetCharge: {
+ type: Number,
+ required: true,
+ },
+ totalRebates: {
+ type: Number,
+ required: true,
+ },
+ totalDutiesAndTaxes: {
+ type: Number,
+ required: true,
+ },
+ totalAncillaryFeesAndTaxes: {
+ type: Number,
+ required: true,
+ },
+ totalDutiesTaxesAndFees: {
+ type: Number,
+ required: true,
+ },
+ totalNetChargeWithDutiesAndTaxes: {
+ type: Number,
+ required: true,
+ },
+ trackingIds: [
+ {
+ trackingIdType: {
+ type: String,
+ required: true,
+ },
+ formId: {
+ type: String,
+ required: true,
+ },
+ trackingNumber: {
+ type: String,
+ required: true,
+ },
+ },
+ ],
+ },
+ {
+ timestamps: true,
+ }
+);
+
+export const Logistics = mongoose.model("Logistics", logisticsSchema);
diff --git a/backend/models/order.model.js b/backend/models/order.model.js
deleted file mode 100644
index a110de2..0000000
--- a/backend/models/order.model.js
+++ /dev/null
@@ -1,93 +0,0 @@
-import mongoose, { Schema } from "mongoose";
-
-const orderSchema = new Schema({
- seller_id: {
- type: mongoose.Schema.Types.ObjectId,
- ref: "User",
- required: true,
- },
- buyer_id: {
- type: mongoose.Schema.Types.ObjectId,
- ref: "User",
- required: true,
- },
- product_id: {
- type: mongoose.Schema.Types.ObjectId,
- ref: "Product",
- required: true,
- },
- quote_id: {
- type: mongoose.Schema.Types.ObjectId,
- ref: "Quote",
- required: true,
- },
- logistics_id: {
- type: mongoose.Schema.Types.ObjectId,
- ref: "Logistics",
- required: true,
- },
- payment_id: {
- type: mongoose.Schema.Types.ObjectId,
- ref: "Payment",
- required: true,
- },
- quantity: {
- type: Number,
- required: true,
- },
- price: {
- type: Number,
- required: true,
- },
- status: {
- type: String,
- enum: ["pending", "processing", "shipped", "delivered", "cancelled"],
- default: "pending",
- },
- shiping_address: {
- address_lane1: {
- type: String,
- required: true,
- },
- city:{
- type: String,
- required: true,
- },
- state:{
- type: String,
- required: true,
- },
- pincode:{
- type: String,
- required: true,
- },
- country:{
- type: String,
- required: true,
- },
- },
- billing_address: {
- address_lane1: {
- type: String,
- required: true,
- },
- city:{
- type: String,
- required: true,
- },
- state:{
- type: String,
- required: true,
- },
- pincode:{
- type: String,
- required: true,
- },
- country:{
- type: String,
- required: true,
- },
- },
-});
-
-export const Order = mongoose.model('Order', orderSchema);
diff --git a/backend/package-lock.json b/backend/package-lock.json
index b74b1bb..21bb35d 100644
--- a/backend/package-lock.json
+++ b/backend/package-lock.json
@@ -17,6 +17,7 @@
"bcrypt": "^5.1.1",
"canvas": "^3.1.0",
"child_process": "^1.0.2",
+ "cloudinary": "^2.5.1",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
"dotenv": "^16.4.7",
@@ -1895,6 +1896,19 @@
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
"license": "ISC"
},
+ "node_modules/cloudinary": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/cloudinary/-/cloudinary-2.5.1.tgz",
+ "integrity": "sha512-CNg6uU53Hl4FEVynkTGpt5bQEAQWDHi3H+Sm62FzKf5uQHipSN2v7qVqS8GRVqeb0T1WNV+22+75DOJeRXYeSQ==",
+ "license": "MIT",
+ "dependencies": {
+ "lodash": "^4.17.21",
+ "q": "^1.5.1"
+ },
+ "engines": {
+ "node": ">=9"
+ }
+ },
"node_modules/color-support": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
@@ -2018,7 +2032,6 @@
"version": "1.4.7",
"resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.7.tgz",
"integrity": "sha512-nGUvgXnotP3BsjiLX2ypbQnWoGUPIIfHQNZkkC668ntrzGWEZVW70HDEB1qnNGMicPje6EttlIgzo51YSwNQGw==",
- "license": "MIT",
"dependencies": {
"cookie": "0.7.2",
"cookie-signature": "1.0.6"
@@ -3107,6 +3120,12 @@
"node": ">=12.0.0"
}
},
+ "node_modules/lodash": {
+ "version": "4.17.21",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
+ "license": "MIT"
+ },
"node_modules/lodash.includes": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
@@ -3857,6 +3876,17 @@
"node": ">=6"
}
},
+ "node_modules/q": {
+ "version": "1.5.1",
+ "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz",
+ "integrity": "sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==",
+ "deprecated": "You or someone you depend on is using Q, the JavaScript Promise library that gave JavaScript developers strong feelings about promises. They can almost certainly migrate to the native JavaScript promise now. Thank you literally everyone for joining me in this bet against the odds. Be excellent to each other.\n\n(For a CapTP with native promises, see @endo/eventual-send and @endo/captp)",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.6.0",
+ "teleport": ">=0.2.0"
+ }
+ },
"node_modules/qs": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
diff --git a/backend/package.json b/backend/package.json
index bf6f8f4..3345997 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -19,6 +19,7 @@
"bcrypt": "^5.1.1",
"canvas": "^3.1.0",
"child_process": "^1.0.2",
+ "cloudinary": "^2.5.1",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
"dotenv": "^16.4.7",
diff --git a/backend/prisma/migrations/20250203170942_add_kyc_field/migration.sql b/backend/prisma/migrations/20250203170942_add_kyc_field/migration.sql
new file mode 100644
index 0000000..fea1734
--- /dev/null
+++ b/backend/prisma/migrations/20250203170942_add_kyc_field/migration.sql
@@ -0,0 +1,4 @@
+-- AlterTable
+ALTER TABLE "User" ADD COLUMN "is_company_docs_done" BOOLEAN NOT NULL DEFAULT false,
+ADD COLUMN "is_kyc_done" BOOLEAN NOT NULL DEFAULT false,
+ADD COLUMN "is_personal_docs_done" BOOLEAN NOT NULL DEFAULT false;
diff --git a/backend/prisma/schema.prisma b/backend/prisma/schema.prisma
index e0f5670..ccceb13 100644
--- a/backend/prisma/schema.prisma
+++ b/backend/prisma/schema.prisma
@@ -25,6 +25,9 @@ model User {
order_seller Order[] @relation(name: "SellerRelation")
userReview UserReview[] @relation(name: "ReviewReceiver")
userReviwed UserReview[] @relation(name: "ReviewGiver")
+ is_kyc_done Boolean @default(false)
+ is_personal_docs_done Boolean @default(false)
+ is_company_docs_done Boolean @default(false)
}
model UserProfile {
diff --git a/backend/processedImage.jpg b/backend/processedImage.jpg
new file mode 100644
index 0000000..16d813c
Binary files /dev/null and b/backend/processedImage.jpg differ
diff --git a/backend/server.js b/backend/server.js
index 209f335..507758b 100644
--- a/backend/server.js
+++ b/backend/server.js
@@ -1,5 +1,4 @@
import 'dotenv/config';
-
import express from "express";
import prisma from "./src/config/prisma_db.js";
import connectMongoDB from "./src/config/mongo_db.js";
@@ -11,11 +10,14 @@ import cookieParser from "cookie-parser";
import cors from "cors";
import kycRouter from "./src/routes/kyc.js";
// import { extractText } from "./src/microservices/kyc/aadhaar.js";
+
// payment routes
import PaymentRoutes from "./src/routes/PaymentRoutes.js";
// chat routes
import ChatRoutes from "./src/routes/ChatRoutes.js";
+import DocUploadRoutes from "./src/routes/doc_upload.js"
+
import {
verifyProduct,
upload,
@@ -45,7 +47,8 @@ connectMongoDB();
app.use(
cors({
- origin: ['http://localhost:5173'], // Allow all origins
+ origin: ["http://localhost:5173", "http://localhost:5174"],
+ exposedHeaders: ['set-cookie'],
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS"],
allowedHeaders: ["Content-Type", "Authorization"],
credentials: true // Allow cookies, if needed
@@ -66,7 +69,7 @@ app.use('/user-review',reviewRouter)
app.use("/payment", PaymentRoutes);
app.use('/chat', ChatRoutes);
-
+app.use("/docs",DocUploadRoutes);
// Start server
@@ -98,4 +101,4 @@ const gracefulShutdown = async () => {
// Handle shutdown signals
process.on("SIGTERM", gracefulShutdown);
-process.on("SIGINT", gracefulShutdown);
+process.on("SIGINT", gracefulShutdown);
\ No newline at end of file
diff --git a/backend/src/controllers/auth.js b/backend/src/controllers/auth.js
index cd5e54e..c51b3d2 100644
--- a/backend/src/controllers/auth.js
+++ b/backend/src/controllers/auth.js
@@ -17,6 +17,7 @@ export const register = async (req, res) => {
is_seller,
}
})
+ user.logged_in_as = role;
console.log(user);
@@ -51,17 +52,36 @@ export const register = async (req, res) => {
export const login = async (req, res) => {
try{
+ console.log(req.body);
const {email, password} = req.body;
console.log(email,password);
+ const role = req.body.role ? req.body.role : "buyer";
+ // const role = "buyer";
const user = await prisma.user.findUnique({
- where: { email }
+ where: { email },include:{
+ profile: true
+ }
})
-
+ // console.log(user);
if (!user || !(await bcrypt.compare(password, user.passwordHash))) {
return res.status(401).json({ error: 'Invalid credentials' });
}
-
+ // console.log(user);
+
+ // const profileData = await prisma.user.findUnique({
+ // where: {
+ // id: user.id
+ // },
+ // include:{
+ // profile: true
+ // }
+ // })
+ // destructure profile data to user doc itself
+ // user = {...user, ...profileData};
+ user.logged_in_as = role;
+ console.log(user);
+
const is_buyer = user.role === "buyer"
const token = jwt.sign({
@@ -76,10 +96,12 @@ export const login = async (req, res) => {
httpOnly: true,
}
+ console.log(token);
res.status(201).cookie('token', token, options).json({
success: true,
msg: 'User logged in',
user,
+ logged_in_as : role,
token
});
diff --git a/backend/src/controllers/profile.js b/backend/src/controllers/profile.js
index 8123da6..b1699ad 100644
--- a/backend/src/controllers/profile.js
+++ b/backend/src/controllers/profile.js
@@ -29,6 +29,7 @@ export const getProfile = async (req, res) => {
}
export const updateProfile = async (req,res) =>{
+ console.log(req.body);
const {companyName, address, GST, phoneNo,name}= req.body;
try{
const id= req.id
@@ -65,9 +66,10 @@ export const updateProfile = async (req,res) =>{
updatedUser
})
}catch(e){
+ console.log(e);
res.status(500).json({
success: false,
- error: 'Some error occurred: ' + error.message
+ error: 'Some error occurred: ' + e.message
})
}
}
\ No newline at end of file
diff --git a/backend/src/microservices/Ankur/DocUploadService/index.js b/backend/src/microservices/Ankur/DocUploadService/index.js
index 7ed2bb0..5a8c9bb 100644
--- a/backend/src/microservices/Ankur/DocUploadService/index.js
+++ b/backend/src/microservices/Ankur/DocUploadService/index.js
@@ -8,12 +8,15 @@ const morgan = require('morgan');
const helmet = require('helmet');
const path = require('path');
const { findAvailablePort } = require('./config/service.config');
+const cors = require("cors"); // required: app.use(cors()) below references this
dotenv.config();
const app = express();
const PORT = process.env.PORT || 3001;
+app.use(cors())
+
// Security middleware
app.use(helmet({
contentSecurityPolicy: false // This allows loading of resources
diff --git a/backend/src/microservices/Ankur/DocUploadService/public/index.html b/backend/src/microservices/Ankur/DocUploadService/public/index.html
index 8ce107e..1d8b9c5 100644
--- a/backend/src/microservices/Ankur/DocUploadService/public/index.html
+++ b/backend/src/microservices/Ankur/DocUploadService/public/index.html
@@ -411,7 +411,9 @@
Your Documents
const formData = new FormData(event.target);
try {
- const response = await fetch('/api/documents', {
+ console.log("Helooooooooooooooooooo");
+
+ const response = await fetch('http://127.0.0.1:3005/api/documents', {
method: 'POST',
headers: {
'X-User-Id': TEST_USER.id,
@@ -419,6 +421,7 @@ Your Documents
},
body: formData
});
+ console.log(response);
if (!response.ok) {
throw new Error('Upload failed');
diff --git a/backend/src/microservices/Ankur/DocUploadService/routes/upload.routes.js b/backend/src/microservices/Ankur/DocUploadService/routes/upload.routes.js
index 20c7bec..87ac1b0 100644
--- a/backend/src/microservices/Ankur/DocUploadService/routes/upload.routes.js
+++ b/backend/src/microservices/Ankur/DocUploadService/routes/upload.routes.js
@@ -43,6 +43,8 @@ const router = express.Router();
router.post('/', auth, upload.array('files', 1), async (req, res) => {
try {
const { documentType, documentNumber } = req.body;
+ console.log(req.body);
+
const files = req.files;
if (!files || files.length === 0) {
@@ -52,7 +54,9 @@ router.post('/', auth, upload.array('files', 1), async (req, res) => {
if (!documentType || !documentNumber) {
return res.status(400).json({ error: 'Document type and number are required' });
}
-
+ console.log("Worlddddddddddddddddddddddddddddddd");
+ console.log(req.user._id, req.user.email);
+
const document = await uploadService.uploadFiles(
files,
req.user._id,
@@ -60,12 +64,15 @@ router.post('/', auth, upload.array('files', 1), async (req, res) => {
{ documentType, documentNumber }
);
+ // console.log(document);
+
+
// Clean up uploaded files
- files.forEach(file => {
- fs.unlink(file.path, err => {
- if (err) console.error('Error deleting temporary file:', err);
- });
- });
+ // files.forEach(file => {
+ // fs.unlink(file.path, err => {
+ // if (err) console.error('Error deleting temporary file:', err);
+ // });
+ // });
res.status(201).json(document);
} catch (error) {
diff --git a/backend/src/microservices/Ankur/DocUploadService/services/upload.service.js b/backend/src/microservices/Ankur/DocUploadService/services/upload.service.js
index f0f2ee6..6921ec5 100644
--- a/backend/src/microservices/Ankur/DocUploadService/services/upload.service.js
+++ b/backend/src/microservices/Ankur/DocUploadService/services/upload.service.js
@@ -6,6 +6,7 @@ class UploadService {
async uploadFiles(files, userId, userEmail, documentDetails) {
try {
// First, find or create user document record
+ console.log("Yayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy");
let userDocs = await Document.findOne({ userId });
if (!userDocs) {
@@ -26,7 +27,8 @@ class UploadService {
unique_filename: true,
overwrite: true
});
-
+ console.log(result);
+
// Update the specific document type
userDocs.documents[documentDetails.documentType] = {
documentNumber: documentDetails.documentNumber,
@@ -39,6 +41,8 @@ class UploadService {
verificationComments: null,
verifiedAt: null
};
+ console.log(userDocs);
+
await userDocs.save();
return userDocs.documents[documentDetails.documentType];
diff --git a/backend/src/microservices/Anshul/README.md b/backend/src/microservices/Anshul/README.md
index 0283442..1cab813 100644
--- a/backend/src/microservices/Anshul/README.md
+++ b/backend/src/microservices/Anshul/README.md
@@ -1,9 +1,46 @@
-# API Documentation
+# CrossWave API Documentation
## Product Routes
+- Base Path: `/api/product`
-### Add Product
-- **Endpoint:** `POST /add-product`
+| Method | Endpoint | Description |
+|--------|----------|-------------|
+| POST | `/add-product` | Add new product with images |
+| GET | `/get-product` | Get all products |
+| GET | `/get-user-product/:seller` | Get products by seller |
+| DELETE | `/remove-product/:productId` | Delete product |
+| PUT | `/update-product/:productId` | Update product details |
+
+## Review Routes
+- Base Path: `/api/review`
+
+| Method | Endpoint | Description |
+|--------|----------|-------------|
+| POST | `/user-review/:reviewee_id` | Add user review |
+| GET | `/get-user-reviews/:reviewee_id` | Get reviews for user |
+| GET | `/user-avg-rating/:reviewee_id` | Get user average rating |
+| POST | `/product-review/:product_id` | Add product review |
+| GET | `/get-product-reviews/:product_id` | Get reviews for product |
+| GET | `/product-avg-rating/:product_id` | Get product average rating |
+
+## Order Routes
+- Base Path: `/api/order`
+
+| Method | Endpoint | Description |
+|--------|----------|-------------|
+| POST | `/addorder/:product_id` | Create new order |
+| PUT | `/update-status/:id` | Update order status |
+| GET | `/get-orders-buyer/:buyer_id` | Get buyer's orders |
+| GET | `/get-orders-seller/:seller_id` | Get seller's orders |
+
+## Detailed Route Documentation
+
+### Product Routes
+
+#### Add Product
+- **Endpoint:** `POST /api/product/add-product`
+- **Auth:** Required
+- **Content-Type:** multipart/form-data
- **Middleware:** `multer` (max 10 images)
- **Request Body:**
```json
@@ -39,7 +76,7 @@
}
```
-### Get All Products
+#### Get All Products
- **Endpoint:** `GET /get-product`
- **Response (200):**
```json
@@ -53,7 +90,7 @@
]
```
-### Get User Products
+#### Get User Products
- **Endpoint:** `GET /get-user-product/:seller`
- **Parameters:** `seller` (seller ID)
- **Response (200):**
@@ -67,7 +104,7 @@
]
```
-### Remove Product
+#### Remove Product
- **Endpoint:** `DELETE /remove-product/:productId`
- **Parameters:** `productId`
- **Response (200):**
@@ -77,7 +114,7 @@
}
```
-### Update Product
+#### Update Product
- **Endpoint:** `PUT /update-product/:productId`
- **Parameters:** `productId`
- **Request Body:**
@@ -90,9 +127,9 @@
}
```
-## Review Routes
+### Review Routes
-### Add Review
+#### Add Review
- **Endpoint:** `POST /review/:reviewee_id`
- **Parameters:** `reviewee_id`
- **Request Body:**
@@ -114,7 +151,7 @@
}
```
-### Get Reviews
+#### Get Reviews
- **Endpoint:** `GET /get-reviews/:reviewee_id`
- **Parameters:** `reviewee_id`
- **Response (200):**
@@ -128,7 +165,7 @@
]
```
-### Get Average Rating
+#### Get Average Rating
- **Endpoint:** `GET /avg-rating/:reviewee_id`
- **Parameters:** `reviewee_id`
- **Response (200):**
@@ -138,9 +175,9 @@
}
```
-## Order Routes
+### Order Routes
-### Create Order
+#### Create Order
- **Endpoint:** `POST /addorder`
- **Request Body:**
```json
@@ -170,7 +207,7 @@
}
```
-### Update Order Status
+#### Update Order Status
- **Endpoint:** `PUT /update-status/:id`
- **Parameters:** `id` (order ID)
- **Request Body:**
@@ -180,7 +217,7 @@
}
```
-### Get Buyer Orders
+#### Get Buyer Orders
- **Endpoint:** `GET /get-orders-buyer/:buyer_id`
- **Parameters:** `buyer_id`
- **Response (200):**
@@ -195,7 +232,7 @@
]
```
-### Get Seller Orders
+#### Get Seller Orders
- **Endpoint:** `GET /get-orders-seller/:seller_id`
- **Parameters:** `seller_id`
- **Response:** Same as buyer orders
diff --git a/backend/src/microservices/Anshul/app.js b/backend/src/microservices/Anshul/app.js
index 248d3f2..b63ab10 100644
--- a/backend/src/microservices/Anshul/app.js
+++ b/backend/src/microservices/Anshul/app.js
@@ -6,6 +6,7 @@ const app = express();
import orderRouter from './routes/order.routes.js'
import reviewRouter from './routes/review.routes.js'
import productRouter from './routes/product.routes.js'
+import logisticsRouter from './routes/logistics.routes.js'
app.use(
cors({
@@ -22,5 +23,6 @@ app.use(express.static("public"));
app.use('/orders',orderRouter)
app.use('/reviews',reviewRouter)
app.use('/products',productRouter)
+app.use('/logistics',logisticsRouter)
export { app };
diff --git a/backend/src/microservices/Anshul/controllers/logistics.controller.js b/backend/src/microservices/Anshul/controllers/logistics.controller.js
new file mode 100644
index 0000000..ee9387b
--- /dev/null
+++ b/backend/src/microservices/Anshul/controllers/logistics.controller.js
@@ -0,0 +1,279 @@
+import axios from "axios";
+import { Logistics } from "../models/logistics.model.js";
+
+export const createShipment = async (req, res, next) => {
+ try {
+ const { body } = req;
+
+ const accessToken = req.shipmentAuthToken;
+ if (!accessToken) {
+ return res.status(401).json({ message: "Unauthorized" });
+ }
+
+ const shipment = await axios.post(
+ "https://apis-sandbox.fedex.com/ship/v1/shipments",
+ body,
+ {
+ headers: {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ },
+ }
+ );
+
+ const data = shipment?.data?.output?.transactionShipments[0];
+ if (!data) {
+ throw new Error("Failed to create shipment");
+ }
+
+ console.log(data.completedPackageDetails);
+
+
+ const newShipment = await new Logistics({
+ trackingNumber: data.masterTrackingNumber,
+
+ carrierCode: data?.completedShipmentDetail?.carrierCode,
+
+ serviceId: data.completedShipmentDetail?.serviceDescription?.serviceId,
+
+ serviceType: data.serviceType,
+
+ serviceCategory: data.serviceCategory,
+
+ totalBillingWeight:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalBillingWeight,
+
+ surcharges: data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.surcharges,
+
+ totalBaseCharge:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalBaseCharge,
+
+ totalNetCharge: data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalNetCharge,
+
+ totalFreightDiscounts:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalFreightDiscounts,
+
+ totalNetFreight:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalNetFreight,
+
+ totalSurcharges:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalSurcharges,
+
+ totalNetFedExCharge:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalNetFedExCharge,
+
+ totalTaxes: data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalTaxes,
+
+ totalRebates: data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalRebates,
+
+ totalDutiesAndTaxes:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalDutiesAndTaxes,
+
+ totalAncillaryFeesAndTaxes:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalAncillaryFeesAndTaxes,
+
+ totalDutiesTaxesAndFees:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]?.totalDutiesTaxesAndFees,
+
+ totalNetChargeWithDutiesAndTaxes:
+ data.completedShipmentDetail?.shipmentRating?.shipmentRateDetails[0]
+ ?.totalNetChargeWithDutiesAndTaxes,
+
+ trackingIds: data.completedShipmentDetail?.completedPackageDetails[0]?.trackingIds,
+ });
+
+ const result = await newShipment.save();
+
+ res.status(201).json({
+ success: true,
+ result,
+ });
+ } catch (error) {
+ next(error);
+ }
+};
+
+export const cancelShipment = async (req, res, next) => {
+ try {
+ const accessToken = req.shipmentAuthToken;
+ if (!accessToken) {
+ return res.status(401).json({ message: "Unauthorized" });
+ }
+
+ const { accountNumber, trackingNumber } = req.body;
+ if (!accountNumber || !trackingNumber) {
+ return res.status(400).json({ message: "Missing required fields" });
+ }
+
+ const shipment = await axios.put(
+ "https://apis-sandbox.fedex.com/ship/v1/shipments/cancel",
+ {
+ accountNumber,
+ trackingNumber,
+ },
+ {
+ headers: {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ },
+ }
+ );
+
+ res.status(200).json({
+ success: true,
+ data: shipment.data.output,
+ });
+ } catch (error) {
+ next(error);
+ }
+};
+
+// export const getAsyncShipment = async (req, res, next) => {
+// try {
+// const accessToken = req.shipmentAuthToken;
+// if (!accessToken) {
+// return res.status(401).json({ message: "Unauthorized" });
+// }
+
+// const { accountNumber, jobId } = req.body;
+
+// if (!accountNumber || !jobId) {
+// return res.status(400).json({ message: "Missing required fields" });
+// }
+
+// const shipment = await axios.post(
+// `https://apis-sandbox.fedex.com/ship/v1/shipments/results`,
+// {
+// accountNumber,
+// jobId,
+// },
+// {
+// headers: {
+// Authorization: `Bearer ${accessToken}`,
+// "Content-Type": "application/json",
+// },
+// }
+// );
+
+// res.status(200).json({
+// success: true,
+// data: shipment.data.output,
+// });
+// } catch (err) {
+// console.log(err);
+// }
+// };
+
+// export const verifyShipment = async (req, res, next) => {
+// try {
+// const accessToken = req.shipmentAuthToken;
+// if (!accessToken) {
+// return res.status(401).json({ message: "Unauthorized" });
+// }
+
+// const { body } = req;
+
+// const shipment = await axios.post(
+// "https://apis-sandbox.fedex.com/ship/v1/shipments/packages/validate",
+// body,
+// {
+// headers: {
+// Authorization: `Bearer ${accessToken}`,
+// "Content-Type": "application/json",
+// },
+// }
+// );
+
+// res.status(200).json({
+// success: true,
+// data: shipment.data.output,
+// });
+// } catch (err) {
+// console.log(err);
+// }
+// };
+
+export const returnShipment = async (req, res) => {
+ const accessToken = req.shipmentAuthToken;
+ if (!accessToken) {
+ return res.status(401).json({ message: "Unauthorized" });
+ }
+
+ const { body } = req;
+
+ const shipment = await axios.post(
+ "https://apis-sandbox.fedex.com/ship/v1/shipments/tag",
+ body,
+ {
+ headers: {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ },
+ }
+ );
+
+ res.status(200).json({
+ success: true,
+ data: shipment.data.output,
+ });
+};
+
+export const createPickup = async (req, res) => {
+ const accessToken = req.shipmentAuthToken;
+ if (!accessToken) {
+ return res.status(401).json({ message: "Unauthorized" });
+ }
+
+ const { body } = req;
+ try {
+ const pickup = await axios.post(
+ "https://apis-sandbox.fedex.com/pickup/v1/pickups",
+ body,
+ {
+ headers: {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ },
+ }
+ );
+
+ const confirmationCode = pickup.data.output.pickupConfirmationCode;
+
+ res.status(200).json({
+ success: true,
+ data: pickup.data.output,
+ confirmationCode,
+ });
+ } catch (error) {
+ console.log(error);
+ }
+};
+
+export const cancelPickup = async (req, res) => {
+ const accessToken = req.shipmentAuthToken;
+ if (!accessToken) {
+ return res.status(401).json({ message: "Unauthorized" });
+ }
+
+ const { body } = req;
+
+ try {
+ const pickup = await axios.put(
+ "https://apis-sandbox.fedex.com/pickup/v1/pickups/cancel",
+ body,
+ {
+ headers: {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ },
+ }
+ );
+
+ res.status(200).json({
+ success: true,
+ data: pickup.data.output,
+ });
+ } catch (error) {
+ console.log(error);
+ }
+};
diff --git a/backend/src/microservices/Anshul/controllers/orders.controller.js b/backend/src/microservices/Anshul/controllers/orders.controller.js
index ba2055e..4219427 100644
--- a/backend/src/microservices/Anshul/controllers/orders.controller.js
+++ b/backend/src/microservices/Anshul/controllers/orders.controller.js
@@ -2,13 +2,11 @@ import { PrismaClient } from '@prisma/client'
const prisma = new PrismaClient()
-export const createOrder = async (req, res, next) => {
- const buyer_id = req.user._id;
- if (!buyer_id) {
- return res.status(401).json({ message: "Unauthorized" });
- }
-
+export const createOrder = async (req, res, next) => {
const {
+ buyer_id,
+ seller_id,
+ product_id,
quote_id,
logistics_id,
payment_id,
@@ -18,17 +16,6 @@ export const createOrder = async (req, res, next) => {
billing_address,
} = req.body;
- const {product_id} = req.params;
- const product = await prisma.product.findUnique({ product_id });
- if (!product) {
- return res.status(404).json({ message: "Product not found" });
- }
-
- const seller_id = product.seller;
- if (!seller_id) {
- return res.status(404).json({ message: "Seller not found" });
- }
-
if (
!quote_id ||
!logistics_id ||
@@ -43,9 +30,6 @@ export const createOrder = async (req, res, next) => {
if (buyer_id === seller_id) {
return res.status(400).json({ message: "Cannot place an order with yourself" });
}
- if (product.quantity < quantity) {
- return res.status(400).json({ message: "Insufficient quantity" });
- }
try {
const order = await prisma.order.create({
diff --git a/backend/src/microservices/Anshul/middlewares/logisticsAuth.middleware.js b/backend/src/microservices/Anshul/middlewares/logisticsAuth.middleware.js
new file mode 100644
index 0000000..edccfc1
--- /dev/null
+++ b/backend/src/microservices/Anshul/middlewares/logisticsAuth.middleware.js
@@ -0,0 +1,25 @@
+import axios from "axios";
+
+export const logisticsAuth = async (req, res, next) => {
+ try {
+ const fedexResponse = await axios.post(
+ "https://apis-sandbox.fedex.com/oauth/token",
+ {
+ grant_type: "client_credentials",
+        client_id: process.env.FEDEX_CLIENT_ID,
+        client_secret: process.env.FEDEX_CLIENT_SECRET,
+ },
+ {
+ headers: {
+ "Content-Type": "application/x-www-form-urlencoded",
+ },
+ }
+ );
+ const accessToken = fedexResponse.data.access_token;
+ req.shipmentAuthToken = accessToken;
+ next();
+ } catch (error) {
+ console.error("Error authenticating with FedEx:", error);
+ res.status(500).json({ error: "Failed to authenticate with FedEx" });
+ }
+};
diff --git a/backend/src/microservices/Anshul/package-lock.json b/backend/src/microservices/Anshul/package-lock.json
index b6cc6d9..7d89bb8 100644
--- a/backend/src/microservices/Anshul/package-lock.json
+++ b/backend/src/microservices/Anshul/package-lock.json
@@ -10,6 +10,7 @@
"license": "ISC",
"dependencies": {
"@prisma/client": "^6.3.0",
+ "axios": "^1.7.9",
"bcrypt": "^5.1.1",
"body-parser": "^1.20.3",
"cookie-parser": "^1.4.7",
@@ -222,6 +223,21 @@
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="
},
+ "node_modules/asynckit": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
+ },
+ "node_modules/axios": {
+ "version": "1.7.9",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
+ "integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
+ "dependencies": {
+ "follow-redirects": "^1.15.6",
+ "form-data": "^4.0.0",
+ "proxy-from-env": "^1.1.0"
+ }
+ },
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
@@ -397,6 +413,17 @@
"color-support": "bin.js"
}
},
+ "node_modules/combined-stream": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
+ "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+ "dependencies": {
+ "delayed-stream": "~1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -525,6 +552,14 @@
"ms": "2.0.0"
}
},
+ "node_modules/delayed-stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
+ "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
"node_modules/delegates": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
@@ -718,6 +753,38 @@
"node": ">= 0.8"
}
},
+ "node_modules/follow-redirects": {
+ "version": "1.15.9",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz",
+ "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/RubenVerborgh"
+ }
+ ],
+ "engines": {
+ "node": ">=4.0"
+ },
+ "peerDependenciesMeta": {
+ "debug": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/form-data": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
+ "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
+ "dependencies": {
+ "asynckit": "^0.4.0",
+ "combined-stream": "^1.0.8",
+ "mime-types": "^2.1.12"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
"node_modules/forwarded": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
@@ -1681,6 +1748,11 @@
"node": ">= 0.10"
}
},
+ "node_modules/proxy-from-env": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
+ "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
+ },
"node_modules/pstree.remy": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz",
diff --git a/backend/src/microservices/Anshul/package.json b/backend/src/microservices/Anshul/package.json
index d0b600a..5d2ebff 100644
--- a/backend/src/microservices/Anshul/package.json
+++ b/backend/src/microservices/Anshul/package.json
@@ -13,6 +13,7 @@
"description": "",
"dependencies": {
"@prisma/client": "^6.3.0",
+ "axios": "^1.7.9",
"bcrypt": "^5.1.1",
"body-parser": "^1.20.3",
"cookie-parser": "^1.4.7",
diff --git a/backend/src/microservices/Anshul/routes/logistics.routes.js b/backend/src/microservices/Anshul/routes/logistics.routes.js
new file mode 100644
index 0000000..db95e16
--- /dev/null
+++ b/backend/src/microservices/Anshul/routes/logistics.routes.js
@@ -0,0 +1,15 @@
+import {Router} from 'express';
+import { cancelPickup, cancelShipment, createPickup, createShipment, getAsyncShipment, returnShipment, verifyShipment } from '../controllers/logistics.controller.js';
+import { logisticsAuth } from '../middlewares/logisticsAuth.middleware.js';
+
+const router = Router();
+
+router.route('/create-shipment').post(logisticsAuth,createShipment)
+router.route('/cancel-shipment').put(logisticsAuth,cancelShipment)
+router.route('/get-async-ship').post(logisticsAuth,getAsyncShipment)
+router.route('/verify-shipment').post(logisticsAuth,verifyShipment)
+router.route('/return-shipment').post(logisticsAuth,returnShipment)
+router.route('/create-pickup').post(logisticsAuth,createPickup)
+router.route('/cancel-pickup').put(logisticsAuth,cancelPickup)
+
+export default router;
\ No newline at end of file
diff --git a/backend/src/microservices/DocUpload/config/cloudinary.config.js b/backend/src/microservices/DocUpload/config/cloudinary.config.js
new file mode 100644
index 0000000..e185100
--- /dev/null
+++ b/backend/src/microservices/DocUpload/config/cloudinary.config.js
@@ -0,0 +1,26 @@
+// const cloudinary = require("cloudinary").v2;
+// const dotenv = require("dotenv");
+
+import { v2 as cloudinary } from "cloudinary";
+import dotenv from "dotenv";
+// const cloudinary = await import("cloudinary").then((mod) => mod.v2);
+
+dotenv.config();
+
+cloudinary.config({
+ cloud_name: process.env.CLOUDINARY_CLOUD_NAME,
+ api_key: process.env.CLOUDINARY_API_KEY,
+ api_secret: process.env.CLOUDINARY_API_SECRET,
+ secure: true,
+});
+
+// Configure default delivery settings
+cloudinary.config({
+ secure: true,
+ secure_distribution: null,
+ private_cdn: false,
+ cname: null,
+});
+
+// module.exports = cloudinary;
+export default cloudinary;
diff --git a/backend/src/microservices/DocUpload/config/database.js b/backend/src/microservices/DocUpload/config/database.js
new file mode 100644
index 0000000..5cfddaf
--- /dev/null
+++ b/backend/src/microservices/DocUpload/config/database.js
@@ -0,0 +1,64 @@
+const mongoose = require('mongoose');
+
+const connectDB = async () => {
+ try {
+ // Connection options
+ const options = {
+ useNewUrlParser: true,
+ useUnifiedTopology: true,
+ serverSelectionTimeoutMS: 5000,
+ // Additional options
+ autoIndex: true, // Build indexes
+ maxPoolSize: 10, // Maintain up to 10 socket connections
+ connectTimeoutMS: 10000, // Give up initial connection after 10 seconds
+ socketTimeoutMS: 45000, // Close sockets after 45 seconds of inactivity
+ };
+
+ const conn = await mongoose.connect(process.env.MONGODB_URI, options);
+
+ console.log(`MongoDB Connected: ${conn.connection.host}`);
+
+ // Connection event handlers
+ mongoose.connection.on('error', err => {
+ console.error(`MongoDB connection error: ${err}`);
+ });
+
+ mongoose.connection.on('disconnected', () => {
+ console.warn('MongoDB disconnected. Attempting to reconnect...');
+ });
+
+ mongoose.connection.on('reconnected', () => {
+ console.info('MongoDB reconnected');
+ });
+
+ // Graceful shutdown
+ process.on('SIGINT', async () => {
+ try {
+ await mongoose.connection.close();
+ console.log('MongoDB connection closed through app termination');
+ process.exit(0);
+ } catch (err) {
+ console.error('Error during MongoDB connection closure:', err);
+ process.exit(1);
+ }
+ });
+
+ } catch (error) {
+ console.error(`Error connecting to MongoDB: ${error.message}`);
+ process.exit(1);
+ }
+};
+
+// Optional: Add utility functions
+const isConnected = () => mongoose.connection.readyState === 1;
+
+const getConnectionState = () => {
+ const states = ['disconnected', 'connected', 'connecting', 'disconnecting'];
+ return states[mongoose.connection.readyState];
+};
+
+module.exports = {
+ connectDB,
+ isConnected,
+ getConnectionState
+};
\ No newline at end of file
diff --git a/backend/src/microservices/DocUpload/config/service.config.js b/backend/src/microservices/DocUpload/config/service.config.js
new file mode 100644
index 0000000..0dbd2c4
--- /dev/null
+++ b/backend/src/microservices/DocUpload/config/service.config.js
@@ -0,0 +1,49 @@
+const net = require('net');
+
+const findAvailablePort = (startPort) => {
+ return new Promise((resolve, reject) => {
+ const server = net.createServer();
+ server.unref();
+ server.on('error', (err) => {
+ if (err.code === 'EADDRINUSE') {
+ findAvailablePort(startPort + 1).then(resolve, reject);
+ } else {
+ reject(err);
+ }
+ });
+ server.listen(startPort, () => {
+ const { port } = server.address();
+ server.close(() => {
+ resolve(port);
+ });
+ });
+ });
+};
+
+const config = {
+ service: {
+ name: 'document-service',
+ version: '1.0.0',
+ },
+ cors: {
+ origins: process.env.ALLOWED_ORIGINS ? process.env.ALLOWED_ORIGINS.split(',') : ['http://localhost:3000'],
+ methods: ['GET', 'POST', 'DELETE', 'OPTIONS'],
+ allowedHeaders: ['Content-Type', 'X-User-Id', 'X-User-Email', 'Authorization'],
+ },
+ upload: {
+ maxFileSize: 10 * 1024 * 1024, // 10MB
+ allowedTypes: ['image/jpeg', 'image/png', 'application/pdf', 'application/msword'],
+ maxFiles: 10
+ },
+ endpoints: {
+ base: '/api/documents',
+ upload: '/upload',
+ get: '/list',
+ delete: '/:id'
+ }
+};
+
+module.exports = {
+ ...config,
+ findAvailablePort
+};
\ No newline at end of file
diff --git a/backend/src/microservices/DocUpload/controller/upload.controller.js b/backend/src/microservices/DocUpload/controller/upload.controller.js
new file mode 100644
index 0000000..1f00844
--- /dev/null
+++ b/backend/src/microservices/DocUpload/controller/upload.controller.js
@@ -0,0 +1,210 @@
+import express from "express";
+import multer from "multer";
+import fs from "fs";
+import path from "path";
+import uploadService from "../services/upload.service.js";
+
+// Configure multer for file upload
+const storage = multer.diskStorage({
+ destination: function (req, file, cb) {
+ const uploadDir = "uploads/";
+ // Create uploads directory if it doesn't exist
+ if (!fs.existsSync(uploadDir)) {
+ fs.mkdirSync(uploadDir);
+ }
+ cb(null, uploadDir);
+ },
+ filename: function (req, file, cb) {
+ // Create unique filename
+ cb(null, Date.now() + "-" + file.originalname);
+ },
+});
+
+export const upload = multer({
+ storage: storage,
+ limits: {
+ fileSize: 10 * 1024 * 1024, // 10MB limit
+ },
+ fileFilter: (req, file, cb) => {
+ // Add allowed file types
+ const allowedTypes = [
+ "image/jpeg",
+ "image/png",
+ "application/pdf",
+ "application/msword",
+ ];
+ if (allowedTypes.includes(file.mimetype)) {
+ cb(null, true);
+ } else {
+ cb(new Error("Invalid file type"));
+ }
+ },
+});
+
+export const uploadDocument = async (req, res) => {
+ try {
+ const { documentType } = req.body;
+ const files = req.files;
+
+ if (!files || files.length === 0) {
+ return res.status(400).json({ error: "No file uploaded" });
+ }
+
+ if (!documentType) {
+ return res
+ .status(400)
+ .json({ error: "Document type and number are required" });
+ }
+ // console.log("Worlddddddddddddddddddddddddddddddd");
+ // console.log(req.user);
+
+ const document = await uploadService.uploadFiles(
+ files,
+ // req.user._id,
+ req.params.userId,
+ // req.user.email,
+ { documentType }
+ );
+
+ if (!document.verified) {
+ return res.status(400).json({ error: "Document verification failed" });
+ }
+
+ // Clean up uploaded files
+ // files.forEach(file => {
+ // fs.unlink(file.path, err => {
+ // if (err) console.error('Error deleting temporary file:', err);
+ // });
+ // });
+
+ res.status(201).json(document);
+ } catch (error) {
+ // Clean up files if upload fails
+ if (req.files) {
+ req.files.forEach((file) => {
+ fs.unlink(file.path, (err) => {
+ if (err) console.error("Error deleting temporary file:", err);
+ });
+ });
+ }
+ res.status(500).json({ error: error.message });
+ }
+};
+// Upload document
+// const uploadDoc = async (documentType) => {
+// try {
+// // const { documentType } = req.body;
+// console.log(req.body);
+
+// const files = req.files;
+
+// if (!files || files.length === 0) {
+// return res.status(400).json({ error: "No file uploaded" });
+// }
+
+// if (!documentType) {
+// return res
+// .status(400)
+// .json({ error: "Document type and number are required" });
+// }
+// console.log("Worlddddddddddddddddddddddddddddddd");
+// console.log(req.user);
+
+// const document = await uploadService.uploadFiles(
+// files,
+// // req.user._id,
+// req.body.userId,
+// // req.user.email,
+// { documentType }
+// );
+
+// // console.log(document);
+
+// // Clean up uploaded files
+// // files.forEach(file => {
+// // fs.unlink(file.path, err => {
+// // if (err) console.error('Error deleting temporary file:', err);
+// // });
+// // });
+
+// return document;
+// } catch (error) {
+// console.log(error);
+// return {};
+// }
+// };
+
+// Get all documents for a user
+export const getDocs = async (req, res) => {
+ try {
+ const documents = await uploadService.getAllDocumentsByUser(
+ req.params.userId
+ );
+ res.json(documents);
+ } catch (error) {
+ res.status(500).json({ error: error.message });
+ }
+};
+
+// Get document status
+export const getStatus = async (req, res) => {
+ try {
+    const status = await uploadService.getDocumentStatus(req.params.userId);
+ res.json(status);
+ } catch (error) {
+ res.status(500).json({ error: error.message });
+ }
+};
+
+// Delete a specific document type
+export const deleteDocByType = async (req, res) => {
+ try {
+ const { documentType } = req.body;
+ const userId = req.params.userId;
+ const result = await deleteDoc(documentType, userId);
+    res.json(result);
+ } catch (error) {
+ res.status(500).json({ error });
+ }
+};
+const deleteDoc = async (documentType, userId) => {
+ try {
+ const result = await uploadService.deleteDocument(documentType, userId);
+ return result;
+ } catch (error) {
+ return { error: error.message };
+ }
+};
+
+// Verify document (admin only)
+export const verifyDocByType = async (req, res) => {
+ try {
+ const { documentType } = req.body;
+ const userId = req.params.userId;
+ console.log(userId, documentType);
+ const document = await verifyDoc(userId, documentType);
+ res.json(document);
+ } catch (error) {
+ console.log(error);
+
+ res.status(500).json({ error });
+ }
+};
+const verifyDoc = async (
+ userId,
+ documentType,
+ status = "VERIFIED",
+ comments = "Good To Go!"
+) => {
+ try {
+ const document = await uploadService.verifyDocument(
+ userId,
+ documentType,
+ status,
+ comments
+ );
+ return document;
+ } catch (error) {
+ return { error: error };
+ }
+};
diff --git a/backend/src/microservices/DocUpload/index.js b/backend/src/microservices/DocUpload/index.js
new file mode 100644
index 0000000..739c7cd
--- /dev/null
+++ b/backend/src/microservices/DocUpload/index.js
@@ -0,0 +1,133 @@
+const express = require('express');
+const cors = require('cors');
+const dotenv = require('dotenv');
+const { connectDB } = require('./config/database');
+const uploadRoutes = require('./controller/upload.controller');
+const serviceConfig = require('./config/service.config');
+const morgan = require('morgan');
+const helmet = require('helmet');
+const path = require('path');
+const { findAvailablePort } = require('./config/service.config');
+// const cors = require( "cors");
+
+dotenv.config();
+
+const app = express();
+const PORT = process.env.PORT || 3001;
+
+app.use(cors())
+
+// Security middleware
+app.use(helmet({
+ contentSecurityPolicy: false // This allows loading of resources
+}));
+
+// CORS configuration
+app.use(cors({
+ origin: serviceConfig.cors.origins,
+ methods: serviceConfig.cors.methods,
+ allowedHeaders: serviceConfig.cors.allowedHeaders
+}));
+
+// Logging middleware
+app.use(morgan('combined'));
+
+// Basic middleware
+app.use(express.json());
+app.use(express.urlencoded({ extended: true }));
+
+// Serve static files from public directory
+app.use(express.static(path.join(__dirname, 'public')));
+
+// Health check endpoint
+app.get('/health', (req, res) => {
+ res.json({
+ service: serviceConfig.service.name,
+ version: serviceConfig.service.version,
+ status: 'healthy',
+ timestamp: new Date().toISOString()
+ });
+});
+
+// API documentation endpoint
+app.get('/api-docs', (req, res) => {
+ res.json({
+ service: serviceConfig.service.name,
+ version: serviceConfig.service.version,
+ endpoints: {
+ upload: {
+ path: `${serviceConfig.endpoints.base}${serviceConfig.endpoints.upload}`,
+ method: 'POST',
+ headers: ['X-User-Id', 'X-User-Email'],
+ body: 'multipart/form-data'
+ },
+ list: {
+ path: `${serviceConfig.endpoints.base}${serviceConfig.endpoints.get}`,
+ method: 'GET',
+ headers: ['X-User-Id', 'X-User-Email']
+ },
+ delete: {
+ path: `${serviceConfig.endpoints.base}${serviceConfig.endpoints.delete}`,
+ method: 'DELETE',
+ headers: ['X-User-Id', 'X-User-Email']
+ }
+ }
+ });
+});
+
+// Routes
+app.use('/api/documents', uploadRoutes);
+
+// Serve index.html for the root route
+app.get('/', (req, res) => {
+ res.sendFile(path.join(__dirname, 'public', 'index.html'));
+});
+
+// Error handling middleware
+app.use((err, req, res, next) => {
+ console.error(err.stack);
+ res.status(err.status || 500).json({
+ error: {
+ message: err.message || 'Internal Server Error',
+ code: err.code || 'INTERNAL_ERROR'
+ }
+ });
+});
+
+// Initialize Database Connection and Start Server
+const startServer = async () => {
+ try {
+ await connectDB();
+
+ const availablePort = await findAvailablePort(process.env.PORT || 3000);
+
+ const server = app.listen(availablePort, () => {
+ console.log(`${serviceConfig.service.name} is running on port ${availablePort}`);
+ }).on('error', (err) => {
+ if (err.code === 'EADDRINUSE') {
+ console.log(`Port ${availablePort} is busy, trying ${availablePort + 1}`);
+ server.listen(availablePort + 1);
+ } else {
+ console.error('Server error:', err);
+ }
+ });
+
+ // Graceful shutdown
+ process.on('SIGTERM', () => {
+ console.log('SIGTERM signal received: closing HTTP server');
+ server.close(() => {
+ console.log('HTTP server closed');
+ process.exit(0);
+ });
+ });
+
+ } catch (error) {
+ console.error('Failed to start server:', error);
+ process.exit(1);
+ }
+};
+
+startServer();
+
+// Export for testing
+module.exports = app;
\ No newline at end of file
diff --git a/backend/src/microservices/DocUpload/models/document.model.js b/backend/src/microservices/DocUpload/models/document.model.js
new file mode 100644
index 0000000..9e1fc79
--- /dev/null
+++ b/backend/src/microservices/DocUpload/models/document.model.js
@@ -0,0 +1,253 @@
+// const mongoose = require("mongoose");
+import mongoose from "mongoose";
+
+const documentSchema = new mongoose.Schema(
+ {
+ userId: {
+ type: String,
+ required: true,
+ unique: true,
+ },
+ userEmail: {
+ type: String,
+ // required: true
+ },
+ // documents: {
+ // cic: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // bis: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // coo: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // gstin: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // iec: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // shipping: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // tradeLicense: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // UAEcoo: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // UAEesma: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // UAEid: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // UAEvat: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // aadhaar: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // pan: {
+ // documentNumber: String,
+ // cloudinaryId: String,
+ // fileUrl: String,
+ // fileType: String,
+ // size: Number,
+ // status: {
+ // type: String,
+ // enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ // default: "NOT_UPLOADED",
+ // },
+ // verificationComments: String,
+ // uploadedAt: Date,
+ // verifiedAt: Date,
+ // },
+ // },
+ documents: {
+ type: Map, // This allows dynamic document types
+ of: {
+ // documentNumber: String,
+ cloudinaryId: String,
+ fileUrl: String,
+ fileType: String,
+ size: Number,
+ status: {
+ type: String,
+ enum: ["PENDING", "VERIFIED", "REJECTED", "NOT_UPLOADED"],
+ default: "NOT_UPLOADED",
+ },
+ verificationComments: String,
+ uploadedAt: Date,
+ verifiedAt: Date,
+ },
+ },
+ createdAt: {
+ type: Date,
+ default: Date.now,
+ },
+ updatedAt: {
+ type: Date,
+ default: Date.now,
+ },
+ },
+ {
+ timestamps: true, // Adds createdAt and updatedAt
+ strict: false, // Allows flexible schema
+ }
+);
+
+// Update timestamp on document changes
+documentSchema.pre("save", function (next) {
+ this.updatedAt = new Date();
+ next();
+});
+
+const Document = mongoose.model("Document", documentSchema);
+
+export default Document;
diff --git a/backend/src/microservices/DocUpload/public/index.html b/backend/src/microservices/DocUpload/public/index.html
new file mode 100644
index 0000000..1d8b9c5
--- /dev/null
+++ b/backend/src/microservices/DocUpload/public/index.html
@@ -0,0 +1,591 @@
+
+
+
+
+
+ Document Upload Service
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/backend/src/microservices/DocUpload/readme.md b/backend/src/microservices/DocUpload/readme.md
new file mode 100644
index 0000000..1c43185
--- /dev/null
+++ b/backend/src/microservices/DocUpload/readme.md
@@ -0,0 +1,207 @@
+# Business Document Management Service
+
+A microservice for handling business-related document uploads, storage, and verification using Cloudinary and MongoDB.
+
+## Required Documents
+
+1. Partnership Registration Number
+2. Certificate of Incorporation (CIN Number)
+3. Goods and Services Tax (GSTIN Number)
+4. PAN Number of Business/Sole Proprietor
+5. IEC (Import Export Number)
+6. MSME Number (Optional)
+7. DPIIT Number
+8. AD Code from the Authorized Bank
+
+## Features
+
+- Upload and manage business documents
+- Document verification status tracking
+- Secure cloud storage using Cloudinary
+- Document metadata management in MongoDB
+- Document number validation
+- Status tracking (PENDING, VERIFIED, REJECTED)
+- User-specific document management
+- Support for various file types (PDF, Images, etc.)
+- Integration-ready API
+
+## Prerequisites
+
+- Node.js (v14 or higher)
+- MongoDB
+- Cloudinary account
+
+## Installation
+
+1. Clone the repository:
+```bash
+git clone
+cd document-management-service
+```
+
+2. Install dependencies:
+```bash
+npm install
+```
+
+3. Create a `.env` file based on `.env.example`:
+```bash
+cp .env.example .env
+```
+
+4. Update the `.env` file with your credentials:
+```env
+CLOUDINARY_CLOUD_NAME=your_cloud_name
+CLOUDINARY_API_KEY=your_api_key
+CLOUDINARY_API_SECRET=your_secret
+MONGODB_URI=mongodb://127.0.0.1:27017/cloudinary-docs
+PORT=3001
+ALLOWED_ORIGINS=http://localhost:3001
+NODE_ENV=development
+LOG_LEVEL=debug
+```
+
+## Usage
+
+### Development
+```bash
+npm run dev
+```
+
+### Production
+```bash
+npm start
+```
+
+### Testing
+```bash
+npm test
+```
+
+## API Documentation
+
+### Upload Document
+```http
+POST /api/documents
+Headers:
+ X-User-Id: string
+ X-User-Email: string
+Body: multipart/form-data
+ - files: File
+ - documentType: enum[
+ 'PARTNERSHIP_REGISTRATION',
+ 'INCORPORATION_CERTIFICATE',
+ 'GST',
+ 'PAN',
+ 'IEC',
+ 'MSME',
+ 'DPIIT',
+ 'AD_CODE'
+ ]
+ - documentNumber: string
+```
+
+### Get All Documents
+```http
+GET /api/documents
+Headers:
+ X-User-Id: string
+ X-User-Email: string
+```
+
+### Get Document Status
+```http
+GET /api/documents/status
+Headers:
+ X-User-Id: string
+ X-User-Email: string
+```
+
+### Delete Document
+```http
+DELETE /api/documents/:documentType
+Headers:
+ X-User-Id: string
+ X-User-Email: string
+```
+
+### Verify Document (Admin Only)
+```http
+POST /api/documents/:documentType/verify
+Headers:
+ X-User-Id: string
+ X-User-Email: string
+Body:
+ - status: enum['VERIFIED', 'REJECTED']
+ - comments: string
+```
+
+## Document States
+
+- `NOT_UPLOADED`: Document hasn't been uploaded yet
+- `PENDING`: Document uploaded, waiting for verification
+- `VERIFIED`: Document has been verified by admin
+- `REJECTED`: Document was rejected by admin
+
+## Project Structure
+
+```
+project/
+├── config/
+│ ├── cloudinary.config.js
+│ ├── database.js
+│ └── service.config.js
+├── middleware/
+│ └── auth.js
+├── models/
+│ └── document.model.js
+├── routes/
+│ └── upload.routes.js
+├── services/
+│ └── upload.service.js
+├── public/
+│ └── index.html
+├── uploads/
+├── .env
+├── .env.example
+├── .gitignore
+├── index.js
+├── package.json
+└── README.md
+```
+
+## Security Features
+
+- CORS protection
+- Helmet security headers
+- File type validation
+- Size limits
+- User authentication via headers
+- Document verification workflow
+- Secure file storage
+
+## Error Handling
+
+All endpoints return standardized error responses:
+
+```json
+{
+ "error": {
+ "message": "Error description",
+ "code": "ERROR_CODE"
+ }
+}
+```
+
+## Contributing
+
+1. Fork the repository
+2. Create your feature branch (`git checkout -b feature/amazing-feature`)
+3. Commit your changes (`git commit -m 'Add some amazing feature'`)
+4. Push to the branch (`git push origin feature/amazing-feature`)
+5. Open a Pull Request
+
+## License
+
+This project is licensed under the MIT License - see the LICENSE file for details.
+
diff --git a/backend/src/microservices/DocUpload/services/upload.service.js b/backend/src/microservices/DocUpload/services/upload.service.js
new file mode 100644
index 0000000..ec12c60
--- /dev/null
+++ b/backend/src/microservices/DocUpload/services/upload.service.js
@@ -0,0 +1,221 @@
+import cloudinary from "../config/cloudinary.config.js";
+import Document from "../models/document.model.js";
+import fs from "fs";
+import { verifyDocument } from "../../kyc/document_verify/main.js";
+
+class UploadService {
+ async uploadFiles(files, userId, documentDetails) {
+ try {
+ // First, find or create user document record
+ // console.log("Yayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy");
+ let userDocs = await Document.findOne({ userId });
+
+ if (!userDocs) {
+ userDocs = new Document({
+ userId,
+ // userEmail,
+ documents: {},
+ });
+ }
+
+ const file = files[0]; // Since we're only handling one file at a time
+ // console.log("FILEPATH : ", file.path);
+
+ const verificationResult = await verifyDocument(
+ file.path,
+ documentDetails.documentType
+ );
+ console.log(verificationResult);
+ if (!verificationResult.verified) {
+ return {
+ verified: false,
+ };
+ }
+
+ // Upload file to Cloudinary
+ const result = await cloudinary.uploader.upload(file.path, {
+ resource_type: "auto",
+ folder: `documents/${userId}`,
+ use_filename: true,
+ unique_filename: true,
+ overwrite: true,
+ });
+ // console.log(result);
+
+ // Update the specific document type
+ // check if userDocs.documents exists. if not, create it
+ // if (!userDocs.documents) {
+ // userDocs.documents = {};
+ // }
+ // console.log(documentDetails.documentType);
+      // `documents` is a Mongoose Map — bracket access always yields undefined
+      if (!userDocs.documents.get(documentType)) {
+        userDocs.documents.set(documentType, {});
+
+ // userDocs.documents[documentDetails.documentType] = {
+ // // documentNumber: documentDetails.documentNumber,
+ // cloudinaryId: result.public_id,
+ // fileUrl: result.secure_url,
+ // fileType: file.mimetype,
+ // size: file.size,
+ // status: "PENDING",
+ // uploadedAt: new Date(),
+ // verificationComments: null,
+ // verifiedAt: null,
+ // };
+ let data = {
+ cloudinaryId: result.public_id,
+ fileUrl: result.secure_url,
+ fileType: file.mimetype,
+ size: file.size,
+ status: "VERIFIED",
+ uploadedAt: new Date(),
+ verificationComments: null,
+ verifiedAt: new Date(),
+ };
+ userDocs.set(`documents.${documentDetails.documentType}`, data);
+
+ console.log(userDocs);
+
+ await userDocs.save();
+ data["verified"] = true;
+ return data;
+ } catch (error) {
+ throw new Error(`Upload failed: ${error.message}`);
+ }
+ }
+
+ async getAllDocumentsByUser(userId) {
+ try {
+ console.log(userId);
+
+ const userDocs = await Document.findOne({ userId });
+ if (!userDocs) {
+ return {
+ userId,
+ documents: {},
+ };
+ }
+ return userDocs;
+ } catch (error) {
+ console.log(error);
+
+ throw new Error(`Failed to fetch documents: ${error.message}`);
+ }
+ }
+
+ async getDocumentStatus(userId) {
+ try {
+ const userDocs = await Document.findOne({ userId });
+
+ const requiredDocs = [
+ "cic",
+ "bis",
+ "coo",
+ "gstin",
+ "iec",
+ "shipping",
+ "tradeLicense",
+ "pan",
+ "aadhaar",
+ "UAEvat",
+ "UAEid",
+ "UAEesma",
+ "UAEcoo",
+ ];
+
+ const status = requiredDocs.map((docType) => {
+        const doc = userDocs?.documents?.get(docType);
+ return {
+ documentType: docType,
+ status: doc?.status || "NOT_UPLOADED",
+ documentNumber: doc?.documentNumber || null,
+ comments: doc?.verificationComments || null,
+ uploadedAt: doc?.uploadedAt || null,
+ verifiedAt: doc?.verifiedAt || null,
+ };
+ });
+
+ return status;
+ } catch (error) {
+ throw new Error(`Failed to fetch document status: ${error.message}`);
+ }
+ }
+
+ async deleteDocument(documentType, userId) {
+ try {
+ const userDocs = await Document.findOne({ userId });
+
+      if (!userDocs || !userDocs.documents.get(documentType)) {
+ throw new Error("Document not found or unauthorized");
+ }
+
+      const cloudinaryId = userDocs.documents.get(documentType).cloudinaryId;
+
+ // Delete from Cloudinary
+ if (cloudinaryId) {
+ await cloudinary.uploader.destroy(cloudinaryId, {
+ resource_type: "auto",
+ });
+ }
+
+ // Remove the specific document
+      userDocs.documents.set(documentType, {
+        status: "NOT_UPLOADED",
+      });
+
+ await userDocs.save();
+
+ return { message: "Document deleted successfully" };
+ } catch (error) {
+ throw new Error(`Delete failed: ${error.message}`);
+ }
+ }
+
+ // New method to verify documents
+ async verifyDocument(userId, documentType, verificationStatus, comments) {
+ try {
+ console.log("Helloooooooooooo");
+
+ const userDocs = await Document.findOne({ userId });
+
+ // console.log(userDocs);
+ // if (!userDocs || !userDocs.documents[documentType]) {
+ // console.log("worlddddddddddd");
+
+ // throw new Error("Document not found");
+ // }
+
+ // userDocs.documents[documentType].status = verificationStatus;
+ // userDocs.documents[documentType].verificationComments = comments;
+ // userDocs.documents[documentType].verifiedAt = new Date();
+ // userDocs.set(`documents.${documentType}.status`, verificationStatus);
+ // userDocs.set(`documents.${documentType}.verificationComments`, comments);
+ // userDocs.set(`documents.${documentType}.verifiedAt`, new Date());
+ // console.log(userDocs);
+
+ // await userDocs.save();
+
+ const currentDoc = userDocs.documents.get(documentType);
+
+ // Update the document
+ if (currentDoc) {
+ currentDoc.status = verificationStatus;
+ currentDoc.verificationComments = comments;
+ currentDoc.verifiedAt = new Date();
+
+ // Set the updated document back to the Map
+ userDocs.documents.set(documentType, currentDoc);
+
+ // Save the document
+ await userDocs.save();
+ }
+
+      return userDocs.documents.get(documentType);
+ } catch (error) {
+ throw new Error(`Verification failed: ${error.message}`);
+ }
+ }
+}
+
+export default new UploadService();
diff --git a/backend/src/microservices/DocUpload/uploads/1738613749656-download (2).jpeg b/backend/src/microservices/DocUpload/uploads/1738613749656-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/src/microservices/DocUpload/uploads/1738613749656-download (2).jpeg differ
diff --git a/backend/src/microservices/kyc/document_verify/main.js b/backend/src/microservices/kyc/document_verify/main.js
index 90868dd..db3d095 100644
--- a/backend/src/microservices/kyc/document_verify/main.js
+++ b/backend/src/microservices/kyc/document_verify/main.js
@@ -40,7 +40,7 @@ function getFuncForDocType(docType) {
return docTypeFunctionMap[docType] || [dummy];
}
-async function verifyDocument(filePath, docType) {
+export async function verifyDocument(filePath, docType) {
try {
// 1. Extract text from the document using OCR.
let extractedText = "";
@@ -97,6 +97,6 @@ async function verifyDocument(filePath, docType) {
// console.log(`Verification Result for ${docType}:`, result);
// }
-verifyDocument("Test Docs/Certificate Of Origin.jpg", "coo").then((result) => {
- console.log("Document Verification Result:", result);
-});
+// verifyDocument("Test Docs/Certificate Of Origin.jpg", "coo").then((result) => {
+// console.log("Document Verification Result:", result);
+// });
diff --git a/backend/src/middlewares/auth.js b/backend/src/middlewares/auth.js
index bd085e8..da4b35d 100644
--- a/backend/src/middlewares/auth.js
+++ b/backend/src/middlewares/auth.js
@@ -3,6 +3,7 @@ import jwt from 'jsonwebtoken'
const authMiddleware = async (req, res, next) => {
try{
const {token} = req.cookies;
+ console.log(token); // FIXME(security): remove before merge — never log raw auth tokens
if (!token) {
return res.status(401).json({
isAuthenticated: false,
@@ -11,6 +12,7 @@ const authMiddleware = async (req, res, next) => {
}
const decoded = await jwt.verify(token, process.env.JWT_SECRET);
+ console.log(decoded);
req.id = decoded.id;
req.role = decoded.role;
next();
diff --git a/backend/src/routes/doc_upload.js b/backend/src/routes/doc_upload.js
new file mode 100644
index 0000000..968131d
--- /dev/null
+++ b/backend/src/routes/doc_upload.js
@@ -0,0 +1,14 @@
+import express from 'express';
+import {upload, uploadDocument, getDocs,getStatus, deleteDocByType, verifyDocByType} from "./../microservices/DocUpload/controller/upload.controller.js";
+
+const app = express();
+
+const router = express.Router();
+
+router.get('/:userId', getDocs)
+router.post('/upload/:userId', upload.array('files', 1), uploadDocument);
+router.get('/status/:userId', getStatus);
+router.put('/verify/:userId', verifyDocByType)
+router.delete('/:userId', deleteDocByType)
+
+export default router;
\ No newline at end of file
diff --git a/backend/uploads/1738622680244-download (2).jpeg b/backend/uploads/1738622680244-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738622680244-download (2).jpeg differ
diff --git a/backend/uploads/1738622802646-download (2).jpeg b/backend/uploads/1738622802646-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738622802646-download (2).jpeg differ
diff --git a/backend/uploads/1738622844020-download (2).jpeg b/backend/uploads/1738622844020-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738622844020-download (2).jpeg differ
diff --git a/backend/uploads/1738623134541-download (2).jpeg b/backend/uploads/1738623134541-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623134541-download (2).jpeg differ
diff --git a/backend/uploads/1738623339663-download (2).jpeg b/backend/uploads/1738623339663-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623339663-download (2).jpeg differ
diff --git a/backend/uploads/1738623408514-download (2).jpeg b/backend/uploads/1738623408514-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623408514-download (2).jpeg differ
diff --git a/backend/uploads/1738623451466-download (2).jpeg b/backend/uploads/1738623451466-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623451466-download (2).jpeg differ
diff --git a/backend/uploads/1738623752645-download (2).jpeg b/backend/uploads/1738623752645-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623752645-download (2).jpeg differ
diff --git a/backend/uploads/1738623789442-download (2).jpeg b/backend/uploads/1738623789442-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623789442-download (2).jpeg differ
diff --git a/backend/uploads/1738623824233-download (2).jpeg b/backend/uploads/1738623824233-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623824233-download (2).jpeg differ
diff --git a/backend/uploads/1738623885876-download (2).jpeg b/backend/uploads/1738623885876-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623885876-download (2).jpeg differ
diff --git a/backend/uploads/1738623953088-download (2).jpeg b/backend/uploads/1738623953088-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738623953088-download (2).jpeg differ
diff --git a/backend/uploads/1738624000866-download (2).jpeg b/backend/uploads/1738624000866-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624000866-download (2).jpeg differ
diff --git a/backend/uploads/1738624108964-download (2).jpeg b/backend/uploads/1738624108964-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624108964-download (2).jpeg differ
diff --git a/backend/uploads/1738624130283-download (2).jpeg b/backend/uploads/1738624130283-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624130283-download (2).jpeg differ
diff --git a/backend/uploads/1738624146873-download (2).jpeg b/backend/uploads/1738624146873-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624146873-download (2).jpeg differ
diff --git a/backend/uploads/1738624160171-download (2).jpeg b/backend/uploads/1738624160171-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624160171-download (2).jpeg differ
diff --git a/backend/uploads/1738624235069-download (2).jpeg b/backend/uploads/1738624235069-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624235069-download (2).jpeg differ
diff --git a/backend/uploads/1738624264339-download (2).jpeg b/backend/uploads/1738624264339-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624264339-download (2).jpeg differ
diff --git a/backend/uploads/1738624284723-download (2).jpeg b/backend/uploads/1738624284723-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624284723-download (2).jpeg differ
diff --git a/backend/uploads/1738624299434-download (2).jpeg b/backend/uploads/1738624299434-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624299434-download (2).jpeg differ
diff --git a/backend/uploads/1738624349130-download (2).jpeg b/backend/uploads/1738624349130-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624349130-download (2).jpeg differ
diff --git a/backend/uploads/1738624358817-download (2).jpeg b/backend/uploads/1738624358817-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624358817-download (2).jpeg differ
diff --git a/backend/uploads/1738624441713-download (2).jpeg b/backend/uploads/1738624441713-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738624441713-download (2).jpeg differ
diff --git a/backend/uploads/1738626075559-download (2).jpeg b/backend/uploads/1738626075559-download (2).jpeg
new file mode 100644
index 0000000..59f9d7f
Binary files /dev/null and b/backend/uploads/1738626075559-download (2).jpeg differ
diff --git a/backend/uploads/1738626185673-Import Export Code Certificate.jpg b/backend/uploads/1738626185673-Import Export Code Certificate.jpg
new file mode 100644
index 0000000..bc44760
Binary files /dev/null and b/backend/uploads/1738626185673-Import Export Code Certificate.jpg differ
diff --git a/backend/uploads/1738626790513-BIS Certificate.jpg b/backend/uploads/1738626790513-BIS Certificate.jpg
new file mode 100644
index 0000000..1388235
Binary files /dev/null and b/backend/uploads/1738626790513-BIS Certificate.jpg differ
diff --git a/backend/uploads/1738626846116-Aadhaar Card.jpg b/backend/uploads/1738626846116-Aadhaar Card.jpg
new file mode 100644
index 0000000..aa2f4c0
Binary files /dev/null and b/backend/uploads/1738626846116-Aadhaar Card.jpg differ
diff --git a/backend/uploads/1738627344636-Shipping Bill.jpg b/backend/uploads/1738627344636-Shipping Bill.jpg
new file mode 100644
index 0000000..2900f30
Binary files /dev/null and b/backend/uploads/1738627344636-Shipping Bill.jpg differ
diff --git a/backend/uploads/1738627383983-Aadhaar Card.jpg b/backend/uploads/1738627383983-Aadhaar Card.jpg
new file mode 100644
index 0000000..aa2f4c0
Binary files /dev/null and b/backend/uploads/1738627383983-Aadhaar Card.jpg differ
diff --git a/backend/uploads/1738627458486-Trade License.jpg b/backend/uploads/1738627458486-Trade License.jpg
new file mode 100644
index 0000000..ba63d6b
Binary files /dev/null and b/backend/uploads/1738627458486-Trade License.jpg differ
diff --git a/backend/uploads/1738627500337-UAE ID Card.jpg b/backend/uploads/1738627500337-UAE ID Card.jpg
new file mode 100644
index 0000000..8469dcc
Binary files /dev/null and b/backend/uploads/1738627500337-UAE ID Card.jpg differ
diff --git a/backend/uploads/1738627528151-UAE VAT Certificate.jpg b/backend/uploads/1738627528151-UAE VAT Certificate.jpg
new file mode 100644
index 0000000..e664b14
Binary files /dev/null and b/backend/uploads/1738627528151-UAE VAT Certificate.jpg differ
diff --git a/frontend/.env b/frontend/.env
index 647f9dd..45036da 100644
--- a/frontend/.env
+++ b/frontend/.env
@@ -1 +1,2 @@
-VITE_API_URL='http://localhost:3000/'
+VITE_API_URL=http://localhost:3000
+
diff --git a/frontend/src/api/api.js b/frontend/src/api/api.js
index f65e3b7..f483c91 100644
--- a/frontend/src/api/api.js
+++ b/frontend/src/api/api.js
@@ -9,8 +9,8 @@ const api = axios.create({
})
export const authAPI = {
- login: (email, password) => {
- return api.post('/user/login', { email, password })
+ login: (email, password, role) => {
+ return api.post('/user/login', { email, password, role })
},
logout: () => {
diff --git a/frontend/src/components/buyer/BuyerCompliance.jsx b/frontend/src/components/buyer/BuyerCompliance.jsx
index 108d09d..1ae0573 100644
--- a/frontend/src/components/buyer/BuyerCompliance.jsx
+++ b/frontend/src/components/buyer/BuyerCompliance.jsx
@@ -1,151 +1,168 @@
-import React, { useState, useCallback } from 'react';
-import { motion, AnimatePresence } from 'framer-motion';
-import { useDropzone } from 'react-dropzone';
-import {
- FaShieldAlt, FaCheckCircle, FaExclamationTriangle,
- FaFileAlt, FaDownload, FaUpload, FaClock, FaTimes,
- FaCloudUploadAlt
-} from 'react-icons/fa';
+import React, { useState, useCallback } from "react";
+import { motion, AnimatePresence } from "framer-motion";
+import { useDropzone } from "react-dropzone";
+import {
+ FaShieldAlt,
+ FaCheckCircle,
+ FaExclamationTriangle,
+ FaFileAlt,
+ FaDownload,
+ FaUpload,
+ FaClock,
+ FaTimes,
+ FaCloudUploadAlt,
+} from "react-icons/fa";
+import {
+ requiredDocumentsIndia,
+ requiredDocumentsUAE,
+} from "../../constants/documents.js";
const BuyerCompliance = () => {
- const [activeSection, setActiveSection] = useState('overview');
+ const [activeSection, setActiveSection] = useState("overview");
const [showUploadModal, setShowUploadModal] = useState(false);
- const [selectedDocType, setSelectedDocType] = useState('');
+ const [selectedDocType, setSelectedDocType] = useState("");
const [uploadProgress, setUploadProgress] = useState(0);
const [uploadedDocuments, setUploadedDocuments] = useState([]);
+ const [selectedCountry, setSelectedCountry] = useState("india");
+
+ const documentList =
+ selectedCountry === "india" ? requiredDocumentsIndia : requiredDocumentsUAE;
+
const complianceStats = [
{
- title: 'Overall Compliance',
- value: '95%',
- status: 'good',
+ title: "Overall Compliance",
+ value: "95%",
+ status: "good",
icon: ,
- color: 'green'
+ color: "green",
},
{
- title: 'Pending Documents',
- value: '2',
- status: 'warning',
+ title: "Pending Documents",
+ value: "2",
+ status: "warning",
icon: ,
- color: 'yellow'
+ color: "yellow",
},
{
- title: 'Last Verification',
- value: '10 days ago',
- status: 'good',
+ title: "Last Verification",
+ value: "10 days ago",
+ status: "good",
icon: ,
- color: 'blue'
- }
+ color: "blue",
+ },
];
const documents = [
{
id: 1,
- name: 'Business Registration',
- status: 'verified',
- expiryDate: '2025-12-31',
- lastUpdated: '2024-01-15'
+ name: "Business Registration",
+ status: "verified",
+ expiryDate: "2025-12-31",
+ lastUpdated: "2024-01-15",
},
{
id: 2,
- name: 'Import License',
- status: 'pending',
- expiryDate: '2024-12-31',
- lastUpdated: '2024-02-01'
- }
- ];
-
- const requiredDocuments = [
- {
- id: 'business_reg',
- name: 'Business Registration',
- description: 'Company Registration or Proprietorship Document',
- required: true
- },
- {
- id: 'import_license',
- name: 'Import License',
- description: 'Valid Import-Export Code (IEC)',
- required: true
- },
- {
- id: 'gst',
- name: 'GST Registration',
- description: 'GST Registration Certificate',
- required: true
- },
- {
- id: 'pan',
- name: 'PAN Card',
- description: 'Permanent Account Number Card',
- required: true
- },
- {
- id: 'bank_details',
- name: 'Bank Details',
- description: 'Bank Account Verification Letter',
- required: true
- },
- {
- id: 'trade_license',
- name: 'Trade License',
- description: 'Local Trade License if applicable',
- required: false
+ name: "Import License",
+ status: "pending",
+ expiryDate: "2024-12-31",
+ lastUpdated: "2024-02-01",
},
- {
- id: 'msme',
- name: 'MSME Registration',
- description: 'MSME Registration Certificate if applicable',
- required: false
- }
];
- const handleUploadClick = () => {
- setSelectedDocType('');
- setShowUploadModal(true);
- };
+ // const handleUploadClick = () => {
+ // setSelectedDocType('');
+ // setShowUploadModal(true);
+ // };
+
+ // const handleFileUpload = (file) => {
+ // // Simulate upload progress
+ // let progress = 0;
+ // const interval = setInterval(() => {
+ // progress += 10;
+ // setUploadProgress(progress);
+ // if (progress >= 100) {
+ // clearInterval(interval);
+ // // Add document to uploaded documents list
+ // const newDoc = {
+ // id: Date.now(),
+ // name: selectedDocType || file.name,
+ // status: 'pending',
+ // expiryDate: '2025-12-31',
+ // lastUpdated: new Date().toISOString().split('T')[0],
+ // file: file
+ // };
+ // setUploadedDocuments(prev => [...prev, newDoc]);
+ // setTimeout(() => {
+ // setShowUploadModal(false);
+ // setUploadProgress(0);
+ // }, 500);
+ // }
+ // }, 300);
+ // };
const handleFileUpload = (file) => {
- // Simulate upload progress
let progress = 0;
const interval = setInterval(() => {
progress += 10;
setUploadProgress(progress);
if (progress >= 100) {
clearInterval(interval);
- // Add document to uploaded documents list
- const newDoc = {
- id: Date.now(),
- name: selectedDocType || file.name,
- status: 'pending',
- expiryDate: '2025-12-31',
- lastUpdated: new Date().toISOString().split('T')[0],
- file: file
- };
- setUploadedDocuments(prev => [...prev, newDoc]);
- setTimeout(() => {
- setShowUploadModal(false);
- setUploadProgress(0);
- }, 500);
+
+ // Prepare form data
+ const formData = new FormData();
+ formData.append("files", file); // Ensure "files" is the key name
+ formData.append("documentType", selectedDocType || file.name);
+ // formData.append("expiryDate", "2025-12-31"); // Adjust as needed
+
+ // API call
+ fetch("http://localhost:3000/docs/upload/17", {
+ method: "POST",
+ body: formData,
+ })
+ .then((response) => response.json())
+ .then((data) => {
+ console.log("Upload successful:", data);
+
+ // Add document to uploaded list
+ const newDoc = {
+ id: Date.now(),
+ name: selectedDocType || file.name,
+ status: "pending",
+ expiryDate: "2025-12-31",
+ lastUpdated: new Date().toISOString().split("T")[0],
+ file: file,
+ };
+
+ setUploadedDocuments((prev) => [...prev, newDoc]);
+ })
+ .catch((error) => console.error("Upload failed:", error))
+ .finally(() => {
+ setShowUploadModal(false);
+ setUploadProgress(0);
+ });
}
}, 300);
};
- const onDrop = useCallback((acceptedFiles) => {
- const file = acceptedFiles[0];
- if (file) {
- handleFileUpload(file);
- }
- }, [selectedDocType]);
+ const onDrop = useCallback(
+ (acceptedFiles) => {
+ const file = acceptedFiles[0];
+ if (file) {
+ handleFileUpload(file);
+ }
+ },
+ [selectedDocType]
+ );
const { getRootProps, getInputProps, isDragActive } = useDropzone({
onDrop,
accept: {
- 'application/pdf': ['.pdf'],
- 'image/*': ['.jpeg', '.jpg', '.png']
+ "application/pdf": [".pdf"],
+ "image/*": [".jpeg", ".jpg", ".png"],
},
maxSize: 5242880, // 5MB
- multiple: false
+ multiple: false,
});
return (
@@ -162,10 +179,14 @@ const BuyerCompliance = () => {
{stat.title}
{stat.value}
-
- {stat.status === 'good' ? 'Good Standing' : 'Needs Attention'}
+
+ {stat.status === "good" ? "Good Standing" : "Needs Attention"}
@@ -184,7 +205,17 @@ const BuyerCompliance = () => {
Required Documents
- setSelectedCountry(e.target.value)}
+ >
+
+
+
+
+ {/* {
>
Upload Document
-
+ */}
- {requiredDocuments.map((doc) => {
+ {documentList.map((doc) => {
const isUploaded = uploadedDocuments.some(
- uploaded => uploaded.name === doc.name
+ (uploaded) => uploaded.name === doc.name
);
return (
{
whileHover={{ scale: 1.05 }}
whileTap={{ scale: 0.95 }}
onClick={() => {
- setSelectedDocType(doc.name);
+ setSelectedDocType(doc.id);
setShowUploadModal(true);
}}
className={`px-4 py-2 rounded-lg flex items-center gap-2 ${
- isUploaded
- ? 'bg-green-500 text-white'
- : 'bg-blue-500 text-white'
+ isUploaded
+ ? "bg-green-500 text-white"
+ : "bg-blue-500 text-white"
}`}
>
- {isUploaded ? 'Update' : 'Upload'}
+ {isUploaded ? "Update" : "Upload"}
);
@@ -310,7 +341,9 @@ const BuyerCompliance = () => {
onClick={(e) => e.stopPropagation()}
>
-
Upload {selectedDocType}
+
+ Upload {selectedDocType}
+