141 changes: 45 additions & 96 deletions Disease prediction/app.py
@@ -1,122 +1,71 @@
 from flask import Flask, render_template, request, jsonify
-from utils import load_keras_model, predict_image_keras
 import os
-import re
-from functools import wraps
-from werkzeug.utils import secure_filename
+import requests
+import json

 app = Flask(__name__)

-UPLOAD_FOLDER = r'disease/static/uploads'
-app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
-os.makedirs(UPLOAD_FOLDER, exist_ok=True)
+# Configuration for Unified Pipeline
+# In production, this would be an env variable pointing to the main backend service
+PIPELINE_API_URL = "http://localhost:5000/api/v1/ingest/upload"
+PIPELINE_STATUS_URL = "http://localhost:5000/api/v1/ingest/status/"

-# Security configuration
-ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif', 'bmp'}
-MAX_FILE_SIZE = 16 * 1024 * 1024 # 16MB max file size
-
-# Input validation helper functions
-def allowed_file(filename):
-    """Check if file extension is allowed"""
-    return '.' in filename and \
-           filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
-
-def sanitize_filename(filename):
-    """Sanitize filename to prevent path traversal attacks"""
-    if not filename:
-        return ""
-    # Remove any path separators and dangerous characters
-    cleaned = re.sub(r'[<>:"/\\|?*]', '', filename)
-    return secure_filename(cleaned)
-
-def validate_file_size(file):
-    """Validate file size"""
-    if file.content_length and file.content_length > MAX_FILE_SIZE:
-        return False
-    return True
-
-# Load the Keras model
-try:
-    model = load_keras_model(r'disease/model.h5')
-except Exception as e:
-    app.logger.error(f"Failed to load model: {str(e)}")
-    model = None
-
 # Route for homepage
 @app.route('/')
 def index():
     return render_template('index.html')

 # Route for prediction
 @app.route('/predict', methods=['POST'])
 def predict():
+    """
+    Proxies the prediction request to the Unified Data Extraction Pipeline.
+    """
     try:
         # Check if file was uploaded
         if 'file' not in request.files:
             return jsonify({'error': 'No file uploaded'}), 400

         file = request.files['file']

         # Check if file was selected
         if file.filename == '':
             return jsonify({'error': 'No file selected'}), 400

+        # Forward to Central Pipeline
+        files = {'file': (file.filename, file.stream, file.mimetype)}
+        data = {
+            'type': 'DISEASE',
+            'metadata': json.dumps({'source': 'web_legacy_app'})
+        }
+
-        # Validate file extension
-        if not allowed_file(file.filename):
-            return jsonify({'error': 'Invalid file type. Allowed: PNG, JPG, JPEG, GIF, BMP'}), 400
-
-        # Validate file size
-        if not validate_file_size(file):
-            return jsonify({'error': f'File too large. Maximum size: {MAX_FILE_SIZE // (1024*1024)}MB'}), 400
-
-        # Sanitize filename
-        filename = sanitize_filename(file.filename)
-        if not filename:
-            return jsonify({'error': 'Invalid filename'}), 400
-
-        # Create unique filename to prevent overwrites
-        import uuid
-        unique_filename = f"{uuid.uuid4().hex}_{filename}"
-        filepath = os.path.join(app.config['UPLOAD_FOLDER'], unique_filename)
-
-        # Save file
-        file.save(filepath)
-
-        # Check if model is loaded
-        if model is None:
-            return jsonify({'error': 'Model not available'}), 500
-
-        # Make prediction
-        predicted_class, description = predict_image_keras(model, filepath)
+        # Note: In a real microservice mesh, we'd use mutual TLS or internal tokens.
+        # Here we assume an internal call.
+        response = requests.post(PIPELINE_API_URL, files=files, data=data)
+
-        # Clean up uploaded file (optional - remove if you want to keep files)
-        try:
-            os.remove(filepath)
-        except:
-            pass # Ignore cleanup errors
+        if response.status_code != 202:
+            return jsonify({'error': f"Pipeline Error: {response.text}"}), response.status_code
+
+        result_data = response.json()
+        tracking_id = result_data.get('tracking_id')
+
-        return render_template('result.html',
-                               prediction=predicted_class,
-                               description=description,
-                               image_path=filepath)
-
-    except Exception as e:
-        app.logger.error(f"Prediction error: {str(e)}")
-        return jsonify({'error': 'Prediction failed'}), 500
+        # Poll for result (Simple implementation for legacy compatibility)
+        # In a real app, we'd use WebSockets or redirect the user to a status page
+        import time
+        for _ in range(10): # Try for 10 seconds
+            time.sleep(1)
+            status_resp = requests.get(PIPELINE_STATUS_URL + tracking_id)
+            if status_resp.status_code == 200:
+                status_data = status_resp.json().get('data', {})
+                if status_data.get('status') == 'COMPLETED':
+                    result = status_data.get('result', {})
+                    return render_template('result.html',
+                                           prediction=result.get('prediction'),
+                                           description=result.get('recommendation'),
+                                           image_path=status_data.get('filename')) # Path might need adjustment for serving
+                elif status_data.get('status') == 'FAILED':
+                    return jsonify({'error': 'Processing Failed'}), 500

-# Global error handlers
-@app.errorhandler(400)
-def bad_request(error):
-    return jsonify({'error': 'Bad request'}), 400
+        return jsonify({'message': 'Processing started. Check status later.', 'tracking_id': tracking_id}), 202

-@app.errorhandler(413)
-def too_large(error):
-    return jsonify({'error': 'File too large'}), 413
-
-@app.errorhandler(500)
-def internal_error(error):
-    app.logger.error(f"Internal error: {str(error)}")
-    return jsonify({'error': 'Internal server error'}), 500
+    except Exception as e:
+        app.logger.error(f"Proxy error: {str(e)}")
+        return jsonify({'error': 'Internal Proxy Error'}), 500

 if __name__ == '__main__':
-    app.run(debug=True)
+    app.run(debug=True, port=5001) # Run on different port to main app
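
The legacy app is now a thin proxy: it forwards the upload to the pipeline, polls the status endpoint for up to 10 seconds, and either renders the result or hands back a 202 with a tracking ID. A minimal sketch of exercising that flow end to end, assuming both services run locally; the sample image filename is hypothetical, while port 5001, the /predict route, and the 'file' field come from the code above:

import requests

# Hypothetical test image; any small JPEG/PNG on disk will do.
with open("leaf_sample.jpg", "rb") as fh:
    resp = requests.post(
        "http://localhost:5001/predict",
        files={"file": ("leaf_sample.jpg", fh, "image/jpeg")},
    )

if resp.status_code == 202:
    # Pipeline accepted the upload but did not finish within the 10 s poll window.
    print("Still processing, tracking_id:", resp.json().get("tracking_id"))
else:
    # Otherwise the app rendered result.html (HTML body) or returned an error JSON.
    print(resp.status_code, resp.text[:200])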
44 changes: 44 additions & 0 deletions Soil Classification Model/main.py
@@ -0,0 +1,44 @@
+import os
+import json
+import random
+
+class SoilClassifier:
+    """
+    Wrapper for the Soil Classification Model.
+    In a real scenario, this would load the .pkl or .h5 model.
+    """
+
+    def __init__(self, model_path=None):
+        self.model_path = model_path
+        # Load model here
+        pass
+
+    def predict(self, input_data):
+        """
+        Classifies soil based on input features or image.
+        """
+        # Mocking the classification logic from the notebook
+        soil_types = ['Black Soil', 'Red Soil', 'Clay Soil', 'Alluvial Soil']
+        crops = {
+            'Black Soil': ['Cotton', 'Wheat', 'Sugarcane'],
+            'Red Soil': ['Groundnut', 'Potato', 'Rice'],
+            'Clay Soil': ['Rice', 'Lettuce', 'Broccoli'],
+            'Alluvial Soil': ['Rice', 'Wheat', 'Sugarcane']
+        }
+
+        predicted_type = random.choice(soil_types)
+
+        return {
+            'soil_type': predicted_type,
+            'recommended_crops': crops[predicted_type],
+            'confidence': round(random.uniform(0.85, 0.99), 2),
+            'attributes': {
+                'pH': round(random.uniform(5.5, 8.5), 1),
+                'moisture': f"{random.randint(20, 80)}%"
+            }
+        }
+
+if __name__ == "__main__":
+    # Test run
+    classifier = SoilClassifier()
+    print(classifier.predict({}))
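
The class docstring notes that a real implementation would load a .pkl or .h5 model instead of the random mock. A minimal sketch of what that could look like for a Keras .h5 image classifier; the 224x224 input size, the class ordering, and the image-based interface are assumptions, not part of this file:

import numpy as np
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image

# Assumed class order; a real model would ship its own label mapping.
SOIL_CLASSES = ['Alluvial Soil', 'Black Soil', 'Clay Soil', 'Red Soil']

def predict_from_image(model_path, img_path):
    # Load the trained classifier and preprocess a single image into a batch of one.
    model = load_model(model_path)
    img = image.load_img(img_path, target_size=(224, 224))
    x = np.expand_dims(image.img_to_array(img) / 255.0, axis=0)
    probs = model.predict(x)[0]
    idx = int(np.argmax(probs))
    return {'soil_type': SOIL_CLASSES[idx], 'confidence': float(probs[idx])}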
7 changes: 7 additions & 0 deletions app.py
@@ -12,6 +12,8 @@
 from backend.utils.validation import validate_input, sanitize_input
 from backend.extensions import socketio, db, migrate, mail, limiter, babel, get_locale
 from backend.api.v1.files import files_bp
+from backend.api.ingestion import ingestion_bp
+from backend.middleware.audit import AuditMiddleware
 from crop_recommendation.routes import crop_bp
 # from disease_prediction.routes import disease_bp
 from spatial_analytics.routes import spatial_bp
@@ -27,6 +29,7 @@
 from auth_utils import token_required, roles_required
 import backend.sockets.forum_events # Register forum socket events
 import backend.sockets.knowledge_events # Register knowledge exchange events
+import backend.sockets.alert_socket # Register centralized alert socket events
 from backend.utils.i18n import t
 from server.Routes.rotation_routes import rotation_bp
 
@@ -60,6 +63,9 @@
 # Initialize Celery with app context
 celery = make_celery(app)
 
+# Initialize Audit Middleware
+audit_mw = AuditMiddleware(app)
+
 # Import models after db initialization
 from backend.models import User
 
@@ -70,6 +76,7 @@
 app.register_blueprint(health_bp)
 app.register_blueprint(files_bp)
 app.register_blueprint(spatial_bp)
+app.register_blueprint(ingestion_bp, url_prefix='/api/v1')
 
 # Register API v1 (including loan, weather, schemes, etc.)
 register_api(app)
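
The ingestion blueprint registered above under /api/v1 is not itself shown in this diff. A minimal sketch of the contract the legacy proxy assumes from /api/v1/ingest/upload and /api/v1/ingest/status/<tracking_id> (the in-memory job store and handler bodies are assumptions, not the real backend.api.ingestion module):

import uuid
from flask import Blueprint, jsonify, request

ingestion_bp = Blueprint('ingestion', __name__)
_JOBS = {}  # stand-in for whatever store the real pipeline uses

@ingestion_bp.route('/ingest/upload', methods=['POST'])
def ingest_upload():
    # The proxy expects a 202 plus a tracking_id it can poll.
    if 'file' not in request.files:
        return jsonify({'error': 'No file uploaded'}), 400
    tracking_id = uuid.uuid4().hex
    _JOBS[tracking_id] = {'status': 'PENDING', 'filename': request.files['file'].filename}
    # The real pipeline would hand the file off to an async worker here.
    return jsonify({'tracking_id': tracking_id}), 202

@ingestion_bp.route('/ingest/status/<tracking_id>', methods=['GET'])
def ingest_status(tracking_id):
    # The proxy reads status, result, and filename from under a 'data' key.
    job = _JOBS.get(tracking_id)
    if job is None:
        return jsonify({'error': 'Unknown tracking_id'}), 404
    return jsonify({'data': job}), 200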