Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
__pycache__/
eco_project/backend/gcloud-credentials.json
server.log
125 changes: 74 additions & 51 deletions eco_project/backend/app.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from flask import Flask, jsonify, request, send_from_directory
from flask_socketio import SocketIO, emit
import requests
import os

Expand All @@ -18,6 +19,7 @@ def log_struct(self, *args, **kwargs):
logger = MockLogger()

app = Flask(__name__, static_folder='static')
socketio = SocketIO(app)

@app.route('/')
def home():
Expand Down Expand Up @@ -213,56 +215,6 @@ def agricultural_land():
)
return jsonify({"error": str(e)}), 500

@app.route('/api/chat', methods=['POST'])
def chat():
    """Proxy a user chat message to the Ollama API and return the AI reply.

    Expects a JSON body {"message": "<text>"}; responds with
    {"response": "<text>"} on success or an error payload otherwise.
    """
    payload = request.get_json()
    user_message = payload.get('message')

    if not user_message:
        return jsonify({"error": "No message provided."}), 400

    # The target URL is configurable so deployments can point at a remote Ollama.
    ollama_api_url = os.environ.get(
        "OLLAMA_API_URL", "http://localhost:11434/api/generate"
    )

    try:
        # Single non-streaming completion request to the local model.
        response = requests.post(
            ollama_api_url,
            json={"model": "gemma:2b", "prompt": user_message, "stream": False},
        )
        response.raise_for_status()

        ai_response = response.json().get(
            'response', "I'm sorry, I couldn't generate a response."
        )
    except requests.exceptions.RequestException as e:
        logger.log_struct(
            {
                "message": f"Error communicating with Ollama API: {e}",
                "component": "backend",
                "endpoint": "/api/chat",
                "url": ollama_api_url,
            },
            severity="ERROR",
        )
        return jsonify({"error": "Failed to communicate with the AI service."}), 500

    logger.log_struct(
        {
            "message": f"Chat message received: {user_message}",
            "response": ai_response,
            "component": "backend",
            "endpoint": "/api/chat",
        },
        severity="INFO",
    )

    return jsonify({"response": ai_response})

@app.route('/api/drinking_water')
def drinking_water():
# World Bank API URL for access to safely managed drinking water
Expand Down Expand Up @@ -513,6 +465,77 @@ def livestock():
]
return jsonify(mock_data)

@app.route('/api/videos')
def videos():
    """Return the hard-coded list of featured educational videos as JSON."""
    # Static catalogue for now; each entry is a title plus a YouTube embed URL.
    catalogue = [
        {"title": "The Problem with Traditional Agriculture", "url": "https://www.youtube.com/embed/Yp7XFAE8kr4"},
        {"title": "Agroecology for Sustainable Food Systems", "url": "https://www.youtube.com/embed/6OyGlwYUS5w"},
        {"title": "How does an organic farmer conserve water?", "url": "https://www.youtube.com/embed/32ZMYDbItQ8"},
    ]
    return jsonify(catalogue)

@app.route('/api/chat', methods=['POST'])
def chat():
    """Relay a user chat message to the Ollama API and return the AI reply.

    Expects a JSON body {"message": "<text>"}. Returns
    {"response": "<text>"} on success, a 400 when no message is supplied,
    or a 500 when the Ollama service cannot be reached.
    """
    # silent=True avoids an unhandled exception (and a raw 500/415) when the
    # body is missing or is not valid JSON.
    data = request.get_json(silent=True) or {}
    user_message = data.get('message')

    if not user_message:
        return jsonify({"error": "No message provided."}), 400

    # Get Ollama API URL from environment variable or use default
    ollama_api_url = os.environ.get("OLLAMA_API_URL", "http://localhost:11434/api/generate")

    try:
        # Prepare the data for the Ollama API (single non-streaming completion)
        ollama_data = {
            "model": "gemma:2b",
            "prompt": user_message,
            "stream": False
        }

        # Send the request to the Ollama API. The timeout prevents a hung
        # Ollama instance from blocking this worker indefinitely; a timeout
        # raises requests.exceptions.Timeout, a RequestException subclass,
        # so it is handled by the except clause below.
        response = requests.post(ollama_api_url, json=ollama_data, timeout=60)
        response.raise_for_status()  # Raise an exception for bad status codes

        # Extract the response from Ollama
        ai_response = response.json().get('response', "I'm sorry, I couldn't generate a response.")

        logger.log_struct(
            {
                "message": f"Chat message received: {user_message}",
                "response": ai_response,
                "component": "backend",
                "endpoint": "/api/chat",
            },
            severity="INFO",
        )

        return jsonify({"response": ai_response})

    except requests.exceptions.RequestException as e:
        logger.log_struct(
            {
                "message": f"Error communicating with Ollama API: {e}",
                "component": "backend",
                "endpoint": "/api/chat",
                "url": ollama_api_url,
            },
            severity="ERROR",
        )
        return jsonify({"error": "Failed to communicate with the AI service."}), 500

@socketio.on('connect')
def handle_connect():
    """Log a new WebSocket client connection to stdout."""
    print('Client connected')

@socketio.on('disconnect')
def handle_disconnect():
    """Log a WebSocket client disconnection to stdout."""
    print('Client disconnected')

@socketio.on('chat_message')
def handle_chat_message(message):
    """Rebroadcast an incoming chat message to every connected client.

    NOTE(review): the message is relayed verbatim with no validation or
    sanitisation — clients must treat it as untrusted text (the bundled
    chat.js renders it via textContent, which is safe).
    """
    emit('chat_message', message, broadcast=True)

if __name__ == '__main__':
    port = int(os.environ.get("PORT", 8080))
    # Use socketio.run() (not app.run()): it wraps the Flask app and serves
    # both plain HTTP routes and the WebSocket endpoints. Calling app.run()
    # first would block forever and the Socket.IO server would never start.
    # allow_unsafe_werkzeug=True permits the Werkzeug dev server here; use a
    # production server (e.g. gunicorn + eventlet/gevent) for real deployments.
    socketio.run(app, host='0.0.0.0', port=port, debug=False, allow_unsafe_werkzeug=True)
1 change: 1 addition & 0 deletions eco_project/backend/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@ Flask
requests
gunicorn
google-cloud-logging
Flask-SocketIO
23 changes: 23 additions & 0 deletions eco_project/backend/static/chat.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
/* Chat page layout: a fixed-height column with the message log on top
   and the input row pinned beneath it. */
#chat-container {
    display: flex;
    flex-direction: column;
    height: 400px;
    border: 1px solid #ccc;
    padding: 10px;
}

/* Message log grows to fill the container and scrolls when full. */
#chat-window {
    flex-grow: 1;
    overflow-y: auto;
    border-bottom: 1px solid #ccc;
    margin-bottom: 10px;
}

#chat-input {
    padding: 5px;
}

#send-btn {
    padding: 5px 10px;
    cursor: pointer;
}
32 changes: 32 additions & 0 deletions eco_project/backend/static/chat.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
<!DOCTYPE html>
<!-- Community chat page: messages are exchanged over Socket.IO (see chat.js). -->
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Chat - Environment Protection</title>
    <link rel="stylesheet" href="style.css">
    <link rel="stylesheet" href="chat.css">
</head>
<body>
    <header>
        <h1>Community Chat</h1>
        <nav>
            <a href="index.html">Home</a>
        </nav>
    </header>
    <main>
        <section id="chat-container">
            <div id="chat-window">
                <!-- Chat messages will appear here -->
            </div>
            <input type="text" id="chat-input" placeholder="Type your message...">
            <button id="send-btn">Send</button>
        </section>
    </main>
    <footer>
        <p>&copy; 2025 Environment Protection Initiative</p>
    </footer>
    <!-- Socket.IO client from CDN must load before chat.js, which calls io(). -->
    <script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js"></script>
    <script src="chat.js"></script>
</body>
</html>
35 changes: 35 additions & 0 deletions eco_project/backend/static/chat.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
// Community chat client: connects to the backend over Socket.IO, sends the
// user's input on button click, and renders every broadcast message.
document.addEventListener('DOMContentLoaded', () => {
    const socket = io();
    const input = document.getElementById('chat-input');
    const windowEl = document.getElementById('chat-window');
    const button = document.getElementById('send-btn');

    // Render one message at the bottom of the log and keep it scrolled down.
    // textContent (not innerHTML) so messages can never inject markup.
    function appendMessage(text) {
        const line = document.createElement('p');
        line.textContent = text;
        windowEl.appendChild(line);
        windowEl.scrollTop = windowEl.scrollHeight;
    }

    socket.on('connect', () => {
        console.log('Connected to server');
    });

    socket.on('disconnect', () => {
        console.log('Disconnected from server');
    });

    // The server rebroadcasts 'chat_message' to every client, including us.
    socket.on('chat_message', (msg) => appendMessage(msg));

    if (button) {
        button.addEventListener('click', () => {
            const text = input.value;
            if (text) {
                socket.emit('chat_message', text);
                input.value = '';
            }
        });
    }
});
8 changes: 8 additions & 0 deletions eco_project/backend/static/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,14 @@ <h2>Drinking Water and Energy Access</h2>
<h2>Global Health and Education</h2>
<p>Explore data and insights on global health and education trends from leading international organizations. <a href="health_education.html">Learn more</a>.</p>
</section>
<section id="community">
<h2>Community</h2>
<p>Share your ideas and connect with others.</p>
<ul>
<li><a href="videos.html">Watch Videos</a></li>
<li><a href="chat.html">Join the Chat</a></li>
</ul>
</section>
<section id="ai-agent">
<h2>AI Assistant</h2>
<p>Your personal guide to environmental protection.</p>
Expand Down
23 changes: 23 additions & 0 deletions eco_project/backend/static/videos.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
/* Video gallery: cards wrap into rows and spread across the page width. */
.video-container {
    display: flex;
    flex-wrap: wrap;
    justify-content: space-around;
    padding: 20px;
}

/* One card per video: embedded player on top, caption beneath. */
.video-item {
    width: 300px;
    margin: 15px;
    border: 1px solid #ccc;
    box-shadow: 0 0 5px rgba(0,0,0,0.1);
}

.video-item iframe {
    width: 100%;
    height: 170px;
}

.video-item-title {
    padding: 10px;
    font-weight: bold;
}
30 changes: 30 additions & 0 deletions eco_project/backend/static/videos.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
<!DOCTYPE html>
<!-- Video gallery page: videos.js fills .video-container from /api/videos. -->
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Videos - Environment Protection</title>
    <link rel="stylesheet" href="style.css">
    <link rel="stylesheet" href="videos.css">
</head>
<body>
    <header>
        <h1>Video Gallery</h1>
        <nav>
            <a href="index.html">Home</a>
        </nav>
    </header>
    <main>
        <section id="videos">
            <h2>Featured Videos</h2>
            <div class="video-container">
                <!-- Video embeds will go here -->
            </div>
        </section>
    </main>
    <footer>
        <p>&copy; 2025 Environment Protection Initiative</p>
    </footer>
    <script src="videos.js"></script>
</body>
</html>
31 changes: 31 additions & 0 deletions eco_project/backend/static/videos.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
// Video gallery client: fetches the catalogue from /api/videos and renders
// one embedded-player card per entry into .video-container.
document.addEventListener('DOMContentLoaded', () => {
    const videoContainer = document.querySelector('.video-container');

    // Load the video list from the backend and hand it to the renderer.
    async function fetchVideos() {
        try {
            const response = await fetch('/api/videos');
            if (!response.ok) {
                throw new Error(`HTTP error! status: ${response.status}`);
            }
            const videos = await response.json();
            displayVideos(videos);
        } catch (error) {
            // Build the error message with DOM APIs so error.message is
            // never parsed as HTML.
            videoContainer.textContent = '';
            const notice = document.createElement('p');
            notice.textContent = `Error fetching videos: ${error.message}`;
            videoContainer.appendChild(notice);
        }
    }

    // Render each video as an iframe plus a caption. Built via DOM
    // construction instead of an innerHTML template string so that API-supplied
    // titles/URLs cannot inject markup (XSS hardening) — the previous
    // interpolation of ${video.title}/${video.url} into innerHTML was unescaped.
    function displayVideos(videos) {
        videoContainer.textContent = '';
        videos.forEach(video => {
            const item = document.createElement('div');
            item.className = 'video-item';

            const frame = document.createElement('iframe');
            frame.src = video.url;
            frame.setAttribute('frameborder', '0');
            frame.setAttribute('allowfullscreen', '');
            item.appendChild(frame);

            const caption = document.createElement('div');
            caption.className = 'video-item-title';
            caption.textContent = video.title;
            item.appendChild(caption);

            videoContainer.appendChild(item);
        });
    }

    fetchVideos();
});
Loading