-
Notifications
You must be signed in to change notification settings - Fork 11
EEG Export and Import Update #66
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -5,6 +5,7 @@ use axum::{ | |
| routing::{get, post}, | ||
| Json, | ||
| Router, | ||
| body::Bytes, | ||
| }; | ||
| use serde::{Deserialize, Serialize}; | ||
| use serde_json::{json, Value}; | ||
|
|
@@ -17,10 +18,13 @@ use pyo3::Python; | |
| use pyo3::types::{PyList, PyModule, PyTuple}; | ||
| use pyo3::PyResult; | ||
| use pyo3::{IntoPy, ToPyObject}; | ||
| use chrono::{DateTime, Utc}; | ||
| use axum::http::{HeaderMap, HeaderValue, header}; | ||
| use axum::response::IntoResponse; | ||
| use rand_core::OsRng; | ||
|
|
||
| // shared logic library | ||
| use shared_logic::db::{initialize_connection, DbClient}; | ||
| use shared_logic::db::{DbClient, get_eeg_time_range, initialize_connection, export_eeg_data_as_csv}; | ||
| use shared_logic::models::{User, NewUser, UpdateUser, Session, FrontendState}; | ||
|
|
||
| // Argon2 imports | ||
|
|
@@ -36,6 +40,21 @@ struct AppState { | |
| db_client: DbClient, | ||
| } | ||
|
|
||
| // define request struct for exporting EEG data | ||
| #[derive(Deserialize)] | ||
| struct ExportEEGRequest { | ||
| filename: String, | ||
| options: ExportOptions | ||
| } | ||
|
|
||
| #[derive(Deserialize)] | ||
| struct ExportOptions { | ||
| format: String, | ||
| includeHeader: bool, | ||
| start_time: Option<DateTime<Utc>>, | ||
| end_time: Option<DateTime<Utc>>, | ||
| } | ||
|
|
||
|
|
||
| #[derive(Debug, Clone, Deserialize)] | ||
| pub struct LoginRequest { | ||
|
|
@@ -247,6 +266,68 @@ async fn get_frontend_state( | |
| } | ||
| } | ||
|
|
||
| // Handler for POST /api/sessions/{session_id}/eeg_data/export | ||
| async fn export_eeg_data( | ||
| State(app_state): State<AppState>, | ||
| Path(session_id): Path<i32>, | ||
| Json(request): Json<ExportEEGRequest>, | ||
| ) -> Result<impl IntoResponse, (StatusCode, String)> { | ||
| info!("Received request to export EEG data for session {}", session_id); | ||
|
|
||
| // right now the only export format supported is CSV, so we just check for that | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. The prechecks can be in it's own helper function. Make it a more modular function, since the logic for selecting a time frame(start and end time) is not Export specific. This will reduce code duplication ` |
||
| if request.options.format.to_lowercase() != "csv" { | ||
| return Err((StatusCode::BAD_REQUEST, format!("Unsupported export format: {}", request.options.format))); | ||
| } | ||
|
|
||
| let (start_time, end_time) = get_eeg_time_range(&app_state.db_client, session_id, &request.options) | ||
| .await.map_err(|e| { | ||
| error!("Failed to get EEG time range: {}", e); | ||
| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to get EEG time range: {}", e)) | ||
| })?; | ||
|
|
||
| let header_included = request.options.includeHeader; | ||
|
|
||
| // finally call the export function in db.rs | ||
| let return_csv = match export_eeg_data_as_csv(&app_state.db_client, session_id, start_time, end_time, header_included).await { | ||
| Ok(csv_data) => csv_data, | ||
| Err(e) => { | ||
| error!("Failed to export EEG data: {}", e); | ||
| return Err((StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to export EEG data: {}", e))); | ||
| } | ||
| }; | ||
|
|
||
| // small safety: avoid quotes breaking header | ||
| let filename = request.filename.replace('"', ""); | ||
|
|
||
| let mut headers = HeaderMap::new(); | ||
| headers.insert(header::CONTENT_TYPE, HeaderValue::from_static("text/csv; charset=utf-8")); | ||
|
|
||
| let content_disp = format!("attachment; filename=\"{}\"", filename); | ||
| headers.insert( | ||
| header::CONTENT_DISPOSITION, | ||
| HeaderValue::from_str(&content_disp).map_err(|e| { | ||
| (StatusCode::BAD_REQUEST, format!("Invalid filename for header: {}", e)) | ||
| })?, | ||
| ); | ||
|
|
||
| // return CSV directly as the body | ||
| Ok((headers, return_csv)) | ||
|
|
||
| } | ||
|
|
||
| // Handler for POST /api/sessions/{session_id}/eeg_data/import | ||
| async fn import_eeg_data( | ||
| State(app_state): State<AppState>, | ||
| Path(session_id): Path<i32>, | ||
| // we expect the CSV data to be sent as raw text in the body of the request | ||
| body: Bytes, | ||
| ) -> Result<Json<Value>, (StatusCode, String)> { | ||
| shared_logic::db::import_eeg_data_from_csv(&app_state.db_client, session_id, &body) | ||
| .await | ||
| .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to import EEG data: {}", e)))?; | ||
|
|
||
| Ok(Json(json!({"status": "success"}))) | ||
| } | ||
|
|
||
|
|
||
|
|
||
|
|
@@ -353,6 +434,9 @@ async fn main() { | |
| .route("/api/sessions/:session_id/frontend-state", post(set_frontend_state)) | ||
| .route("/api/sessions/:session_id/frontend-state", get(get_frontend_state)) | ||
|
|
||
| .route("/api/sessions/:session_id/eeg_data/export", post(export_eeg_data)) | ||
| .route("/api/sessions/:session_id/eeg_data/import", post(import_eeg_data)) | ||
|
|
||
| // Share application state with all handlers | ||
| .with_state(app_state); | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
-- NOTE: this migration assumes eeg_data currently contains no rows (as of
-- this migration there should be no real data yet). If needed, run
--   TRUNCATE TABLE eeg_data;
-- before applying it, since the new column is NOT NULL with no default.
ALTER TABLE eeg_data
    ADD COLUMN session_id INTEGER NOT NULL,
    ADD CONSTRAINT fk_session FOREIGN KEY (session_id) REFERENCES sessions(id) ON DELETE CASCADE;

-- Index on (session_id, time): the bulk of our queries filter on these.
-- DESC because recent data is expected to be the most frequently queried.
CREATE INDEX eeg_data_session_time_idx ON eeg_data (session_id, time DESC);
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This is a massive function with too many responsibilities. I would recommend breaking it down into a set of helpers to improve readability and modularity.