diff --git a/build-apk.sh b/build-apk.sh index 6f34533..c6a0de1 100755 --- a/build-apk.sh +++ b/build-apk.sh @@ -56,17 +56,19 @@ timestamp=$(date +"%Y%m%d-%H%M%S") # Development feature keyword system # You can modify this keyword based on the current development focus -DEV_KEYWORD="android-navbar-transparency-fix" +DEV_KEYWORD="no-localhost-alerts" # Alternative keywords for different features: -# DEV_KEYWORD="zoom-fix" # For zoom control fixes -# DEV_KEYWORD="ui-polish" # For UI improvements -# DEV_KEYWORD="gps-optimize" # For GPS improvements -# DEV_KEYWORD="audio-enhance" # For audio features -# DEV_KEYWORD="map-layers" # For map layer features -# DEV_KEYWORD="performance" # For performance improvements -# DEV_KEYWORD="bugfix" # For bug fixes -# DEV_KEYWORD="feature" # For new features +# DEV_KEYWORD="spatial-audio-nearby" # For proximity-based spatial audio +# DEV_KEYWORD="jamm-crossfade" # For advanced Jamm and Concatenated modes +# DEV_KEYWORD="freemium-limit" # For 10 recordings limit system +# DEV_KEYWORD="store-ready" # For Amazon/Samsung store submission +# DEV_KEYWORD="ui-polish" # For UI improvements +# DEV_KEYWORD="gps-optimize" # For GPS improvements +# DEV_KEYWORD="audio-enhance" # For audio features +# DEV_KEYWORD="map-layers" # For map layer features +# DEV_KEYWORD="performance" # For performance improvements +# DEV_KEYWORD="bugfix" # For bug fixes target_apk="biomap-$DEV_KEYWORD-$timestamp.apk" cp "$APK_PATH" "$target_apk" diff --git a/src/components/MapContainer.jsx b/src/components/MapContainer.jsx index 0af1e0a..142ef0f 100644 --- a/src/components/MapContainer.jsx +++ b/src/components/MapContainer.jsx @@ -11,6 +11,45 @@ import AudioRecorder from '../services/AudioRecorder.tsx'; import locationService from '../services/locationService.js'; import breadcrumbService from '../services/breadcrumbService.js'; +// Custom alert function for Android without localhost text +const showAlert = (message) => { + if (window.Capacitor?.isNativePlatform()) { + // For native platforms, create a simple modal overlay + const overlay = document.createElement('div'); + overlay.style.cssText = ` + position: fixed; top: 0; left: 0; right: 0; bottom: 0; + background: rgba(0,0,0,0.7); z-index: 10000; + display: flex; align-items: center; justify-content: center; + `; + + const modal = document.createElement('div'); + modal.style.cssText = ` + background: rgba(255, 255, 255, 0.85); border-radius: 8px; padding: 20px; + max-width: 300px; margin: 20px; text-align: center; + box-shadow: 0 10px 30px rgba(0,0,0,0.3); + `; + + modal.innerHTML = ` +
+      <div>${message}</div>
+      <button>OK</button>
+ + `; + + overlay.appendChild(modal); + document.body.appendChild(overlay); + + // Close on button click or overlay click + const closeModal = () => document.body.removeChild(overlay); + modal.querySelector('button').onclick = closeModal; + overlay.onclick = (e) => e.target === overlay && closeModal(); + } else { + // For web, use regular alert + alert(message); + } +}; + class MapContainer extends React.Component { constructor (props) { super(props) @@ -33,12 +72,16 @@ class MapContainer extends React.Component { isOnline: navigator.onLine, tracklog: this.loadTracklogFromStorage(), mapInstance: null, // Add map instance state - currentLayer: 'OpenStreetMap', // Add current layer state + currentLayer: 'CartoDB', // Set CartoDB as default for collector interface breadcrumbVisualization: 'line', // 'line', 'heatmap', 'markers', 'animated' showBreadcrumbs: true, // Enable by default currentBreadcrumbs: [] } + // Audio management refs for cleanup + this.audioRefs = []; + this.isAudioPlaying = false; + this.lastAcceptedPosition = null; // For debouncing GPS updates this.lastAcceptedTimestamp = 0; this.updateSelectedPoint = this.updateSelectedPoint.bind(this) @@ -61,6 +104,7 @@ class MapContainer extends React.Component { this.handleLayerChange = this.handleLayerChange.bind(this); this.toggleBreadcrumbs = this.toggleBreadcrumbs.bind(this); this.setBreadcrumbVisualization = this.setBreadcrumbVisualization.bind(this); + this.stopAllAudio = this.stopAllAudio.bind(this); } // --- Tracklog helpers --- @@ -102,6 +146,13 @@ class MapContainer extends React.Component { } toggleAudioRecorder() { + // Check recording limit before opening recorder + if (!this.state.isAudioRecorderVisible && !localStorageService.canCreateNewRecording()) { + const message = localStorageService.getLimitMessage(); + showAlert(message); + return; + } + this.setState({ isAudioRecorderVisible: !this.state.isAudioRecorderVisible }) } @@ -153,10 +204,13 @@ class MapContainer extends React.Component { console.log('✅ Recording validation passed, saving...'); // Save to localStorage - // Include native audio file path in metadata (if available) so export can use it later + // Include breadcrumb data if available from the current session const metadataToSave = { ...recordingData.metadata, - ...(recordingData.audioPath ? { audioPath: recordingData.audioPath } : {}) + ...(recordingData.audioPath ? { audioPath: recordingData.audioPath } : {}), + ...(recordingData.metadata.breadcrumbs ? { breadcrumbs: recordingData.metadata.breadcrumbs } : {}), + ...(recordingData.metadata.breadcrumbSession ? { breadcrumbSession: recordingData.metadata.breadcrumbSession } : {}), + ...(recordingData.metadata.movementPattern ? 
{ movementPattern: recordingData.metadata.movementPattern } : {}) }; const recordingId = await localStorageService.saveRecording(metadataToSave, recordingData.audioBlob); @@ -169,10 +223,10 @@ class MapContainer extends React.Component { }); // Show success message - alert(`Grabación "${recordingData.metadata.displayName}" guardada exitosamente!`); + showAlert(`Grabación "${recordingData.metadata.displayName}" guardada exitosamente!`); } catch (error) { console.error('Recording save failed:', error); - alert(`No se pudo guardar la grabación: ${error.message}`); + showAlert(`No se pudo guardar la grabación: ${error.message}`); } } @@ -326,36 +380,59 @@ class MapContainer extends React.Component { async handlePlayAudio(recordingId) { + console.log(`🎵 MapContainer: Playing audio for recording ${recordingId}`); + + // Stop any existing audio first + this.stopAllAudio(); + try { const recording = this.mapData.AudioRecordings.byId[recordingId]; if (recording) { - // Try flexible blob (localStorage or native file) - const audioBlob = await localStorageService.getAudioBlobFlexible(recordingId); + // Get audio blob from localStorage + const audioBlob = await localStorageService.getAudioBlob(recordingId); if (audioBlob) { + console.log(`✅ MapContainer: Audio blob found (${audioBlob.size} bytes)`); const audio = new Audio(URL.createObjectURL(audioBlob)); - audio.play().catch(error => { - console.error('Error playing audio (blob):', error); - alert('Error al reproducir el archivo de audio'); - }); - return; - } - // As last resort, try native path via convertFileSrc - if (recording.audioPath) { - const playableUrl = await localStorageService.getPlayableUrl(recordingId); - if (playableUrl) { - const audio = new Audio(playableUrl); - try { await audio.play(); return; } catch (_) {} - } + + // Track audio reference for cleanup + this.audioRefs.push(audio); + this.isAudioPlaying = true; + + // Set up event handlers + let hasEndedNaturally = false; + + audio.onended = () => { + console.log('🏁 MapContainer: Audio playback ended'); + hasEndedNaturally = true; + this.isAudioPlaying = false; + // Remove error handler before cleanup to prevent false errors + audio.onerror = null; + this.stopAllAudio(); + }; + + audio.onerror = (error) => { + // Only show error if audio hasn't ended naturally + if (!hasEndedNaturally) { + console.error('❌ MapContainer: Audio playback error:', error); + this.stopAllAudio(); + showAlert('Error al reproducir el archivo de audio'); + } + }; + + await audio.play(); + console.log('🎵 MapContainer: Audio playback started'); + } else { + console.log('❌ MapContainer: Audio blob not found for recording:', recordingId); + showAlert('Archivo de audio no disponible'); } - console.log('Audio data not found for recording:', recordingId); - alert('Archivo de audio no disponible'); } else { - console.log('Recording not found:', recordingId); - alert('Grabación no encontrada'); + console.log('❌ MapContainer: Recording not found:', recordingId); + showAlert('Grabación no encontrada'); } } catch (error) { - console.error('Error playing audio:', error); - alert('Error al reproducir el archivo de audio'); + console.error('❌ MapContainer: Error playing audio:', error); + this.stopAllAudio(); + showAlert('Error al reproducir el archivo de audio'); } } @@ -399,6 +476,27 @@ class MapContainer extends React.Component { breadcrumbService.stopTracking(); } + // Audio cleanup method - same as SoundWalkAndroid + stopAllAudio() { + console.log('🔚 MapContainer: Stopping all audio'); + this.isAudioPlaying = 
false; + + this.audioRefs.forEach(audio => { + try { + audio.pause(); + audio.currentTime = 0; + // Revoke blob URLs to prevent memory leaks + if (audio.src && audio.src.startsWith('blob:')) { + URL.revokeObjectURL(audio.src); + } + audio.src = ''; + } catch (error) { + console.warn('⚠️ Error cleaning up audio:', error.message); + } + }); + this.audioRefs = []; + } + componentDidMount() { // Load existing recordings first this.loadExistingRecordings(); @@ -419,12 +517,8 @@ class MapContainer extends React.Component { // Add global playAudio function for popup buttons window.playAudio = this.handlePlayAudio; - // Check for cached permission state first - // [REMOVE REDUNDANT PERMISSION REQUESTS] - // Remove checkCachedPermissionState and related permission request logic - if (this.props.onRequestLocation) { - this.props.onRequestLocation(); // Ensure location tracking starts automatically - } + // Automatically request GPS permission and start location tracking + this.initializeLocationTracking(); // Start breadcrumb tracking if enabled if (this.state.showBreadcrumbs) { @@ -432,6 +526,43 @@ class MapContainer extends React.Component { } } + // Initialize location tracking with automatic permission request + async initializeLocationTracking() { + console.log('MapContainer: Initializing location tracking for collector interface'); + + try { + // Check if we have cached location from previous session + const hasUserLocation = this.props.userLocation && this.props.userLocation.lat && this.props.userLocation.lng; + + if (!hasUserLocation) { + console.log('MapContainer: No cached location, requesting GPS permission and location'); + + // Request location permission and get current position + const position = await locationService.requestLocation(); + + if (position) { + console.log('MapContainer: GPS location obtained, updating state and centering map'); + this.handleLocationGranted(position); + } + } else { + console.log('MapContainer: Using cached location, starting location watch'); + // If we have cached location, just start the watch + if (this.props.onRequestLocation) { + this.props.onRequestLocation(); + } + } + } catch (error) { + console.error('MapContainer: Failed to initialize location tracking:', error); + // Handle permission denied or GPS error + this.handleLocationError(error); + + // Still try to trigger parent location request as fallback + if (this.props.onRequestLocation) { + this.props.onRequestLocation(); + } + } + } + componentDidUpdate(prevProps) { // Automatically center map to userLocation when it changes if ( @@ -510,18 +641,36 @@ class MapContainer extends React.Component { } componentWillUnmount() { + console.log('🧹 MapContainer: Component unmounting, cleaning up audio'); + // Stop all audio before unmounting + this.stopAllAudio(); + locationService.stopLocationWatch(); window.removeEventListener('online', this.handleOnlineStatus); window.removeEventListener('offline', this.handleOnlineStatus); // Stop breadcrumb tracking this.stopBreadcrumbTracking(); + + // Clean up global function + if (window.playAudio === this.handlePlayAudio) { + delete window.playAudio; + } } handleOnlineStatus = () => { this.setState({ isOnline: navigator.onLine }); } + // Handle back to landing with audio cleanup + handleBackToLanding = () => { + console.log('🏠 MapContainer: Navigating back to landing, stopping audio'); + this.stopAllAudio(); + if (this.props.onBackToLanding) { + this.props.onBackToLanding(); + } + } + async handleUploadPending() { const pending = 
localStorageService.getPendingUploads(); for (const rec of pending) { @@ -530,7 +679,7 @@ class MapContainer extends React.Component { localStorageService.markUploaded(rec.uniqueId); } this.setState({ pendingUploads: localStorageService.getPendingUploads() }); - alert('Grabaciones pendientes marcadas como subidas!'); + showAlert('Grabaciones pendientes marcadas como subidas!'); } render () { @@ -587,9 +736,8 @@ class MapContainer extends React.Component { toggleAudioRecorder={this.toggleAudioRecorder} updateQuery={this.updateQuery} userLocation={this.props.userLocation} - onBackToLanding={this.props.onBackToLanding} + onBackToLanding={this.handleBackToLanding} onLocationRefresh={this.handleLocationRefresh.bind(this)} - onRequestGPSAccess={this.handleLocationRefresh.bind(this)} isRecording={this.state.isAudioRecorderVisible} isMicDisabled={isMicDisabled} mapInstance={this.state.mapInstance} @@ -603,7 +751,6 @@ class MapContainer extends React.Component { showSearch={true} showZoomControls={true} showLayerSelector={true} - showImportButton={false} /> )} + {/* Recording Limit Counter (only if showMicButton is true) */} + {this.props.showMicButton && (() => { + const limitInfo = localStorageService.getRecordingLimitInfo(); + return ( +
+                <div>{limitInfo.used}/{limitInfo.limit}</div>
+ ); + })()} + {/* Info Button */} + `; + + overlay.appendChild(modal); + document.body.appendChild(overlay); + + // Close on button click or overlay click + const closeModal = () => document.body.removeChild(overlay); + modal.querySelector('button').onclick = closeModal; + overlay.onclick = (e) => e.target === overlay && closeModal(); + } else { + // For web, use regular alert + alert(message); + } +}; + const LISTEN_MODES = { CONCAT: 'concatenated', JAMM: 'jamm', @@ -262,18 +301,34 @@ const SoundWalk = ({ onBackToLanding, locationPermission, userLocation, hasReque // Check for nearby audio spots (15m range) const checkNearbySpots = (position) => { - if (!position || !audioSpots.length) return; + console.log('🔍 SoundWalk CheckNearbySpots called with position:', position); + console.log('📍 Total audioSpots available:', audioSpots.length); + + if (!position) { + console.warn('❌ No position provided to checkNearbySpots'); + return; + } - const nearby = audioSpots.filter(spot => { + if (!audioSpots.length) { + console.warn('❌ No audio spots available for proximity check'); + return; + } + + const nearby = audioSpots.filter((spot, index) => { const distance = calculateDistance( position.lat, position.lng, spot.location.lat, spot.location.lng ); - return distance <= 15; // 15 meters range + console.log(`📏 SoundWalk Spot ${index + 1} (${spot.filename}): ${distance.toFixed(1)}m away`); + const isNearby = distance <= 15; + if (isNearby) { + console.log(`✅ SoundWalk Spot ${index + 1} is within 15m range`); + } + return isNearby; }); + console.log(`🎯 SoundWalk Found ${nearby.length} nearby spots out of ${audioSpots.length} total`); setNearbySpots(nearby); - console.log('SoundWalk: Found nearby spots:', nearby.length); }; // Calculate distance between two points in meters @@ -509,20 +564,20 @@ const SoundWalk = ({ onBackToLanding, locationPermission, userLocation, hasReque document.body.removeChild(link); URL.revokeObjectURL(url); } - alert('Todas las grabaciones y registro de ruta exportadas con éxito!'); + showAlert('Todas las grabaciones y registro de ruta exportadas con éxito!'); } catch (error) { console.error('Export error:', error); - alert('Exportación fallida: ' + error.message); + showAlert('Exportación fallida: ' + error.message); } }; const handleExportMetadata = async () => { try { await RecordingExporter.exportMetadata(); - alert('Metadatos exportados con éxito!'); + showAlert('Metadatos exportados con éxito!'); } catch (error) { console.error('Metadata export error:', error); - alert('Exportación de metadatos fallida: ' + error.message); + showAlert('Exportación de metadatos fallida: ' + error.message); } }; @@ -590,7 +645,7 @@ const SoundWalk = ({ onBackToLanding, locationPermission, userLocation, hasReque } } catch (error) { console.error('Error playing audio:', error); - alert('Error al reproducir audio: ' + error.message); + showAlert('Error al reproducir audio: ' + error.message); } finally { setIsPlaying(false); } @@ -637,7 +692,7 @@ const SoundWalk = ({ onBackToLanding, locationPermission, userLocation, hasReque await playNearbySpots(nearbySpots); } catch (error) { console.error('Error playing nearby spots:', error); - alert('Error al reproducir cercanos: ' + error.message); + showAlert('Error al reproducir cercanos: ' + error.message); } finally { setIsPlaying(false); } @@ -827,11 +882,11 @@ const SoundWalk = ({ onBackToLanding, locationPermission, userLocation, hasReque if (audioBlob) { await playAudio(spot, audioBlob, userLocation); } else { - alert('Archivo de audio no 
encontrado'); + showAlert('Archivo de audio no encontrado'); } } catch (error) { console.error('Error al reproducir audio:', error); - alert('Error al reproducir audio: ' + error.message); + showAlert('Error al reproducir audio: ' + error.message); } }} style={{ diff --git a/src/components/SoundWalkAndroid.jsx b/src/components/SoundWalkAndroid.jsx index 7cb3661..e06978a 100644 --- a/src/components/SoundWalkAndroid.jsx +++ b/src/components/SoundWalkAndroid.jsx @@ -1,7 +1,7 @@ // BETA VERSION: Overlapping audio spots now support Concatenated and Jamm listening modes. import React, { useState, useEffect, useRef } from 'react'; import { MapContainer, TileLayer, Marker, Popup, Circle, useMap } from 'react-leaflet'; -import { Play, Pause, Square, Volume2, VolumeX, ArrowLeft, MapPin, Download, Upload } from 'lucide-react'; +import { Play, Pause, Square, Volume2, VolumeX, ArrowLeft, MapPin, Download } from 'lucide-react'; import localStorageService from '../services/localStorageService'; import RecordingExporter from '../utils/recordingExporter'; import TracklogExporter from '../utils/tracklogExporter.js'; @@ -43,6 +43,45 @@ function MapUpdater({ center, zoom }) { return null; } +// Custom alert function for Android without localhost text +const showAlert = (message) => { + if (window.Capacitor?.isNativePlatform()) { + // For native platforms, create a simple modal overlay + const overlay = document.createElement('div'); + overlay.style.cssText = ` + position: fixed; top: 0; left: 0; right: 0; bottom: 0; + background: rgba(0,0,0,0.7); z-index: 10000; + display: flex; align-items: center; justify-content: center; + `; + + const modal = document.createElement('div'); + modal.style.cssText = ` + background: white; border-radius: 8px; padding: 20px; + max-width: 300px; margin: 20px; text-align: center; + box-shadow: 0 10px 30px rgba(0,0,0,0.3); + `; + + modal.innerHTML = ` +
+      <div>${message}</div>
+      <button>OK</button>
+ + `; + + overlay.appendChild(modal); + document.body.appendChild(overlay); + + // Close on button click or overlay click + const closeModal = () => document.body.removeChild(overlay); + modal.querySelector('button').onclick = closeModal; + overlay.onclick = (e) => e.target === overlay && closeModal(); + } else { + // For web, use regular alert + alert(message); + } +}; + const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPermission, userLocation, hasRequestedPermission, setLocationPermission, setUserLocation, setHasRequestedPermission }) => { // Add local state for GPS button state const [gpsState, setGpsState] = useState('idle'); // idle, loading, granted, denied @@ -78,6 +117,9 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer // Add layer switching state const [currentLayer, setCurrentLayer] = useState('StadiaSatellite'); + + // Debug state + const [showDebugInfo, setShowDebugInfo] = useState(false); // 1. Move recentering logic to a useEffect that depends on userLocation, and use 10 meters useEffect(() => { @@ -257,8 +299,41 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer function getProximityVolume(distance) { if (distance <= 5) return 1.0; - if (distance >= 10) return 0.1; - return Math.exp(-(distance - 5) / 2); + if (distance >= 15) return 0.1; // Extended range for spatial mixing + return Math.exp(-(distance - 5) / 3); + } + + // Calculate bearing (direction) from user to audio spot in degrees (0-360) + function calculateBearing(lat1, lng1, lat2, lng2) { + const dLng = (lng2 - lng1) * Math.PI / 180; + const lat1Rad = lat1 * Math.PI / 180; + const lat2Rad = lat2 * Math.PI / 180; + + const y = Math.sin(dLng) * Math.cos(lat2Rad); + const x = Math.cos(lat1Rad) * Math.sin(lat2Rad) - + Math.sin(lat1Rad) * Math.cos(lat2Rad) * Math.cos(dLng); + + let bearing = Math.atan2(y, x) * 180 / Math.PI; + return (bearing + 360) % 360; // Normalize to 0-360 + } + + // Convert bearing to stereo pan (-1 = full left, +1 = full right) + function calculateSterePan(bearing) { + // 0° = North = center (0) + // 90° = East = full right (+1) + // 180° = South = center (0) + // 270° = West = full left (-1) + + // Convert bearing to radians for smoother calculation + const bearingRad = bearing * Math.PI / 180; + + // Use sine function to map bearing to pan (-1 to +1) + // 90° (East) = sin(90°) = 1 (right) + // 270° (West) = sin(270°) = -1 (left) + const pan = Math.sin(bearingRad); + + // Clamp to valid range + return Math.max(-1, Math.min(1, pan)); } const playAudio = async (spot, audioBlob, userPos = null) => { @@ -273,9 +348,12 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer let dist = 0; if (proximityVolumeEnabled && userPos && spot.location) { dist = calculateDistance(userPos.lat, userPos.lng, spot.location.lat, spot.location.lng); - audio.volume = getProximityVolume(dist); + const proximityVolume = getProximityVolume(dist); + audio.volume = proximityVolume; + console.log(`🔊 Proximity volume: distance=${dist.toFixed(1)}m, volume=${proximityVolume.toFixed(2)}`); } else { audio.volume = isMuted ? 
0 : volume; + console.log(`🔊 Standard volume: ${audio.volume.toFixed(2)} (muted=${isMuted})`); } audioRefs.current.push(audio); setCurrentAudio(spot); @@ -316,57 +394,160 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer }; const playNearbySpots = async (spots) => { + console.log(`🎧 Spatial audio playback with ${spots.length} nearby spots`); if (spots.length === 0) return; + try { setIsLoading(true); await stopAllAudio(); - const sortedSpots = spots.sort((a, b) => new Date(a.timestamp) - new Date(b.timestamp)); - for (const spot of sortedSpots) { - if (!isPlayingRef.current) break; + + // Set playing state + isPlayingRef.current = true; + setIsPlaying(true); + + // Calculate bearing and distance for each spot + const spatialSpots = spots.map(spot => { + const distance = calculateDistance( + userLocation.lat, userLocation.lng, + spot.location.lat, spot.location.lng + ); + const bearing = calculateBearing( + userLocation.lat, userLocation.lng, + spot.location.lat, spot.location.lng + ); + return { ...spot, distance, bearing }; + }); + + console.log('🗺️ Spatial positions:', spatialSpots.map(s => + `${s.filename}: ${s.distance.toFixed(1)}m, ${s.bearing.toFixed(0)}°` + )); + + // Start all nearby sounds simultaneously with spatial audio + const audioPromises = spatialSpots.map(async (spot) => { try { - const audioBlob = await localStorageService.getAudioBlobFlexible(spot.id); - if (audioBlob) { - await playAudio(spot, audioBlob, userLocation); - await new Promise((resolve) => { - const timeout = setTimeout(() => { - resolve(); - }, 30000); - if (audioRefs.current[0]) { - audioRefs.current[0].onended = () => { - clearTimeout(timeout); - resolve(); - }; - } else { - clearTimeout(timeout); - resolve(); + console.log(`🎵 Loading spatial audio: ${spot.filename}`); + const audioBlob = await localStorageService.getAudioBlob(spot.id); + + if (audioBlob && audioBlob.size > 0) { + const audio = new Audio(URL.createObjectURL(audioBlob)); + audio.preload = 'auto'; + audio.loop = true; // Loop for ambient mixing + + // Calculate spatial audio properties + const volume = getProximityVolume(spot.distance); + const pan = calculateSterePan(spot.bearing); + + console.log(`🔊 ${spot.filename}: vol=${volume.toFixed(2)}, pan=${pan.toFixed(2)}, dist=${spot.distance.toFixed(1)}m`); + + // Apply volume (use global volume state) + audio.volume = isMuted ? 
0 : volume; + + // Apply stereo panning using Web Audio API + if (window.AudioContext || window.webkitAudioContext) { + try { + const audioContext = new (window.AudioContext || window.webkitAudioContext)(); + const audioSource = audioContext.createMediaElementSource(audio); + const panNode = audioContext.createStereoPanner(); + + panNode.pan.value = pan; + audioSource.connect(panNode); + panNode.connect(audioContext.destination); + + // Store audio context references for cleanup + audio._audioContext = audioContext; + audio._panNode = panNode; + audio._audioSource = audioSource; + + console.log(`🎧 Spatial audio setup for ${spot.filename}: pan=${pan.toFixed(2)}`); + } catch (spatialError) { + console.warn('⚠️ Spatial audio not supported, using standard audio'); } - }); - } else { - // Try native path as last resort - const playableUrl = await localStorageService.getPlayableUrl(spot.id); - if (playableUrl) { - const el = new Audio(playableUrl); - try { await el.play(); } catch (_) {} } + + // Track audio reference + audioRefs.current.push(audio); + + // Set up event handlers + audio.onended = () => { + console.log(`🔚 ${spot.filename} ended`); + }; + + audio.onerror = (error) => { + console.error(`❌ Audio error for ${spot.filename}:`, error); + }; + + // Start playback + await audio.play(); + console.log(`▶️ Started spatial playback: ${spot.filename}`); + + return audio; + } else { + console.warn(`⚠️ No audio blob for ${spot.filename}`); + return null; } - } catch (error) {} - } - } catch (error) {} finally { + } catch (error) { + console.error(`❌ Error setting up spatial audio for ${spot.filename}:`, error); + return null; + } + }); + + // Wait for all audio to start + const activeAudios = (await Promise.all(audioPromises)).filter(audio => audio !== null); + console.log(`🎼 Started ${activeAudios.length} simultaneous spatial audio streams`); + + // Update current audio info (show the closest one) + const closestSpot = spatialSpots.reduce((closest, current) => + current.distance < closest.distance ? 
current : closest + ); + setCurrentAudio(closestSpot); + setSelectedSpot(closestSpot); + + } catch (error) { + console.error('❌ Error in spatial audio playback:', error); + isPlayingRef.current = false; + setIsPlaying(false); + } finally { setIsLoading(false); } }; const handlePlayNearby = () => { - // Check for nearby spots with current location - if (userLocation) { - checkNearbySpots(userLocation); + console.log('🎯 handlePlayNearby called'); + + // If already playing, stop playback + if (isPlaying) { + console.log('⏹️ Already playing, stopping audio'); + stopAllAudio(); + return; + } + + if (!userLocation) { + console.log('❌ No user location available'); + showAlert('Se requiere ubicación GPS para reproducir sonidos cercanos.'); + return; } - if (nearbySpots.length > 0) { + // Calculate nearby spots directly instead of relying on state + console.log('📍 User location available, calculating nearby spots directly'); + const directNearbyCheck = audioSpots.filter(spot => { + if (!spot || !spot.location) return false; + const distance = calculateDistance( + userLocation.lat, userLocation.lng, + spot.location.lat, spot.location.lng + ); + console.log(`📏 Distance to ${spot.filename}: ${distance.toFixed(1)}m`); + return distance <= 15; // Extended range for spatial mixing + }); + + console.log(`🔍 Found ${directNearbyCheck.length} nearby spots directly`); + + if (directNearbyCheck.length > 0) { + console.log('✅ Playing nearby spots:', directNearbyCheck.map(s => s.filename)); setPlaybackMode('nearby'); - playNearbySpots(nearbySpots); + playNearbySpots(directNearbyCheck); } else { - alert('No hay puntos de audio cercanos. Acércate a las grabaciones de audio o intenta un modo de reproducción diferente.'); + console.log('❌ No nearby spots found'); + showAlert('No hay puntos de audio cercanos (dentro de 15 metros). Acércate más a las grabaciones para experimentar el audio espacial.'); } }; @@ -400,14 +581,8 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer console.error('Export error:', error); // Only show error if it's not a handled fallback if (!error.message.includes('aborted') && !error.message.includes('showDirectoryPicker')) { - alert('Exportación fallida: ' + error.message); + showAlert('Exportación fallida: ' + error.message); } - return; - } - // Show a simple success message only for web fallback (Android native shows its own detailed alert) - const isNative = !!(window.Capacitor && (window.Capacitor.isNative || (window.Capacitor.isNativePlatform && window.Capacitor.isNativePlatform()))); - if (!isNative) { - alert('Exportación completada. Archivos guardados como descargas.'); } }; @@ -430,22 +605,21 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer try { const currentSession = breadcrumbService.getCurrentSession(); if (!currentSession) { - alert('No hay una sesión activa para exportar. Inicia el rastreo de migas de pan primero.'); + showAlert('No hay una sesión activa para exportar. Inicia el rastreo de migas de pan primero.'); return; } // Stop tracking to get complete session data const sessionData = breadcrumbService.stopTracking(); if (!sessionData) { - alert('Error al obtener datos de la sesión.'); + showAlert('Error al obtener datos de la sesión.'); return; } // Get associated recordings const associatedRecordings = TracklogExporter.getAssociatedRecordings(sessionData); - const summary = await TracklogExporter.exportTracklog(sessionData, associatedRecordings, 'zip'); - alert(`Tracklog exportado. 
Archivos guardados en Descargas/Downloads. Puntos: ${summary.totalBreadcrumbs}, Grabaciones asociadas: ${summary.associatedRecordings}`); + await TracklogExporter.exportTracklog(sessionData, associatedRecordings, 'zip'); // Restart tracking breadcrumbService.startTracking(); @@ -453,7 +627,7 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer } catch (error) { console.error('Error exporting tracklog:', error); - alert('Error al exportar tracklog: ' + error.message); + showAlert('Error al exportar tracklog: ' + error.message); } }; @@ -462,26 +636,62 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer await TracklogExporter.exportCurrentSession('zip'); } catch (error) { console.error('Error exporting current session:', error); - alert('Error al exportar sesión actual: ' + error.message); + showAlert('Error al exportar sesión actual: ' + error.message); } }; function stopAllAudio() { + console.log('🛑 Stopping all audio and cleaning up spatial audio'); isPlayingRef.current = false; setIsPlaying(false); setCurrentAudio(null); setSelectedSpot(null); + if (playbackTimeoutRef.current) { clearTimeout(playbackTimeoutRef.current); playbackTimeoutRef.current = null; } + audioRefs.current.forEach(audio => { try { + // Stop playback audio.pause(); audio.currentTime = 0; + + // Clean up Web Audio API contexts + if (audio._audioContext) { + try { + if (audio._audioSource) { + audio._audioSource.disconnect(); + } + if (audio._panNode) { + audio._panNode.disconnect(); + } + if (audio._audioContext.state !== 'closed') { + audio._audioContext.close(); + } + console.log('🧹 Cleaned up spatial audio context'); + } catch (contextError) { + console.warn('⚠️ Error cleaning up audio context:', contextError); + } + + // Clear references + delete audio._audioContext; + delete audio._audioSource; + delete audio._panNode; + } + + // Revoke blob URLs to prevent memory leaks + if (audio.src && audio.src.startsWith('blob:')) { + URL.revokeObjectURL(audio.src); + } audio.src = ''; - } catch (error) {} + + } catch (error) { + console.warn('⚠️ Error stopping audio:', error); + } }); + audioRefs.current = []; } @@ -512,16 +722,159 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer setIsLoading(true); await stopAllAudio(); setPlaybackMode('concatenated'); - const audioBlobs = []; - for (const spot of group) { - const blob = await localStorageService.getAudioBlobFlexible(spot.id); - if (blob) audioBlobs.push(blob); - } - if (audioBlobs.length > 0) { - const concatenatedBlob = new Blob(audioBlobs, { type: 'audio/webm' }); - await playSingleAudio(concatenatedBlob); - } - } catch (error) {} finally { + + console.log(`🔗 Starting Concatenated mode with ${group.length} files`); + + // Sort spots chronologically by timestamp + const sortedSpots = [...group].sort((a, b) => { + const timeA = new Date(a.timestamp || 0).getTime(); + const timeB = new Date(b.timestamp || 0).getTime(); + return timeA - timeB; + }); + + console.log(`📅 Sorted ${sortedSpots.length} spots chronologically`); + + // Create Web Audio API context for crossfading + const audioContext = new (window.AudioContext || window.webkitAudioContext)(); + const crossfadeDuration = 2.0; // 2 seconds crossfade + + let currentIndex = 0; + let currentAudio = null; + let currentGainNode = null; + + const playNextWithCrossfade = async () => { + if (currentIndex >= sortedSpots.length || !isPlayingRef.current) { + console.log('🏁 Concatenated playback completed'); + stopAllAudio(); + 
return; + } + + const spot = sortedSpots[currentIndex]; + const blob = await localStorageService.getAudioBlob(spot.id); + + if (!blob) { + console.warn(`⚠️ No audio blob for ${spot.filename}, skipping`); + currentIndex++; + playNextWithCrossfade(); + return; + } + + // Create new audio element + const newAudio = new Audio(URL.createObjectURL(blob)); + newAudio.volume = 0; // Start silent for Web Audio API control + audioRefs.current.push(newAudio); + + // Create Web Audio API nodes + const audioSource = audioContext.createMediaElementSource(newAudio); + const gainNode = audioContext.createGain(); + + // Connect audio graph + audioSource.connect(gainNode); + gainNode.connect(audioContext.destination); + + // Set initial gain based on whether this is first track or crossfading + const isFirstTrack = currentIndex === 0; + gainNode.gain.setValueAtTime(isFirstTrack ? (isMuted ? 0 : volume) : 0, audioContext.currentTime); + + console.log(`🎵 Starting track ${currentIndex + 1}/${sortedSpots.length}: ${spot.filename}`); + + // Start playback + await newAudio.play(); + + // Handle crossfading + if (currentAudio && currentGainNode && !isFirstTrack) { + console.log(`🌊 Starting ${crossfadeDuration}s crossfade`); + + // Fade out current track + currentGainNode.gain.setValueAtTime(isMuted ? 0 : volume, audioContext.currentTime); + currentGainNode.gain.exponentialRampToValueAtTime(0.001, audioContext.currentTime + crossfadeDuration); + + // Fade in new track + gainNode.gain.setValueAtTime(0.001, audioContext.currentTime); + gainNode.gain.exponentialRampToValueAtTime(isMuted ? 0.001 : volume, audioContext.currentTime + crossfadeDuration); + + // Stop previous audio after crossfade + setTimeout(() => { + if (currentAudio) { + currentAudio.pause(); + currentAudio.currentTime = 0; + console.log('🔇 Stopped previous track after crossfade'); + } + }, crossfadeDuration * 1000 + 100); + } else if (isFirstTrack) { + // First track - fade in from silence + gainNode.gain.setValueAtTime(0.001, audioContext.currentTime); + gainNode.gain.exponentialRampToValueAtTime(isMuted ? 
0.001 : volume, audioContext.currentTime + 0.5); + } + + // Update current references + currentAudio = newAudio; + currentGainNode = gainNode; + + // Set up next track transition + const setupNextTrack = () => { + const remainingTime = newAudio.duration - newAudio.currentTime; + + if (remainingTime <= crossfadeDuration && currentIndex < sortedSpots.length - 1) { + // Start next track for crossfade + currentIndex++; + playNextWithCrossfade(); + } else if (remainingTime <= 0.1) { + // Current track ended, move to next if no crossfade was triggered + currentIndex++; + if (currentIndex < sortedSpots.length) { + setTimeout(() => playNextWithCrossfade(), 100); + } else { + stopAllAudio(); + } + } + }; + + // Monitor playback progress + const progressInterval = setInterval(() => { + if (!isPlayingRef.current || newAudio.ended || newAudio.paused) { + clearInterval(progressInterval); + if (!isPlayingRef.current) return; + + // Track ended naturally + currentIndex++; + if (currentIndex < sortedSpots.length) { + setTimeout(() => playNextWithCrossfade(), 100); + } else { + stopAllAudio(); + } + return; + } + + setupNextTrack(); + }, 100); + + // Handle track end + newAudio.addEventListener('ended', () => { + clearInterval(progressInterval); + currentIndex++; + if (currentIndex < sortedSpots.length && isPlayingRef.current) { + setTimeout(() => playNextWithCrossfade(), 100); + } else { + stopAllAudio(); + } + }); + + // Update UI with current track info + setCurrentAudio(spot); + setSelectedSpot(spot); + }; + + // Start the concatenated playback + isPlayingRef.current = true; + setIsPlaying(true); + await playNextWithCrossfade(); + + console.log('✅ Concatenated mode playback started successfully'); + + } catch (error) { + console.error('❌ Error in Concatenated mode:', error); + } finally { setIsLoading(false); } } @@ -532,31 +885,125 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer setIsLoading(true); await stopAllAudio(); setPlaybackMode('jamm'); + + console.log(`🎼 Starting Jamm mode with ${group.length} files`); + + // Create Web Audio API context for advanced panning + const audioContext = new (window.AudioContext || window.webkitAudioContext)(); const audioElements = []; - for (const spot of group) { + const audioSources = []; + const panNodes = []; + + for (let i = 0; i < group.length; i++) { + const spot = group[i]; const blob = await localStorageService.getAudioBlob(spot.id); + if (blob) { + // Create audio element const audio = new Audio(URL.createObjectURL(blob)); - audio.volume = (isMuted ? 0 : volume) / group.length; - audio.loop = true; + audio.volume = isMuted ? 
0 : volume; + audio.loop = false; // NO LOOPING as requested audioRefs.current.push(audio); audioElements.push(audio); + + // Create Web Audio API nodes for advanced panning + const audioSource = audioContext.createMediaElementSource(audio); + const panNode = audioContext.createStereoPanner(); + + // Connect audio graph + audioSource.connect(panNode); + panNode.connect(audioContext.destination); + + audioSources.push(audioSource); + panNodes.push(panNode); + + // Get audio duration to calculate panning speed + audio.addEventListener('loadedmetadata', () => { + const duration = audio.duration; + startPanningAnimation(panNode, duration, i); + }); + + console.log(`🎵 Prepared audio ${i + 1}: ${spot.filename}`); } } + if (audioElements.length > 0) { isPlayingRef.current = true; setIsPlaying(true); - for (const audio of audioElements) { + + // Start all audio simultaneously + console.log(`▶️ Starting ${audioElements.length} simultaneous audio streams`); + const playPromises = audioElements.map(audio => { try { - await audio.play(); - } catch (error) {} - } + return audio.play(); + } catch (error) { + console.warn('Audio play failed:', error); + return Promise.resolve(); + } + }); + + await Promise.all(playPromises); + + // Set up global end handler (when any audio ends, stop all) + audioElements.forEach(audio => { + audio.addEventListener('ended', () => { + console.log('🏁 Audio ended in Jamm mode, stopping all'); + stopAllAudio(); + }); + }); + + console.log('✅ Jamm mode playback started successfully'); } - } catch (error) {} finally { + } catch (error) { + console.error('❌ Error in Jamm mode:', error); + } finally { setIsLoading(false); } } + // Panning animation function for Jamm mode + function startPanningAnimation(panNode, duration, index) { + if (!panNode || !duration || duration <= 0) return; + + const startTime = Date.now(); + const updateInterval = 50; // Update every 50ms for smooth panning + + // Each audio gets a different panning pattern + const panDirection = index % 2 === 0 ? 1 : -1; // Alternate L→R and R→L + + const panAnimation = () => { + const elapsed = (Date.now() - startTime) / 1000; // seconds + const progress = elapsed / duration; // 0 to 1 + + if (progress >= 1 || !isPlayingRef.current) { + panNode.pan.value = 0; // Center at end + return; + } + + // Calculate pan value: complete L→R→L cycle during file duration + const cycleProgress = (progress * 2) % 2; // 0 to 2 + let panValue; + + if (cycleProgress <= 1) { + // First half: L to R (or R to L) + panValue = (cycleProgress - 0.5) * 2 * panDirection; // -1 to 1 + } else { + // Second half: R to L (or L to R) + panValue = (1.5 - cycleProgress) * 2 * panDirection; // 1 to -1 + } + + // Clamp to valid range [-1, 1] + panNode.pan.value = Math.max(-1, Math.min(1, panValue)); + + // Continue animation + setTimeout(panAnimation, updateInterval); + }; + + // Start panning animation + panAnimation(); + console.log(`🎛️ Started panning animation for audio ${index + 1}, direction: ${panDirection > 0 ? 
'L→R' : 'R→L'}`); + } + function findOverlappingSpots(spot) { return audioSpots.filter(otherSpot => { if (otherSpot.id === spot.id) return false; @@ -687,14 +1134,6 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer zoomControl={false} ref={mapRef} > - {/* StadiaMaps Satellite (default) */} - - {/* OpenStreetMap Layer */} + + {/* StadiaMaps Satellite Layer */} + {userLocation && ( <> @@ -839,6 +1286,55 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer : 'No hay puntos de audio cercanos' }
+          {showDebugInfo && (
+            <div>
+              <div>📍 GPS: {userLocation ? `${userLocation.lat.toFixed(6)}, ${userLocation.lng.toFixed(6)}` : 'No disponible'}</div>
+              <div>🎵 Total spots: {audioSpots.length}</div>
+              <div>📏 Detección: 15m radio (audio espacial)</div>
+              <div>🔍 Estado: {locationPermission}</div>
+              <div>🔊 Volumen: {volume.toFixed(2)} {isMuted ? '(SILENCIADO)' : ''}</div>
+              <div>📐 Proximidad: {proximityVolumeEnabled ? 'ACTIVADA' : 'DESACTIVADA'}</div>
+              {nearbySpots.length > 0 && (
+                <div>
+                  <div>Cercanos encontrados:</div>
+                  {nearbySpots.map((spot, idx) => {
+                    const distance = userLocation ? calculateDistance(
+                      userLocation.lat, userLocation.lng,
+                      spot.location.lat, spot.location.lng
+                    ) : 0;
+                    return (
+                      <div key={idx}>• {spot.filename} ({distance.toFixed(1)}m)</div>
+                    );
+                  })}
+                </div>
+              )}
+            </div>
+          )}
@@ -872,7 +1368,7 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer }} style={{ display: 'flex', alignItems: 'center', gap: '8px', - backgroundColor: (nearbySpots.length > 0 || selectedSpot) ? '#10B981' : '#9CA3AF', + backgroundColor: (nearbySpots.length > 0 || selectedSpot) ? '#3B82F6' : '#6B7280', color: 'white', border: 'none', borderRadius: '8px', padding: '8px 16px', fontSize: '14px', cursor: (nearbySpots.length > 0 || selectedSpot) ? 'pointer' : 'not-allowed', transition: 'background-color 0.2s' }} > @@ -925,7 +1421,7 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer boxShadow: '0 4px 12px rgba(0, 0, 0, 0.1)' }} > - + Exportar Audio @@ -946,7 +1442,7 @@ const SoundWalkAndroid = ({ onBackToLanding, locationPermission: propLocationPer boxShadow: '0 4px 12px rgba(0, 0, 0, 0.1)' }} > - Exportar Tracklog + 📍 Exportar Tracklog
{isLoading && ( diff --git a/src/services/AudioRecorder.tsx b/src/services/AudioRecorder.tsx index f957290..387273b 100644 --- a/src/services/AudioRecorder.tsx +++ b/src/services/AudioRecorder.tsx @@ -4,6 +4,45 @@ import audioService from './audioService.js'; import { VoiceRecorder } from 'capacitor-voice-recorder'; import breadcrumbService from './breadcrumbService.js'; +// Custom alert function for Android without localhost text +const showAlert = (message) => { + if (window.Capacitor?.isNativePlatform()) { + // For native platforms, create a simple modal overlay + const overlay = document.createElement('div'); + overlay.style.cssText = ` + position: fixed; top: 0; left: 0; right: 0; bottom: 0; + background: rgba(0,0,0,0.7); z-index: 10000; + display: flex; align-items: center; justify-content: center; + `; + + const modal = document.createElement('div'); + modal.style.cssText = ` + background: rgba(255, 255, 255, 0.85); border-radius: 8px; padding: 20px; + max-width: 300px; margin: 20px; text-align: center; + box-shadow: 0 10px 30px rgba(0,0,0,0.3); + `; + + modal.innerHTML = ` +
+      <div>${message}</div>
+      <button>OK</button>
+ + `; + + overlay.appendChild(modal); + document.body.appendChild(overlay); + + // Close on button click or overlay click + const closeModal = () => document.body.removeChild(overlay); + modal.querySelector('button').onclick = closeModal; + overlay.onclick = (e) => e.target === overlay && closeModal(); + } else { + // For web, use regular alert + alert(message); + } +}; + // Logging utility for debugging microphone issues class AudioLogger { static logs: string[] = []; @@ -196,7 +235,7 @@ const AudioRecorder = ({ AudioLogger.log('startRecording called', { userLocation }); if (!userLocation) { AudioLogger.error('No GPS location available'); - alert('Please wait for GPS location before recording'); + showAlert('Please wait for GPS location before recording'); return; } try { @@ -218,10 +257,10 @@ const AudioRecorder = ({ } // No fallback for Android: show error AudioLogger.log('Native plugin not available, cannot record on this platform.'); - alert('Native audio recording is not available on this platform.'); + showAlert('Native audio recording is not available on this platform.'); } catch (err) { AudioLogger.error('Failed to start native recording', err); - alert('Failed to start recording: ' + (err?.message || err)); + showAlert('Failed to start recording: ' + (err?.message || err)); } }; @@ -239,7 +278,11 @@ const AudioRecorder = ({ const breadcrumbSession = breadcrumbService.stopTracking(); console.log('Breadcrumb session completed:', breadcrumbSession); - if (result?.value?.recordDataBase64) { + if (result?.value?.path) { + setNativeRecordingPath(result.value.path); + setAudioBlob(null); + setShowMetadata(true); // Show metadata form after recording + } else if (result?.value?.recordDataBase64) { // Convert base64 to Blob const base64 = result.value.recordDataBase64; const mimeType = result.value.mimeType || 'audio/aac'; @@ -253,17 +296,12 @@ const AudioRecorder = ({ setAudioBlob(blob); setNativeRecordingPath(null); setShowMetadata(true); // Show metadata form after recording - } else if (result?.value?.path) { - // Fallback: if only path is provided, keep old behavior - setNativeRecordingPath(result.value.path); - setAudioBlob(null); - setShowMetadata(true); } else { - alert('No audio file was saved.'); + showAlert('No audio file was saved.'); } } catch (err) { AudioLogger.error('Failed to stop native recording', err); - alert('Failed to stop recording: ' + (err?.message || err)); + showAlert('Failed to stop recording: ' + (err?.message || err)); } return; } @@ -315,13 +353,13 @@ const AudioRecorder = ({ const handleSave = async () => { // Validate metadata first if (!validateMetadata()) { - alert('Please fill in all required fields: Filename, Description, and Temperature.'); + showAlert('Please fill in all required fields: Filename, Description, and Temperature.'); return; } // Validate that we have actual recording data if (!nativeRecordingPath && !audioBlob) { - alert('No recording data found. Please record audio before saving.'); + showAlert('No recording data found. Please record audio before saving.'); return; } @@ -350,7 +388,7 @@ const AudioRecorder = ({ } if (!hasValidAudio) { - alert('No valid audio data found. The recording may be incomplete or corrupted. Please try recording again.'); + showAlert('No valid audio data found. The recording may be incomplete or corrupted. Please try recording again.'); return; } @@ -362,12 +400,12 @@ const AudioRecorder = ({ // Minimum recording duration check if (duration < 1) { - alert('Recording is too short (less than 1 second). 
Please record for longer.'); + showAlert('Recording is too short (less than 1 second). Please record for longer.'); return; } if ((window as any).Capacitor?.isNativePlatform()) { - if (!nativeRecordingPath && !audioBlob) { - alert('No recording to save.'); + if (!nativeRecordingPath && !audioBlob) { + showAlert('No recording to save.'); return; } // Save metadata and file path or blob @@ -398,39 +436,33 @@ const AudioRecorder = ({ }; // --- Robust validation for required fields --- if (!recordingMetadata.location || typeof recordingMetadata.location.lat !== 'number' || !isFinite(recordingMetadata.location.lat) || typeof recordingMetadata.location.lng !== 'number' || !isFinite(recordingMetadata.location.lng)) { - alert('Recording location is missing or invalid. Please ensure GPS is available.'); + showAlert('Recording location is missing or invalid. Please ensure GPS is available.'); return; } if (!recordingMetadata.filename || !recordingMetadata.filename.trim()) { - alert('Filename is required.'); + showAlert('Filename is required.'); return; } if (!recordingMetadata.timestamp) { - alert('Timestamp is missing.'); + showAlert('Timestamp is missing.'); return; } if (!recordingMetadata.duration || !isFinite(recordingMetadata.duration) || recordingMetadata.duration <= 0) { - alert('Duration is missing or invalid.'); + showAlert('Duration is missing or invalid.'); return; } // --- End robust validation --- - // Prefer webm blobs for consistent playback/storage const recordingData = { audioPath: nativeRecordingPath, audioBlob: audioBlob, - metadata: { - ...recordingMetadata, - // If we have a blob, force filename extension to .webm for consistency - filename: audioBlob ? recordingMetadata.filename.replace(/\.[^.]+$/, '') + '.webm' : recordingMetadata.filename, - mimeType: audioBlob ? 'audio/webm' : recordingMetadata.mimeType - } + metadata: recordingMetadata }; onSaveRecording(recordingData); reset(); return; } // No fallback for Android: show error - alert('Native audio recording is not available on this platform.'); + showAlert('Native audio recording is not available on this platform.'); }; const reset = () => { @@ -522,9 +554,9 @@ const AudioRecorder = ({ onClick={async () => { const success = await AudioLogger.saveLogs(); if (success) { - alert('Audio logs saved successfully! Check your downloads folder.'); + showAlert('Audio logs saved successfully! Check your downloads folder.'); } else { - alert('Failed to save logs. Check console for details.'); + showAlert('Failed to save logs. Check console for details.'); } }} style={{ diff --git a/src/services/localStorageService.js b/src/services/localStorageService.js index d985f52..f149822 100644 --- a/src/services/localStorageService.js +++ b/src/services/localStorageService.js @@ -62,6 +62,13 @@ class LocalStorageService { } const recordings = this.getAllRecordings(); + + // Check recording limit for free version + const FREE_VERSION_LIMIT = 10; + if (recordings.length >= FREE_VERSION_LIMIT) { + throw new Error(`Free version limited to ${FREE_VERSION_LIMIT} recordings. 
Upgrade to Premium for unlimited recordings.`); + } + const recordingId = recording.uniqueId || `recording-${Date.now()}`; // Add timestamp if not present @@ -174,94 +181,6 @@ class LocalStorageService { } } - /** - * Get audio blob from localStorage or fallback to native file path via Capacitor Filesystem - * @param {string} recordingId - * @returns {Promise} - */ - async getAudioBlobFlexible(recordingId) { - // Try local stored blob first - const blob = await this.getAudioBlob(recordingId); - if (blob) return blob; - - // Fallback to native file path if available - try { - const recording = this.getRecording(recordingId); - if (!recording || !recording.audioPath) return null; - const { Filesystem } = await import('@capacitor/filesystem'); - const readRes = await Filesystem.readFile({ path: recording.audioPath }); - if (!readRes || !readRes.data) return null; - const mimeType = recording.mimeType || this.inferMimeTypeFromFilename(recording.filename) || 'audio/m4a'; - const base64 = readRes.data; - return this.base64ToBlob(base64, mimeType); - } catch (e) { - console.warn('getAudioBlobFlexible: native fallback failed', e); - return null; - } - } - - /** - * Convert base64 (no data URL prefix) to Blob - */ - base64ToBlob(base64, mimeType = 'application/octet-stream') { - const byteCharacters = atob(base64); - const byteArrays = []; - for (let offset = 0; offset < byteCharacters.length; offset += 1024) { - const slice = byteCharacters.slice(offset, offset + 1024); - const byteNumbers = new Array(slice.length); - for (let i = 0; i < slice.length; i++) { - byteNumbers[i] = slice.charCodeAt(i); - } - const byteArray = new Uint8Array(byteNumbers); - byteArrays.push(byteArray); - } - return new Blob(byteArrays, { type: mimeType }); - } - - /** - * Infer MIME type from filename extension - */ - inferMimeTypeFromFilename(filename) { - if (!filename) return null; - const lower = filename.toLowerCase(); - if (lower.endsWith('.m4a') || lower.endsWith('.aac')) return 'audio/m4a'; - if (lower.endsWith('.mp3')) return 'audio/mpeg'; - if (lower.endsWith('.ogg')) return 'audio/ogg'; - if (lower.endsWith('.wav')) return 'audio/wav'; - if (lower.endsWith('.webm')) return 'audio/webm'; - return null; - } - - /** - * Get a webview-safe playable URL for a recording using its native path - * @param {string} recordingId - * @returns {Promise} web URL suitable for