diff --git a/web/src/components/AudioPlayer.tsx b/web/src/components/AudioPlayer.tsx
index fdc6031dd..a0d9277dc 100644
--- a/web/src/components/AudioPlayer.tsx
+++ b/web/src/components/AudioPlayer.tsx
@@ -3,120 +3,120 @@ import { useEffect, useRef, useState } from "react";
 import { Button } from "@/components/ui/button";
 
 interface Props {
-    src: string;
-    className?: string;
+  src: string;
+  className?: string;
 }
 
 const AudioPlayer = ({ src, className = "" }: Props) => {
-    const audioRef = useRef<HTMLAudioElement>(null);
-    const [isPlaying, setIsPlaying] = useState(false);
-    const [currentTime, setCurrentTime] = useState(0);
-    const [duration, setDuration] = useState(0);
-    const [isLoading, setIsLoading] = useState(true);
+  const audioRef = useRef<HTMLAudioElement>(null);
+  const [isPlaying, setIsPlaying] = useState(false);
+  const [currentTime, setCurrentTime] = useState(0);
+  const [duration, setDuration] = useState(0);
+  const [isLoading, setIsLoading] = useState(true);
 
-    useEffect(() => {
-        const audio = audioRef.current;
-        if (!audio) return;
+  useEffect(() => {
+    const audio = audioRef.current;
+    if (!audio) return;
 
-        const handleLoadedMetadata = () => {
-            if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
-                setDuration(audio.duration);
-            }
-            setIsLoading(false);
-        };
-
-        const handleTimeUpdate = () => {
-            setCurrentTime(audio.currentTime);
-            if (duration === 0 && audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
-                setDuration(audio.duration);
-            }
-        };
-
-        const handleEnded = () => {
-            setIsPlaying(false);
-            setCurrentTime(0);
-        };
-
-        const handleLoadedData = () => {
-            // For files without proper duration in metadata,
-            // try to get it after some data is loaded
-            if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
-                setDuration(audio.duration);
-                setIsLoading(false);
-            }
-        };
-
-        audio.addEventListener("loadedmetadata", handleLoadedMetadata);
-        audio.addEventListener("loadeddata", handleLoadedData);
-        audio.addEventListener("timeupdate", handleTimeUpdate);
-        audio.addEventListener("ended", handleEnded);
-
-        return () => {
-            audio.removeEventListener("loadedmetadata", handleLoadedMetadata);
-            audio.removeEventListener("loadeddata", handleLoadedData);
-            audio.removeEventListener("timeupdate", handleTimeUpdate);
-            audio.removeEventListener("ended", handleEnded);
-        };
-    }, []);
-
-    const togglePlayPause = () => {
-        const audio = audioRef.current;
-        if (!audio) return;
-
-        if (isPlaying) {
-            audio.pause();
-        } else {
-            audio.play();
-        }
-        setIsPlaying(!isPlaying);
+    const handleLoadedMetadata = () => {
+      if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
+        setDuration(audio.duration);
+      }
+      setIsLoading(false);
     };
 
-    const handleSeek = (e: React.ChangeEvent<HTMLInputElement>) => {
-        const audio = audioRef.current;
-        if (!audio) return;
-
-        const newTime = parseFloat(e.target.value);
-        audio.currentTime = newTime;
-        setCurrentTime(newTime);
+    const handleTimeUpdate = () => {
+      setCurrentTime(audio.currentTime);
+      if (duration === 0 && audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
+        setDuration(audio.duration);
+      }
     };
 
-    const formatTime = (time: number): string => {
-        if (!isFinite(time) || isNaN(time)) return "0:00";
-
-        const minutes = Math.floor(time / 60);
-        const seconds = Math.floor(time % 60);
-        return `${minutes}:${seconds.toString().padStart(2, "0")}`;
+    const handleEnded = () => {
+      setIsPlaying(false);
+      setCurrentTime(0);
     };
 
-    return (
-
-
-    );
+    const handleLoadedData = () => {
+      // For files without proper duration in metadata,
+      // try to get it after some data is loaded
+      if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
+        setDuration(audio.duration);
+        setIsLoading(false);
+      }
+    };
+
+    audio.addEventListener("loadedmetadata", handleLoadedMetadata);
+    audio.addEventListener("loadeddata", handleLoadedData);
+    audio.addEventListener("timeupdate", handleTimeUpdate);
+    audio.addEventListener("ended", handleEnded);
+
+    return () => {
+      audio.removeEventListener("loadedmetadata", handleLoadedMetadata);
+      audio.removeEventListener("loadeddata", handleLoadedData);
+      audio.removeEventListener("timeupdate", handleTimeUpdate);
+      audio.removeEventListener("ended", handleEnded);
+    };
+  }, []);
+
+  const togglePlayPause = () => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    if (isPlaying) {
+      audio.pause();
+    } else {
+      audio.play();
+    }
+    setIsPlaying(!isPlaying);
+  };
+
+  const handleSeek = (e: React.ChangeEvent<HTMLInputElement>) => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    const newTime = parseFloat(e.target.value);
+    audio.currentTime = newTime;
+    setCurrentTime(newTime);
+  };
+
+  const formatTime = (time: number): string => {
+    if (!isFinite(time) || isNaN(time)) return "0:00";
+
+    const minutes = Math.floor(time / 60);
+    const seconds = Math.floor(time % 60);
+    return `${minutes}:${seconds.toString().padStart(2, "0")}`;
+  };
+
+  return (
+
+
+  );
 };
 
-export default AudioPlayer;
\ No newline at end of file
+export default AudioPlayer;
diff --git a/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts b/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts
index d8f671d7f..9888bf76c 100644
--- a/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts
+++ b/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts
@@ -1,123 +1,123 @@
 import { useRef, useState } from "react";
 
 interface AudioRecorderState {
-    isRecording: boolean;
-    isPaused: boolean;
-    recordingTime: number;
-    mediaRecorder: MediaRecorder | null;
+  isRecording: boolean;
+  isPaused: boolean;
+  recordingTime: number;
+  mediaRecorder: MediaRecorder | null;
 }
 
 export const useAudioRecorder = () => {
-    const [state, setState] = useState<AudioRecorderState>({
+  const [state, setState] = useState<AudioRecorderState>({
+    isRecording: false,
+    isPaused: false,
+    recordingTime: 0,
+    mediaRecorder: null,
+  });
+  const chunksRef = useRef<Blob[]>([]);
+  const timerRef = useRef<number | null>(null);
+
+  const startRecording = async () => {
+    try {
+      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+      const mediaRecorder = new MediaRecorder(stream);
+      chunksRef.current = [];
+
+      mediaRecorder.ondataavailable = (e: BlobEvent) => {
+        if (e.data.size > 0) {
+          chunksRef.current.push(e.data);
+        }
+      };
+
+      mediaRecorder.start();
+      setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder }));
+
+      timerRef.current = window.setInterval(() => {
+        setState((prev) => {
+          if (prev.isPaused) {
+            return prev;
+          }
+          return { ...prev, recordingTime: prev.recordingTime + 1 };
+        });
+      }, 1000);
+    } catch (error) {
+      console.error("Error accessing microphone:", error);
+      throw error;
+    }
+  };
+
+  const stopRecording = (): Promise<Blob> => {
+    return new Promise((resolve, reject) => {
+      const { mediaRecorder } = state;
+      if (!mediaRecorder) {
+        reject(new Error("No active recording"));
+        return;
+      }
+
+      mediaRecorder.onstop = () => {
+        const blob = new Blob(chunksRef.current, { type: "audio/webm" });
+        chunksRef.current = [];
+        resolve(blob);
+      };
+
+      mediaRecorder.stop();
+      mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
+
+      if (timerRef.current) {
+        clearInterval(timerRef.current);
+        timerRef.current = null;
+      }
+
+      setState({
         isRecording: false,
         isPaused: false,
         recordingTime: 0,
         mediaRecorder: null,
+      });
     });
-    const chunksRef = useRef<Blob[]>([]);
-    const timerRef = useRef<number | null>(null);
+  };
 
-    const startRecording = async () => {
-        try {
-            const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
-            const mediaRecorder = new MediaRecorder(stream);
-            chunksRef.current = [];
+  const cancelRecording = () => {
+    const { mediaRecorder } = state;
+    if (mediaRecorder) {
+      mediaRecorder.stop();
+      mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
+    }
 
-            mediaRecorder.ondataavailable = (e: BlobEvent) => {
-                if (e.data.size > 0) {
-                    chunksRef.current.push(e.data);
-                }
-            };
+    if (timerRef.current) {
+      clearInterval(timerRef.current);
+      timerRef.current = null;
+    }
 
-            mediaRecorder.start();
-            setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder }));
+    chunksRef.current = [];
+    setState({
+      isRecording: false,
+      isPaused: false,
+      recordingTime: 0,
+      mediaRecorder: null,
+    });
+  };
 
-            timerRef.current = window.setInterval(() => {
-                setState((prev) => {
-                    if (prev.isPaused) {
-                        return prev;
-                    }
-                    return { ...prev, recordingTime: prev.recordingTime + 1 };
-                });
-            }, 1000);
-        } catch (error) {
-            console.error("Error accessing microphone:", error);
-            throw error;
-        }
-    };
+  const togglePause = () => {
+    const { mediaRecorder, isPaused } = state;
+    if (!mediaRecorder) return;
 
-    const stopRecording = (): Promise<Blob> => {
-        return new Promise((resolve, reject) => {
-            const { mediaRecorder } = state;
-            if (!mediaRecorder) {
-                reject(new Error("No active recording"));
-                return;
-            }
+    if (isPaused) {
+      mediaRecorder.resume();
+    } else {
+      mediaRecorder.pause();
+    }
 
-            mediaRecorder.onstop = () => {
-                const blob = new Blob(chunksRef.current, { type: "audio/webm" });
-                chunksRef.current = [];
-                resolve(blob);
-            };
+    setState((prev) => ({ ...prev, isPaused: !prev.isPaused }));
+  };
 
-            mediaRecorder.stop();
-            mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
-
-            if (timerRef.current) {
-                clearInterval(timerRef.current);
-                timerRef.current = null;
-            }
-
-            setState({
-                isRecording: false,
-                isPaused: false,
-                recordingTime: 0,
-                mediaRecorder: null,
-            });
-        });
-    };
-
-    const cancelRecording = () => {
-        const { mediaRecorder } = state;
-        if (mediaRecorder) {
-            mediaRecorder.stop();
-            mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
-        }
-
-        if (timerRef.current) {
-            clearInterval(timerRef.current);
-            timerRef.current = null;
-        }
-
-        chunksRef.current = [];
-        setState({
-            isRecording: false,
-            isPaused: false,
-            recordingTime: 0,
-            mediaRecorder: null,
-        });
-    };
-
-    const togglePause = () => {
-        const { mediaRecorder, isPaused } = state;
-        if (!mediaRecorder) return;
-
-        if (isPaused) {
-            mediaRecorder.resume();
-        } else {
-            mediaRecorder.pause();
-        }
-
-        setState((prev) => ({ ...prev, isPaused: !prev.isPaused }));
-    };
-
-    return {
-        isRecording: state.isRecording,
-        isPaused: state.isPaused,
-        recordingTime: state.recordingTime,
-        startRecording,
-        stopRecording,
-        cancelRecording,
-        togglePause,
-    };
+  return {
+    isRecording: state.isRecording,
+    isPaused: state.isPaused,
+    recordingTime: state.recordingTime,
+    startRecording,
+    stopRecording,
+    cancelRecording,
+    togglePause,
+  };
 };
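Usage note (not part of the patch): a minimal sketch of how the recorder hook and the player component touched by this diff might be wired together. The RecordMemoButton component name, the "@/components/..." import paths, and the object-URL handling are illustrative assumptions, not code from this change.

// Hypothetical wiring sketch: record with useAudioRecorder, then hand the
// resulting audio/webm Blob to AudioPlayer through an object URL.
import { useEffect, useState } from "react";
import AudioPlayer from "@/components/AudioPlayer";
import { useAudioRecorder } from "@/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder";

const RecordMemoButton = () => {
  const { isRecording, recordingTime, startRecording, stopRecording } = useAudioRecorder();
  const [audioUrl, setAudioUrl] = useState<string | null>(null);

  // Revoke the previous object URL when it is replaced or the component unmounts.
  useEffect(() => {
    return () => {
      if (audioUrl) URL.revokeObjectURL(audioUrl);
    };
  }, [audioUrl]);

  const handleStop = async () => {
    const blob = await stopRecording(); // resolves with the recorded Blob
    setAudioUrl(URL.createObjectURL(blob));
  };

  return (
    <div>
      {isRecording ? (
        <button onClick={handleStop}>Stop ({recordingTime}s)</button>
      ) : (
        <button onClick={() => startRecording()}>Record</button>
      )}
      {audioUrl && <AudioPlayer src={audioUrl} />}
    </div>
  );
};

export default RecordMemoButton;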