Lint fix (sorry)
Commit 02b36d5bae (parent 063128e75f)
2 changed files with 207 additions and 207 deletions
@@ -3,120 +3,120 @@ import { useEffect, useRef, useState } from "react";

import { Button } from "@/components/ui/button";

interface Props {
  src: string;
  className?: string;
}

const AudioPlayer = ({ src, className = "" }: Props) => {
  const audioRef = useRef<HTMLAudioElement>(null);
  const [isPlaying, setIsPlaying] = useState(false);
  const [currentTime, setCurrentTime] = useState(0);
  const [duration, setDuration] = useState(0);
  const [isLoading, setIsLoading] = useState(true);

  useEffect(() => {
    const audio = audioRef.current;
    if (!audio) return;

    const handleLoadedMetadata = () => {
      if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
        setDuration(audio.duration);
      }
      setIsLoading(false);
    };

    const handleTimeUpdate = () => {
      setCurrentTime(audio.currentTime);
      if (duration === 0 && audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
        setDuration(audio.duration);
      }
    };

    const handleEnded = () => {
      setIsPlaying(false);
      setCurrentTime(0);
    };

    const handleLoadedData = () => {
      // For files without proper duration in metadata,
      // try to get it after some data is loaded
      if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
        setDuration(audio.duration);
        setIsLoading(false);
      }
    };

    audio.addEventListener("loadedmetadata", handleLoadedMetadata);
    audio.addEventListener("loadeddata", handleLoadedData);
    audio.addEventListener("timeupdate", handleTimeUpdate);
    audio.addEventListener("ended", handleEnded);

    return () => {
      audio.removeEventListener("loadedmetadata", handleLoadedMetadata);
      audio.removeEventListener("loadeddata", handleLoadedData);
      audio.removeEventListener("timeupdate", handleTimeUpdate);
      audio.removeEventListener("ended", handleEnded);
    };
  }, []);

  const togglePlayPause = () => {
    const audio = audioRef.current;
    if (!audio) return;

    if (isPlaying) {
      audio.pause();
    } else {
      audio.play();
    }
    setIsPlaying(!isPlaying);
  };

  const handleSeek = (e: React.ChangeEvent<HTMLInputElement>) => {
    const audio = audioRef.current;
    if (!audio) return;

    const newTime = parseFloat(e.target.value);
    audio.currentTime = newTime;
    setCurrentTime(newTime);
  };

  const formatTime = (time: number): string => {
    if (!isFinite(time) || isNaN(time)) return "0:00";

    const minutes = Math.floor(time / 60);
    const seconds = Math.floor(time % 60);
    return `${minutes}:${seconds.toString().padStart(2, "0")}`;
  };

  return (
    <div className={`flex items-center gap-2 ${className}`}>
      <audio ref={audioRef} src={src} preload="metadata" />

      <div className="flex flex-row items-center px-2 py-1 rounded-md text-secondary-foreground gap-2">
        <span className="font-mono text-sm">
          {formatTime(currentTime)} / {formatTime(duration)}
        </span>
        <Button
          variant="ghost"
          size="sm"
          onClick={togglePlayPause}
          disabled={isLoading}
          className="shrink-0 h-auto w-auto p-0.5 hover:bg-background/50"
        >
          {isPlaying ? <PauseIcon className="w-4 h-4" /> : <PlayIcon className="w-4 h-4" />}
        </Button>
        <input
          type="range"
          min="0"
          max={duration || 0}
          value={currentTime}
          onChange={handleSeek}
          disabled={isLoading || !duration}
          className="flex-1 h-1 bg-muted hover:bg-background/50 rounded-lg appearance-none cursor-pointer disabled:opacity-50 [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3 [&::-webkit-slider-thumb]:rounded-full [&::-webkit-slider-thumb]:bg-primary [&::-moz-range-thumb]:w-3 [&::-moz-range-thumb]:h-3 [&::-moz-range-thumb]:rounded-full [&::-moz-range-thumb]:bg-primary [&::-moz-range-thumb]:border-0"
        />
      </div>
    </div>
  );
};

export default AudioPlayer;
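
For reference, a minimal sketch of how the component above can be consumed, assuming the caller already has an attachment URL; the import path and the MemoAudioAttachment wrapper are hypothetical, not part of this diff:

// Hypothetical usage sketch — AudioPlayer needs only a src URL plus an optional className.
import AudioPlayer from "@/components/AudioPlayer"; // assumed path

const MemoAudioAttachment = ({ url }: { url: string }) => {
  return <AudioPlayer src={url} className="w-full max-w-md" />;
};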

@@ -1,123 +1,123 @@
import { useRef, useState } from "react";

interface AudioRecorderState {
  isRecording: boolean;
  isPaused: boolean;
  recordingTime: number;
  mediaRecorder: MediaRecorder | null;
}

export const useAudioRecorder = () => {
  const [state, setState] = useState<AudioRecorderState>({
    isRecording: false,
    isPaused: false,
    recordingTime: 0,
    mediaRecorder: null,
  });
  const chunksRef = useRef<Blob[]>([]);
  const timerRef = useRef<number | null>(null);

  const startRecording = async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      const mediaRecorder = new MediaRecorder(stream);
      chunksRef.current = [];

      mediaRecorder.ondataavailable = (e: BlobEvent) => {
        if (e.data.size > 0) {
          chunksRef.current.push(e.data);
        }
      };

      mediaRecorder.start();
      setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder }));

      timerRef.current = window.setInterval(() => {
        setState((prev) => {
          if (prev.isPaused) {
            return prev;
          }
          return { ...prev, recordingTime: prev.recordingTime + 1 };
        });
      }, 1000);
    } catch (error) {
      console.error("Error accessing microphone:", error);
      throw error;
    }
  };

  const stopRecording = (): Promise<Blob> => {
    return new Promise((resolve, reject) => {
      const { mediaRecorder } = state;
      if (!mediaRecorder) {
        reject(new Error("No active recording"));
        return;
      }

      mediaRecorder.onstop = () => {
        const blob = new Blob(chunksRef.current, { type: "audio/webm" });
        chunksRef.current = [];
        resolve(blob);
      };

      mediaRecorder.stop();
      mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());

      if (timerRef.current) {
        clearInterval(timerRef.current);
        timerRef.current = null;
      }

      setState({
        isRecording: false,
        isPaused: false,
        recordingTime: 0,
        mediaRecorder: null,
      });
    });
  };

  const cancelRecording = () => {
    const { mediaRecorder } = state;
    if (mediaRecorder) {
      mediaRecorder.stop();
      mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
    }

    if (timerRef.current) {
      clearInterval(timerRef.current);
      timerRef.current = null;
    }

    chunksRef.current = [];
    setState({
      isRecording: false,
      isPaused: false,
      recordingTime: 0,
      mediaRecorder: null,
    });
  };

  const togglePause = () => {
    const { mediaRecorder, isPaused } = state;
    if (!mediaRecorder) return;

    if (isPaused) {
      mediaRecorder.resume();
    } else {
      mediaRecorder.pause();
    }

    setState((prev) => ({ ...prev, isPaused: !prev.isPaused }));
  };

  return {
    isRecording: state.isRecording,
    isPaused: state.isPaused,
    recordingTime: state.recordingTime,
    startRecording,
    stopRecording,
    cancelRecording,
    togglePause,
  };
};
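
For reference, a minimal sketch of how the hook above can drive a record button, assuming a plain consuming component; the RecordButton name, the onRecorded prop, and the import path are hypothetical, not part of this diff. stopRecording() resolves with an audio/webm Blob, which URL.createObjectURL can turn into a src for the AudioPlayer component:

// Hypothetical usage sketch for useAudioRecorder.
import { useAudioRecorder } from "@/hooks/useAudioRecorder"; // assumed path

const RecordButton = ({ onRecorded }: { onRecorded: (url: string) => void }) => {
  const { isRecording, recordingTime, startRecording, stopRecording } = useAudioRecorder();

  const handleClick = async () => {
    if (!isRecording) {
      await startRecording(); // prompts for microphone permission
      return;
    }
    const blob = await stopRecording(); // audio/webm Blob
    onRecorded(URL.createObjectURL(blob)); // playable via <AudioPlayer src={url} />
  };

  return <button onClick={handleClick}>{isRecording ? `Stop (${recordingTime}s)` : "Record"}</button>;
};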