Mirror of https://github.com/usememos/memos.git
Merge eb7f779bb8 into 1d582e0f39
This commit is contained in commit a93754e6f4.
6 changed files with 428 additions and 37 deletions
web/src/components/AudioPlayer.tsx (new file, 152 lines)

@@ -0,0 +1,152 @@
import { PauseIcon, PlayIcon } from "lucide-react";
import { useEffect, useRef, useState } from "react";
import { Button } from "@/components/ui/button";

interface Props {
  src: string;
  className?: string;
}

const AudioPlayer = ({ src, className = "" }: Props) => {
  const audioRef = useRef<HTMLAudioElement>(null);
  const [isPlaying, setIsPlaying] = useState(false);
  const [currentTime, setCurrentTime] = useState(0);
  const [duration, setDuration] = useState(0);
  const [isLoading, setIsLoading] = useState(true);

  useEffect(() => {
    const audio = audioRef.current;
    if (!audio) return;

    // Reset state when src changes
    setIsPlaying(false);
    setCurrentTime(0);
    setDuration(0);
    setIsLoading(true);

    const handleLoadedMetadata = () => {
      if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
        setDuration(audio.duration);
      }
      setIsLoading(false);
    };

    const handleTimeUpdate = () => {
      setCurrentTime(audio.currentTime);
      if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
        setDuration((prev) => (prev === 0 ? audio.duration : prev));
      }
    };

    const handleEnded = () => {
      setIsPlaying(false);
      setCurrentTime(0);
    };

    const handleLoadedData = () => {
      // For files without proper duration in metadata,
      // try to get it after some data is loaded
      if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) {
        setDuration(audio.duration);
        setIsLoading(false);
      }
    };

    audio.addEventListener("loadedmetadata", handleLoadedMetadata);
    audio.addEventListener("loadeddata", handleLoadedData);
    audio.addEventListener("timeupdate", handleTimeUpdate);
    audio.addEventListener("ended", handleEnded);

    return () => {
      audio.removeEventListener("loadedmetadata", handleLoadedMetadata);
      audio.removeEventListener("loadeddata", handleLoadedData);
      audio.removeEventListener("timeupdate", handleTimeUpdate);
      audio.removeEventListener("ended", handleEnded);
    };
  }, [src]);

  useEffect(() => {
    const handlePlayAudio = (e: Event) => {
      const customEvent = e as CustomEvent;
      if (customEvent.detail !== audioRef.current && isPlaying) {
        audioRef.current?.pause();
        setIsPlaying(false);
      }
    };

    document.addEventListener("play-audio", handlePlayAudio);
    return () => {
      document.removeEventListener("play-audio", handlePlayAudio);
    };
  }, [isPlaying]);

  const togglePlayPause = async () => {
    const audio = audioRef.current;
    if (!audio) return;

    if (isPlaying) {
      audio.pause();
      setIsPlaying(false);
    } else {
      try {
        // Stop other audio players
        const event = new CustomEvent("play-audio", { detail: audio });
        document.dispatchEvent(event);

        await audio.play();
        setIsPlaying(true);
      } catch (error) {
        console.error("Failed to play audio:", error);
        setIsPlaying(false);
      }
    }
  };

  const handleSeek = (e: React.ChangeEvent<HTMLInputElement>) => {
    const audio = audioRef.current;
    if (!audio) return;

    const newTime = parseFloat(e.target.value);
    audio.currentTime = newTime;
    setCurrentTime(newTime);
  };

  const formatTime = (time: number): string => {
    if (!isFinite(time) || isNaN(time)) return "0:00";

    const minutes = Math.floor(time / 60);
    const seconds = Math.floor(time % 60);
    return `${minutes}:${seconds.toString().padStart(2, "0")}`;
  };

  return (
    <div className={`flex items-center gap-2 ${className}`}>
      <audio ref={audioRef} src={src} preload="metadata" />
      <Button
        variant="ghost"
        size="sm"
        onClick={togglePlayPause}
        disabled={isLoading}
        className="shrink-0 p-0 h-5 w-5 hover:bg-transparent text-muted-foreground hover:text-foreground"
        aria-label={isPlaying ? "Pause audio" : "Play audio"}
      >
        {isPlaying ? <PauseIcon className="w-5 h-5" /> : <PlayIcon className="w-5 h-5" />}
      </Button>
      <input
        type="range"
        min="0"
        max={duration || 0}
        value={currentTime}
        onChange={handleSeek}
        disabled={isLoading || !duration}
        className="w-full min-w-[128px] h-1 rounded-md bg-secondary cursor-pointer appearance-none [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3 [&::-webkit-slider-thumb]:bg-primary [&::-webkit-slider-thumb]:rounded-full [&::-moz-range-thumb]:w-3 [&::-moz-range-thumb]:h-3 [&::-moz-range-thumb]:bg-primary [&::-moz-range-thumb]:border-none [&::-moz-range-thumb]:rounded-full"
        aria-label="Seek audio position"
      />
      <span className="text-sm text-muted-foreground whitespace-nowrap">
        {formatTime(currentTime)} / {formatTime(duration)}
      </span>
    </div>
  );
};

export default AudioPlayer;
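A note on the single-playback behaviour above: each AudioPlayer announces itself on a document-level "play-audio" CustomEvent before calling play(), and every other instance pauses when the event's detail is not its own audio element. A minimal sketch of how another player on the page could join the same protocol (the element id and variable name are hypothetical, not part of this change):

// Hypothetical example: let a non-AudioPlayer <audio> element participate in the
// "play-audio" exclusivity protocol used by AudioPlayer.tsx above.
const otherAudio = document.querySelector<HTMLAudioElement>("#other-player");

document.addEventListener("play-audio", (e) => {
  // Pause when some other element announced that it is about to play.
  if ((e as CustomEvent).detail !== otherAudio) {
    otherAudio?.pause();
  }
});

otherAudio?.addEventListener("play", () => {
  // Announce ourselves so that AudioPlayer instances pause in turn.
  document.dispatchEvent(new CustomEvent("play-audio", { detail: otherAudio }));
});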
@@ -1,6 +1,7 @@
import { Attachment } from "@/types/proto/api/v1/attachment_service";
import { getAttachmentUrl, isMidiFile } from "@/utils/attachment";
import AttachmentIcon from "./AttachmentIcon";
import AudioPlayer from "./AudioPlayer";

interface Props {
  attachment: Attachment;

@@ -20,7 +21,7 @@ const MemoAttachment: React.FC<Props> = (props: Props) => {
      className={`w-auto flex flex-row justify-start items-center text-muted-foreground hover:text-foreground hover:bg-accent rounded px-2 py-1 transition-colors ${className}`}
    >
      {attachment.type.startsWith("audio") && !isMidiFile(attachment.type) ? (
        <audio src={attachmentUrl} controls></audio>
        <AudioPlayer src={attachmentUrl} />
      ) : (
        <>
          <AttachmentIcon className="w-4! h-4! mr-1" attachment={attachment} />
@@ -1,8 +1,21 @@
import { LatLng } from "leaflet";
import { uniqBy } from "lodash-es";
import { FileIcon, LinkIcon, LoaderIcon, MapPinIcon, Maximize2Icon, MoreHorizontalIcon, PlusIcon } from "lucide-react";
import {
  FileIcon,
  LinkIcon,
  LoaderIcon,
  MapPinIcon,
  Maximize2Icon,
  MicIcon,
  MoreHorizontalIcon,
  PauseIcon,
  PlayIcon,
  PlusIcon,
  XIcon,
} from "lucide-react";
import { observer } from "mobx-react-lite";
import { useContext, useState } from "react";
import { toast } from "react-hot-toast";
import { Button } from "@/components/ui/button";
import {
  DropdownMenu,

@@ -13,12 +26,14 @@ import {
  DropdownMenuSubTrigger,
  DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { attachmentStore } from "@/store";
import { Attachment } from "@/types/proto/api/v1/attachment_service";
import { Location, MemoRelation } from "@/types/proto/api/v1/memo_service";
import { useTranslate } from "@/utils/i18n";
import { MemoEditorContext } from "../types";
import { LinkMemoDialog } from "./InsertMenu/LinkMemoDialog";
import { LocationDialog } from "./InsertMenu/LocationDialog";
import { useAudioRecorder } from "./InsertMenu/useAudioRecorder";
import { useFileUpload } from "./InsertMenu/useFileUpload";
import { useLinkMemo } from "./InsertMenu/useLinkMemo";
import { useLocation } from "./InsertMenu/useLocation";

@@ -52,6 +67,7 @@ const InsertMenu = observer((props: Props) => {
  });

  const location = useLocation(props.location);
  const audioRecorder = useAudioRecorder();

  const isUploading = uploadingFlag || props.isUploading;

@@ -112,8 +128,67 @@ const InsertMenu = observer((props: Props) => {
    });
  };

  const handleStopRecording = async () => {
    try {
      const blob = await audioRecorder.stopRecording();
      const filename = `recording-${Date.now()}.webm`;
      const file = new File([blob], filename, { type: "audio/webm" });
      const { name, size, type } = file;
      const buffer = new Uint8Array(await file.arrayBuffer());

      const attachment = await attachmentStore.createAttachment({
        attachment: Attachment.fromPartial({
          filename: name,
          size,
          type,
          content: buffer,
        }),
        attachmentId: "",
      });
      context.setAttachmentList([...context.attachmentList, attachment]);
    } catch (error: any) {
      console.error("Failed to upload audio recording:", error);
      toast.error(error.message || "Failed to upload audio recording");
    }
  };

  return (
    <>
      {audioRecorder.isRecording ? (
        <div className="flex flex-row items-center gap-2 mr-2">
          <div className="flex flex-row items-center px-2 py-1 rounded-md bg-red-50 text-red-600 border border-red-200">
            <div className={`w-2 h-2 rounded-full bg-red-500 mr-2 ${!audioRecorder.isPaused ? "animate-pulse" : ""}`} />
            <span className="font-mono text-sm">{new Date(audioRecorder.recordingTime * 1000).toISOString().substring(14, 19)}</span>
          </div>
          <Button
            variant="outline"
            size="icon"
            onClick={audioRecorder.togglePause}
            className="shrink-0"
            aria-label={audioRecorder.isPaused ? "Resume recording" : "Pause recording"}
          >
            {audioRecorder.isPaused ? <PlayIcon className="w-4 h-4" /> : <PauseIcon className="w-4 h-4" />}
          </Button>
          <Button
            variant="outline"
            size="icon"
            onClick={handleStopRecording}
            className="shrink-0 text-red-600 hover:text-red-700"
            aria-label="Stop and save recording"
          >
            <div className="w-3 h-3 bg-current rounded-sm" />
          </Button>
          <Button
            variant="ghost"
            size="icon"
            onClick={audioRecorder.cancelRecording}
            className="shrink-0 text-red-600 hover:text-red-700"
            aria-label="Cancel recording"
          >
            <XIcon className="w-4 h-4" />
          </Button>
        </div>
      ) : (
        <DropdownMenu>
          <DropdownMenuTrigger asChild>
            <Button variant="outline" size="icon" className="shadow-none" disabled={isUploading}>

@@ -133,6 +208,10 @@ const InsertMenu = observer((props: Props) => {
              <MapPinIcon className="w-4 h-4" />
              {t("tooltip.select-location")}
            </DropdownMenuItem>
            <DropdownMenuItem onClick={audioRecorder.startRecording}>
              <MicIcon className="w-4 h-4" />
              {t("tooltip.record-audio")}
            </DropdownMenuItem>
            {/* View submenu with Focus Mode */}
            <DropdownMenuSub>
              <DropdownMenuSubTrigger>

@@ -149,6 +228,7 @@ const InsertMenu = observer((props: Props) => {
            </DropdownMenuSub>
          </DropdownMenuContent>
        </DropdownMenu>
      )}

      {/* Hidden file input */}
      <input
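The recording badge in the hunk above derives its MM:SS label from the elapsed-seconds counter with new Date(recordingTime * 1000).toISOString().substring(14, 19). A quick worked check of that expression, with values chosen here only for illustration:

// toISOString() on an epoch offset yields "1970-01-01T00:MM:SS.sssZ";
// characters 14..18 of that string are exactly the "MM:SS" part.
new Date(75 * 1000).toISOString();                   // "1970-01-01T00:01:15.000Z"
new Date(75 * 1000).toISOString().substring(14, 19); // "01:15"

This holds for recordings shorter than an hour, since the hour field is not included in the slice.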
@@ -0,0 +1,156 @@
import { useEffect, useRef, useState } from "react";

export const useAudioRecorder = () => {
  const [isRecording, setIsRecording] = useState(false);
  const [isPaused, setIsPaused] = useState(false);
  const [recordingTime, setRecordingTime] = useState(0);

  const chunksRef = useRef<Blob[]>([]);
  const timerRef = useRef<number | null>(null);
  const durationRef = useRef<number>(0);
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);

  useEffect(() => {
    return () => {
      if (mediaRecorderRef.current) {
        mediaRecorderRef.current.stream.getTracks().forEach((track) => track.stop());
        mediaRecorderRef.current = null;
      }
      if (timerRef.current) {
        clearInterval(timerRef.current);
        timerRef.current = null;
      }
    };
  }, []);

  const startRecording = async () => {
    let stream: MediaStream | null = null;
    try {
      stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      const recorder = new MediaRecorder(stream);
      chunksRef.current = [];
      durationRef.current = 0;
      setRecordingTime(0);

      recorder.ondataavailable = (e: BlobEvent) => {
        if (e.data.size > 0) {
          chunksRef.current.push(e.data);
        }
      };

      recorder.start();
      mediaRecorderRef.current = recorder;

      setIsRecording(true);
      setIsPaused(false);

      timerRef.current = window.setInterval(() => {
        if (!mediaRecorderRef.current || mediaRecorderRef.current.state === "paused") {
          return;
        }
        durationRef.current += 1;
        setRecordingTime(durationRef.current);
      }, 1000);
    } catch (error) {
      if (stream) {
        stream.getTracks().forEach((track) => track.stop());
      }
      console.error("Error accessing microphone:", error);
      throw error;
    }
  };

  const stopRecording = (): Promise<Blob> => {
    return new Promise((resolve, reject) => {
      // Cleanup timer immediately to prevent further updates
      if (timerRef.current) {
        clearInterval(timerRef.current);
        timerRef.current = null;
      }

      const recorder = mediaRecorderRef.current;
      if (!recorder) {
        reject(new Error("No active recording"));
        return;
      }

      let isResolved = false;

      const finalize = () => {
        if (isResolved) return;
        isResolved = true;

        const blob = new Blob(chunksRef.current, { type: "audio/webm" });
        chunksRef.current = [];
        durationRef.current = 0;

        setIsRecording(false);
        setIsPaused(false);
        setRecordingTime(0);

        mediaRecorderRef.current = null;

        resolve(blob);
      };

      recorder.onstop = finalize;

      try {
        recorder.stop();
        recorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
      } catch (error) {
        // Ignore errors during stop, as we'll finalize anyway
        console.warn("Error stopping media recorder:", error);
      }

      // Safety timeout in case onstop never fires
      setTimeout(finalize, 1000);
    });
  };

  const cancelRecording = () => {
    // Cleanup timer immediately
    if (timerRef.current) {
      clearInterval(timerRef.current);
      timerRef.current = null;
    }

    const recorder = mediaRecorderRef.current;
    if (recorder) {
      recorder.stop();
      recorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop());
    }

    chunksRef.current = [];
    durationRef.current = 0;

    setIsRecording(false);
    setIsPaused(false);
    setRecordingTime(0);

    mediaRecorderRef.current = null;
  };

  const togglePause = () => {
    const recorder = mediaRecorderRef.current;
    if (!recorder) return;

    if (isPaused) {
      recorder.resume();
      setIsPaused(false);
    } else {
      recorder.pause();
      setIsPaused(true);
    }
  };

  return {
    isRecording,
    isPaused,
    recordingTime,
    startRecording,
    stopRecording,
    cancelRecording,
    togglePause,
  };
};
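For reference, the hook above returns a small imperative API (startRecording, stopRecording, cancelRecording, togglePause plus the isRecording/isPaused/recordingTime state). A minimal consumption sketch outside of InsertMenu; the RecorderButton component and its onFinish prop are hypothetical, used only to illustrate the call order:

import { useAudioRecorder } from "./useAudioRecorder";

// Hypothetical component: start on the first click, stop on the second and hand
// the resulting "audio/webm" Blob to the caller (e.g. for upload as an attachment).
const RecorderButton = ({ onFinish }: { onFinish: (blob: Blob) => void }) => {
  const recorder = useAudioRecorder();

  const handleClick = async () => {
    if (!recorder.isRecording) {
      await recorder.startRecording(); // rejects if microphone access is denied
    } else {
      onFinish(await recorder.stopRecording()); // resolves with the recorded Blob
    }
  };

  return <button onClick={handleClick}>{recorder.isRecording ? "Stop" : "Record"}</button>;
};

export default RecorderButton;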
@@ -10,6 +10,7 @@
* {
  @apply border-border outline-none ring-0;
}

body {
  @apply bg-background text-foreground;
}
@@ -479,6 +479,7 @@
    "select-location": "Location",
    "select-visibility": "Visibility",
    "tags": "Tags",
    "upload-attachment": "Upload Attachment(s)"
    "upload-attachment": "Upload Attachment(s)",
    "record-audio": "Record Audio"
  }
}