feat: add voice note renderers for kinds 1222/1244 (NIP-A0)

Implements rendering for voice messages and voice message replies:
- Kind 1222: Voice Message with audio player and optional waveform
- Kind 1244: Voice Message Reply with threading support (NIP-22)

Features:
- Custom audio player with play/pause controls
- Waveform visualization from imeta tags
- Progress bar with seek support
- Duration display from imeta or audio metadata
- Reply context display for kind 1244
This commit is contained in:
Claude
2025-12-21 11:21:01 +00:00
parent bf3d451068
commit 238ff4426c
3 changed files with 441 additions and 1 deletion

View File

@@ -0,0 +1,431 @@
import { useState, useRef, useEffect } from "react";
import { BaseEventContainer, type BaseEventProps } from "./BaseEventRenderer";
import { Mic, Play, Pause, Reply } from "lucide-react";
import { getNip10References } from "applesauce-core/helpers/threading";
import { useNostrEvent } from "@/hooks/useNostrEvent";
import { UserName } from "../UserName";
import { useGrimoire } from "@/core/state";
import { InlineReplySkeleton } from "@/components/ui/skeleton";
import { KindBadge } from "@/components/KindBadge";
import { getEventDisplayTitle } from "@/lib/event-title";
import type { NostrEvent } from "@/types/nostr";
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { RichText } from "../RichText";
import { cn } from "@/lib/utils";
/**
 * Parse voice note metadata from imeta tags (NIP-92).
 *
 * Each "imeta" tag holds space-delimited "key value" entries; we extract:
 * - waveform: space-separated amplitude integers (NIP-A0)
 * - duration: audio length in seconds
 *
 * @param event - Nostr event whose tags are scanned
 * @returns waveform amplitudes and/or duration, only when present and valid
 */
function parseVoiceNoteMetadata(event: NostrEvent): {
  waveform?: number[];
  duration?: number;
} {
  const result: { waveform?: number[]; duration?: number } = {};
  for (const tag of event.tags) {
    if (tag[0] !== "imeta") continue;
    for (let i = 1; i < tag.length; i++) {
      const parts = tag[i].split(" ");
      if (parts.length < 2) continue;
      const key = parts[0];
      const value = parts.slice(1).join(" ");
      if (key === "waveform") {
        // Waveform is space-separated amplitude integers; drop entries that
        // fail to parse so NaN never reaches the bar-height math.
        const amplitudes = value
          .split(" ")
          .map((v) => parseInt(v, 10))
          .filter((v) => Number.isFinite(v));
        if (amplitudes.length > 0) result.waveform = amplitudes;
      } else if (key === "duration") {
        const duration = parseFloat(value);
        // Ignore non-numeric or negative durations from malformed tags
        if (Number.isFinite(duration) && duration >= 0) {
          result.duration = duration;
        }
      }
    }
  }
  return result;
}
/**
 * Get the audio URL from the event content (NIP-A0).
 *
 * Per the spec the content MUST be a single URL pointing to an audio file.
 * Returns null for empty content, non-http(s) schemes, content containing
 * whitespace (cannot be one URL), or anything the URL parser rejects.
 */
function getAudioUrl(event: NostrEvent): string | null {
  const content = event.content.trim();
  // A single URL cannot contain interior whitespace
  if (!content || /\s/.test(content)) return null;
  try {
    const url = new URL(content);
    if (url.protocol === "http:" || url.protocol === "https:") {
      return content;
    }
  } catch {
    // Not a parseable URL at all
  }
  return null;
}
/**
 * Format a duration in seconds as M:SS.
 *
 * Non-finite or negative input (e.g. NaN while audio metadata is still
 * loading) renders as "0:00" instead of "NaN:NaN".
 */
function formatDuration(seconds: number): string {
  if (!Number.isFinite(seconds) || seconds < 0) return "0:00";
  const mins = Math.floor(seconds / 60);
  const secs = Math.floor(seconds % 60);
  return `${mins}:${secs.toString().padStart(2, "0")}`;
}
/**
* Waveform visualization component
*/
function WaveformVisualization({
waveform,
progress,
onClick,
}: {
waveform: number[];
progress: number; // 0-1
onClick?: (progress: number) => void;
}) {
const containerRef = useRef<HTMLDivElement>(null);
// Normalize waveform to 0-1 range
const maxAmplitude = Math.max(...waveform, 1);
const normalizedWaveform = waveform.map((v) => v / maxAmplitude);
// Limit to ~50 bars for display
const targetBars = 50;
const step = Math.max(1, Math.floor(waveform.length / targetBars));
const displayBars: number[] = [];
for (let i = 0; i < waveform.length; i += step) {
const chunk = normalizedWaveform.slice(i, i + step);
displayBars.push(Math.max(...chunk));
}
const handleClick = (e: React.MouseEvent<HTMLDivElement>) => {
if (!containerRef.current || !onClick) return;
const rect = containerRef.current.getBoundingClientRect();
const clickProgress = (e.clientX - rect.left) / rect.width;
onClick(Math.max(0, Math.min(1, clickProgress)));
};
return (
<div
ref={containerRef}
className="flex items-center gap-[2px] h-8 cursor-pointer flex-1"
onClick={handleClick}
>
{displayBars.map((amplitude, i) => {
const barProgress = i / displayBars.length;
const isPlayed = barProgress < progress;
return (
<div
key={i}
className={cn(
"w-1 rounded-full transition-colors",
isPlayed ? "bg-primary" : "bg-muted-foreground/40"
)}
style={{
height: `${Math.max(4, amplitude * 100)}%`,
}}
/>
);
})}
</div>
);
}
/**
 * Simple progress bar fallback when no waveform data is available.
 * Clicking anywhere on the track seeks to that fraction of the audio.
 */
function SimpleProgressBar({
  progress,
  onClick,
}: {
  progress: number;
  onClick?: (progress: number) => void;
}) {
  const trackRef = useRef<HTMLDivElement>(null);

  const seekFromClick = (e: React.MouseEvent<HTMLDivElement>) => {
    const track = trackRef.current;
    if (!track || !onClick) return;
    const { left, width } = track.getBoundingClientRect();
    const ratio = (e.clientX - left) / width;
    onClick(Math.min(1, Math.max(0, ratio)));
  };

  return (
    <div
      ref={trackRef}
      className="flex-1 h-2 bg-muted-foreground/20 rounded-full cursor-pointer overflow-hidden"
      onClick={seekFromClick}
    >
      <div
        className="h-full bg-primary rounded-full transition-all"
        style={{ width: `${progress * 100}%` }}
      />
    </div>
  );
}
/**
 * Voice note audio player component.
 *
 * Renders a play/pause button, a waveform (when amplitude data is present)
 * or a plain progress bar, and elapsed/total time.
 *
 * @param url - http(s) URL of the audio file
 * @param waveform - optional NIP-A0 amplitude samples for visualization
 * @param initialDuration - duration from the imeta tag, used until the
 *   browser reports the real duration from the audio metadata
 */
function VoiceNotePlayer({
  url,
  waveform,
  initialDuration,
}: {
  url: string;
  waveform?: number[];
  initialDuration?: number;
}) {
  const audioRef = useRef<HTMLAudioElement>(null);
  const [isPlaying, setIsPlaying] = useState(false);
  const [duration, setDuration] = useState(initialDuration || 0);
  const [currentTime, setCurrentTime] = useState(0);
  const [error, setError] = useState(false);
  // Reset playback state when the audio source changes so a previous load
  // failure doesn't permanently replace the player with the error card.
  useEffect(() => {
    setError(false);
    setCurrentTime(0);
    setIsPlaying(false);
  }, [url]);
  // Wire up <audio> element events once; the ref is stable across renders.
  useEffect(() => {
    const audio = audioRef.current;
    if (!audio) return;
    const handleTimeUpdate = () => setCurrentTime(audio.currentTime);
    const handleDurationChange = () => {
      if (audio.duration && !isNaN(audio.duration)) {
        setDuration(audio.duration);
      }
    };
    const handleEnded = () => {
      setIsPlaying(false);
      setCurrentTime(0);
    };
    const handleError = () => setError(true);
    const handlePlay = () => setIsPlaying(true);
    const handlePause = () => setIsPlaying(false);
    audio.addEventListener("timeupdate", handleTimeUpdate);
    audio.addEventListener("durationchange", handleDurationChange);
    audio.addEventListener("loadedmetadata", handleDurationChange);
    audio.addEventListener("ended", handleEnded);
    audio.addEventListener("error", handleError);
    audio.addEventListener("play", handlePlay);
    audio.addEventListener("pause", handlePause);
    return () => {
      audio.removeEventListener("timeupdate", handleTimeUpdate);
      audio.removeEventListener("durationchange", handleDurationChange);
      audio.removeEventListener("loadedmetadata", handleDurationChange);
      audio.removeEventListener("ended", handleEnded);
      audio.removeEventListener("error", handleError);
      audio.removeEventListener("play", handlePlay);
      audio.removeEventListener("pause", handlePause);
    };
  }, []);
  const togglePlayback = () => {
    const audio = audioRef.current;
    if (!audio) return;
    if (isPlaying) {
      audio.pause();
    } else {
      // play() returns a promise that rejects when playback is blocked
      // (e.g. autoplay policy) — swallow it to avoid an unhandled
      // rejection; load failures are surfaced via the "error" listener.
      void audio.play().catch(() => {});
    }
  };
  const handleSeek = (progress: number) => {
    const audio = audioRef.current;
    if (!audio || !duration) return;
    audio.currentTime = progress * duration;
  };
  const progress = duration > 0 ? currentTime / duration : 0;
  if (error) {
    return (
      <div className="flex items-center gap-3 p-3 border border-destructive/30 rounded-lg bg-destructive/10">
        <Mic className="w-5 h-5 text-destructive" />
        <span className="text-sm text-destructive">Failed to load audio</span>
        <a
          href={url}
          target="_blank"
          rel="noopener noreferrer"
          className="text-xs text-primary underline ml-auto"
        >
          Open in new tab
        </a>
      </div>
    );
  }
  return (
    <div className="flex items-center gap-3 p-3 border border-border rounded-lg bg-muted/20">
      <audio ref={audioRef} src={url} preload="metadata" />
      <button
        onClick={togglePlayback}
        className="flex items-center justify-center w-10 h-10 rounded-full bg-primary text-primary-foreground hover:bg-primary/90 transition-colors"
        aria-label={isPlaying ? "Pause" : "Play"}
      >
        {isPlaying ? (
          <Pause className="w-5 h-5" />
        ) : (
          <Play className="w-5 h-5 ml-0.5" />
        )}
      </button>
      {waveform && waveform.length > 0 ? (
        <WaveformVisualization
          waveform={waveform}
          progress={progress}
          onClick={handleSeek}
        />
      ) : (
        <SimpleProgressBar progress={progress} onClick={handleSeek} />
      )}
      <div className="text-xs text-muted-foreground font-mono whitespace-nowrap">
        {formatDuration(currentTime)}
        {duration > 0 && ` / ${formatDuration(duration)}`}
      </div>
    </div>
  );
}
/**
 * Compact card summarizing the parent event a voice reply points at.
 * Clicking anywhere on the card invokes the supplied handler.
 */
function ParentEventCard({
  parentEvent,
  onClickHandler,
}: {
  parentEvent: NostrEvent;
  onClickHandler: () => void;
}) {
  const { kind } = parentEvent;
  // Common note types (kind 1 text notes, kind 1222 voice notes) are
  // recognizable without a kind badge.
  const showKindBadge = kind !== 1 && kind !== 1222;

  let summary: React.ReactNode;
  if (showKindBadge) {
    summary = getEventDisplayTitle(parentEvent, false);
  } else if (kind === 1222 || kind === 1244) {
    summary = <span className="italic">Voice note</span>;
  } else {
    summary = (
      <RichText
        className="truncate line-clamp-1"
        event={parentEvent}
        options={{ showMedia: false, showEventEmbeds: false }}
      />
    );
  }

  return (
    <div
      onClick={onClickHandler}
      className="flex items-center gap-2 p-1 bg-muted/20 text-xs hover:bg-muted/30 cursor-crosshair rounded transition-colors"
    >
      <Tooltip>
        <TooltipTrigger asChild>
          <Reply className="size-3 flex-shrink-0" />
        </TooltipTrigger>
        <TooltipContent>
          <p>Replying to</p>
        </TooltipContent>
      </Tooltip>
      {showKindBadge && <KindBadge kind={kind} variant="compact" />}
      <UserName
        pubkey={parentEvent.pubkey}
        className="text-accent font-semibold flex-shrink-0"
      />
      <div className="text-muted-foreground truncate line-clamp-1 min-w-0 flex-1">
        {summary}
      </div>
    </div>
  );
}
/**
* Renderer for Kind 1222 - Voice Message (NIP-A0)
* Short voice messages with optional waveform visualization
*/
export function VoiceNoteRenderer({ event }: BaseEventProps) {
const audioUrl = getAudioUrl(event);
const { waveform, duration } = parseVoiceNoteMetadata(event);
if (!audioUrl) {
return (
<BaseEventContainer event={event}>
<div className="flex items-center gap-2 text-sm text-muted-foreground">
<Mic className="w-4 h-4" />
<span>Invalid voice note (no audio URL)</span>
</div>
</BaseEventContainer>
);
}
return (
<BaseEventContainer event={event}>
<VoiceNotePlayer
url={audioUrl}
waveform={waveform}
initialDuration={duration}
/>
</BaseEventContainer>
);
}
/**
 * Renderer for Kind 1244 - Voice Message Comment (NIP-A0)
 * Voice message replies following NIP-22 threading.
 */
export function VoiceNoteReplyRenderer({ event }: BaseEventProps) {
  const { addWindow } = useGrimoire();
  const { waveform, duration } = parseVoiceNoteMetadata(event);
  const audioUrl = getAudioUrl(event);

  // NIP-22 replies carry the same e/a tag structure the NIP-10 helpers read
  const refs = getNip10References(event);
  const parentPointer = refs.reply?.e || refs.reply?.a;
  const parentEvent = useNostrEvent(parentPointer, event);

  const openParent = () => {
    if (!parentEvent || !parentPointer) return;
    addWindow(
      "open",
      { pointer: parentPointer },
      `Reply to ${parentEvent.pubkey.slice(0, 8)}...`
    );
  };

  if (!audioUrl) {
    return (
      <BaseEventContainer event={event}>
        <div className="flex items-center gap-2 text-sm text-muted-foreground">
          <Mic className="w-4 h-4" />
          <span>Invalid voice note (no audio URL)</span>
        </div>
      </BaseEventContainer>
    );
  }

  return (
    <BaseEventContainer event={event}>
      <TooltipProvider>
        {/* Parent reference: skeleton while loading, card once resolved */}
        {parentPointer && !parentEvent && (
          <InlineReplySkeleton icon={<Reply className="size-3" />} />
        )}
        {parentPointer && parentEvent && (
          <ParentEventCard
            parentEvent={parentEvent}
            onClickHandler={openParent}
          />
        )}
      </TooltipProvider>
      <VoiceNotePlayer
        url={audioUrl}
        waveform={waveform}
        initialDuration={duration}
      />
    </BaseEventContainer>
  );
}
// Kind-numbered aliases so the renderer registry can import by event kind
export { VoiceNoteRenderer as Kind1222Renderer };
export { VoiceNoteReplyRenderer as Kind1244Renderer };

View File

@@ -35,6 +35,7 @@ import { GenericRelayListRenderer } from "./GenericRelayListRenderer";
import { LiveActivityRenderer } from "./LiveActivityRenderer";
import { LiveActivityDetailRenderer } from "./LiveActivityDetailRenderer";
import { SpellRenderer, SpellDetailRenderer } from "./SpellRenderer";
import { VoiceNoteRenderer, VoiceNoteReplyRenderer } from "./VoiceNoteRenderer";
import { NostrEvent } from "@/types/nostr";
import { BaseEventContainer, type BaseEventProps } from "./BaseEventRenderer";
@@ -57,6 +58,8 @@ const kindRenderers: Record<number, React.ComponentType<BaseEventProps>> = {
22: Kind22Renderer, // Short Video (NIP-71)
1063: Kind1063Renderer, // File Metadata (NIP-94)
1111: Kind1111Renderer, // Post (NIP-22)
1222: VoiceNoteRenderer, // Voice Message (NIP-A0)
1244: VoiceNoteReplyRenderer, // Voice Message Reply (NIP-A0)
1337: Kind1337Renderer, // Code Snippet (NIP-C0)
1617: PatchRenderer, // Patch (NIP-34)
1618: PullRequestRenderer, // Pull Request (NIP-34)
@@ -176,4 +179,10 @@ export { Kind20Renderer } from "./PictureRenderer";
export { Kind21Renderer } from "./VideoRenderer";
export { Kind22Renderer } from "./ShortVideoRenderer";
export { Kind1063Renderer } from "./FileMetadataRenderer";
export {
VoiceNoteRenderer,
VoiceNoteReplyRenderer,
Kind1222Renderer,
Kind1244Renderer,
} from "./VoiceNoteRenderer";
export { Kind9735Renderer } from "./ZapReceiptRenderer";

View File

@@ -1 +1 @@
{"root":["./vite.config.ts"],"version":"5.6.3"}
{"root":["./vite.config.ts"],"errors":true,"version":"5.9.3"}