Merge branch 'main' into codex/add-setup-files-for-backend-deployment
This commit is contained in:
commit
77be975a1b
@ -0,0 +1,6 @@
|
||||
"""Beatmatchr backend package."""
|
||||
|
||||
from .config import settings
|
||||
from .db import db_session, init_db
|
||||
|
||||
__all__ = ["settings", "db_session", "init_db"]
|
||||
32
backend/app.py
Normal file
32
backend/app.py
Normal file
@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
||||
from .db import init_db
|
||||
from .routers import audio, lyrics, media
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """Build and return the Beatmatchr FastAPI application.

    Wires up the API routers under /api, a /health probe, and a startup
    hook that creates the database schema.
    """
    application = FastAPI(title="Beatmatchr API", version="0.1.0")

    @application.on_event("startup")
    def _startup() -> None:  # pragma: no cover - FastAPI lifecycle
        # Create tables before the first request is served.
        init_db()
        logger.info("Database initialized")

    # Registration order preserved: media, audio, lyrics.
    for api_router in (media.router, audio.router, lyrics.router):
        application.include_router(api_router, prefix="/api")

    @application.get("/health")
    async def healthcheck() -> dict:
        return {"status": "ok"}

    return application


# Module-level ASGI app for uvicorn/gunicorn entry points.
app = create_app()
|
||||
@ -59,6 +59,47 @@ class Settings:
|
||||
"celery_broker_url": self.celery_broker_url,
|
||||
"celery_result_backend": self.celery_result_backend,
|
||||
}
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseSettings, Field
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration loaded from environment variables.

    Backed by pydantic BaseSettings (v1 API), so each field may also be
    supplied via an environment variable of the same name or the .env file
    named in Config below.

    NOTE(review): several defaults call os.getenv directly, which duplicates
    BaseSettings' own env handling and is evaluated once at import time —
    confirm intentional. Also confirm the full file imports `os` and that an
    older `class Settings` (visible in the diff context above) was removed;
    neither is visible in this chunk.
    """

    # Async-driver URL used by the API layer.
    database_url: str = Field(
        default="sqlite+aiosqlite:///./beatmatchr.db",
        description="SQLAlchemy database URL",
    )
    # Sync URL consumed by backend/db.py for worker-side access.
    sync_database_url: Optional[str] = Field(
        default="sqlite:///./beatmatchr.db",
        description="Optional sync URL for background workers",
    )
    storage_base_path: Path = Field(
        default=Path(os.getenv("BEATMATCHR_STORAGE", "./storage")),
        description="Base path for file storage when using local filesystem backend.",
    )
    celery_broker_url: str = Field(
        default=os.getenv("CELERY_BROKER_URL", "redis://localhost:6379/0"),
        description="Broker URL for Celery workers.",
    )
    celery_result_backend: str = Field(
        default=os.getenv("CELERY_RESULT_BACKEND", "redis://localhost:6379/0"),
        description="Result backend URL for Celery workers.",
    )
    # None disables transcription; TranscriptionClient raises if unset.
    transcription_api_url: Optional[str] = Field(
        default=os.getenv("TRANSCRIPTION_API_URL"),
        description="External transcription service endpoint.",
    )
    transcription_api_key: Optional[str] = Field(
        default=os.getenv("TRANSCRIPTION_API_KEY"),
        description="API key for transcription service if required.",
    )

    class Config:
        # pydantic v1 settings: read overrides from a local .env file.
        env_file = ".env"
        env_file_encoding = "utf-8"
|
||||
|
||||
|
||||
@lru_cache()
def get_settings() -> Settings:
    """Return a cached application settings instance.

    The merged source contained conflict residue: a dead ``return Settings()``
    before the storage-directory setup, plus a duplicated docstring. This is
    the single coherent body: build the settings once (lru_cache makes it a
    process-wide singleton), ensure the storage root exists, then return it.
    """
    settings = Settings()
    # Create the storage root up front; harmless when it already exists.
    base_path = Path(settings.storage_base_path)
    base_path.mkdir(parents=True, exist_ok=True)
    return settings


# Module-level singleton imported throughout the backend.
settings = get_settings()
|
||||
|
||||
37
backend/db.py
Normal file
37
backend/db.py
Normal file
@ -0,0 +1,37 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import contextmanager
|
||||
from typing import Iterator
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import declarative_base, sessionmaker, Session
|
||||
|
||||
from .config import settings
|
||||
|
||||
|
||||
# Prefer the explicit sync URL when configured.
# NOTE(review): if sync_database_url is unset, this falls back to
# database_url, whose default is an async (sqlite+aiosqlite) URL that a
# plain create_engine cannot use — confirm sync_database_url is always set.
SYNC_DATABASE_URL = settings.sync_database_url or settings.database_url

# future=True opts into SQLAlchemy 2.0-style engine behavior.
engine = create_engine(SYNC_DATABASE_URL, future=True)
# expire_on_commit=False keeps ORM attributes readable after commit/close.
SessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False, expire_on_commit=False)

# Declarative base class shared by all ORM models.
Base = declarative_base()
|
||||
|
||||
|
||||
def init_db() -> None:
    """Initialise the schema by creating every table registered on Base.

    Idempotent: create_all skips tables that already exist.
    """
    Base.metadata.create_all(bind=engine)
|
||||
|
||||
|
||||
@contextmanager
def db_session() -> Iterator[Session]:
    """Provide a transactional scope around a series of operations.

    Rolls back on any exception (re-raising it) and always closes the
    session afterwards.
    """
    sess: Session = SessionLocal()
    try:
        yield sess
    except Exception:
        # Undo partial work before propagating to the caller.
        sess.rollback()
        raise
    finally:
        sess.close()
|
||||
83
backend/models.py
Normal file
83
backend/models.py
Normal file
@ -0,0 +1,83 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Column, DateTime, Float, ForeignKey, JSON, String, Text, Integer
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from .db import Base, db_session
|
||||
|
||||
|
||||
class TimestampMixin:
    """Adds created/updated audit columns to a model."""

    # Set once when the row is first inserted.
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    # Refreshed by SQLAlchemy on every UPDATE via onupdate.
    # NOTE(review): datetime.utcnow yields naive timestamps — confirm the
    # rest of the stack treats these as UTC.
    updated_at = Column(
        DateTime,
        default=datetime.utcnow,
        onupdate=datetime.utcnow,
        nullable=False,
    )
|
||||
|
||||
|
||||
class Project(Base, TimestampMixin):
    """Top-level container that owns audio tracks, source clips, and lyrics."""

    __tablename__ = "projects"

    # UUID4 stored as a string primary key.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    name = Column(String, nullable=False)

    # Children are removed alongside the project (delete-orphan cascade).
    audio_tracks = relationship("AudioTrack", back_populates="project", cascade="all, delete-orphan")
    source_clips = relationship("SourceClip", back_populates="project", cascade="all, delete-orphan")
    # uselist=False: at most one Lyrics row per project.
    lyrics = relationship("Lyrics", back_populates="project", uselist=False, cascade="all, delete-orphan")
|
||||
|
||||
|
||||
class AudioTrack(Base, TimestampMixin):
    """An uploaded song for a project, plus asynchronously filled analysis."""

    __tablename__ = "audio_tracks"

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    project_id = Column(String, ForeignKey("projects.id"), nullable=False, index=True)
    # Relative path inside the storage backend (see services/storage.py).
    storage_path = Column(Text, nullable=False)
    # NOTE(review): local_path usage is not visible here — presumably a
    # worker-local cache of the file; verify against the workers.
    local_path = Column(Text, nullable=True)
    # Null until the audio-analysis task populates them.
    duration_seconds = Column(Float, nullable=True)
    bpm = Column(Float, nullable=True)
    # JSON list of beat times in seconds (see services/audio_analysis.py).
    beat_grid = Column(JSON, nullable=True)

    project = relationship("Project", back_populates="audio_tracks")
|
||||
|
||||
|
||||
class SourceClip(Base, TimestampMixin):
    """A video clip ingested into a project, via URL or direct upload."""

    __tablename__ = "source_clips"

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    project_id = Column(String, ForeignKey("projects.id"), nullable=False, index=True)
    # "upload" for direct uploads; otherwise the ingest origin label (e.g. "url").
    origin = Column(String, nullable=False)
    # Source URL when ingested remotely; null for direct uploads.
    original_url = Column(Text, nullable=True)
    storage_path = Column(Text, nullable=False)
    thumbnail_path = Column(Text, nullable=True)
    # Video metadata filled by ffprobe during ingest; null when unknown.
    duration_seconds = Column(Float, nullable=True)
    width = Column(Integer, nullable=True)
    height = Column(Integer, nullable=True)
    fps = Column(Float, nullable=True)

    project = relationship("Project", back_populates="source_clips")
|
||||
|
||||
|
||||
class Lyrics(Base, TimestampMixin):
    """Transcribed or user-edited lyrics; one row per project."""

    __tablename__ = "lyrics"

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    # unique=True enforces the one-lyrics-per-project relationship.
    project_id = Column(String, ForeignKey("projects.id"), nullable=False, unique=True, index=True)
    # Provenance label; concrete values are set by callers not visible here.
    source = Column(String, nullable=False)
    raw_text = Column(Text, nullable=False)
    # JSON word-level timing entries ({"start", "end", "word"}).
    timed_words = Column(JSON, nullable=True)
    # JSON line-level timing entries ({"start", "end", "text"}).
    timed_lines = Column(JSON, nullable=True)

    project = relationship("Project", back_populates="lyrics")
|
||||
|
||||
|
||||
# Public ORM surface of this module; db_session is re-exported for
# convenience alongside the model classes.
__all__ = [
    "Project",
    "AudioTrack",
    "SourceClip",
    "Lyrics",
    "db_session",
]
|
||||
5
backend/routers/__init__.py
Normal file
5
backend/routers/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
"""API routers for the Beatmatchr service."""
|
||||
|
||||
from . import audio, lyrics, media
|
||||
|
||||
__all__ = ["audio", "lyrics", "media"]
|
||||
54
backend/routers/audio.py
Normal file
54
backend/routers/audio.py
Normal file
@ -0,0 +1,54 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
|
||||
from fastapi import APIRouter, File, HTTPException, UploadFile, status
|
||||
|
||||
from ..db import db_session
|
||||
from ..models import AudioTrack, Project
|
||||
from ..services import storage
|
||||
from ..workers.tasks import task_analyze_audio, task_transcribe_lyrics
|
||||
|
||||
router = APIRouter(prefix="/projects/{project_id}/audio", tags=["audio"])


@router.post("", status_code=status.HTTP_201_CREATED)
async def upload_audio(project_id: str, file: UploadFile = File(...)) -> dict:
    """Store an uploaded audio file and queue analysis + transcription.

    Returns the new audio track id, its project, and its storage path.
    Raises 400 for non-audio uploads and 404 for unknown projects.
    """
    content_type = file.content_type or ""
    if not content_type.startswith("audio/"):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Uploaded file must be audio")

    audio_id = str(uuid.uuid4())
    if file.filename and "." in file.filename:
        extension = "." + file.filename.rsplit(".", 1)[-1]
    else:
        extension = ".mp3"
    storage_dest = f"audio/{project_id}/{audio_id}{extension}"
    storage_path = ""

    try:
        with db_session() as session:
            project = session.query(Project).filter_by(id=project_id).one_or_none()
            if project is None:
                raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Project not found")

            # Rewind: FastAPI may have consumed part of the spooled file.
            file.file.seek(0)
            storage_path = storage.upload_file(file.file, storage_dest)

            session.add(
                AudioTrack(
                    id=audio_id,
                    project_id=project_id,
                    storage_path=storage_path,
                )
            )
            session.commit()
    finally:
        file.file.close()

    if not storage_path:
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to store audio file")

    # Kick off background work only after the row is committed.
    task_analyze_audio.delay(audio_track_id=audio_id)
    task_transcribe_lyrics.delay(project_id=project_id, audio_track_id=audio_id)

    return {
        "audio_track_id": audio_id,
        "project_id": project_id,
        "storage_path": storage_path,
    }
|
||||
59
backend/routers/lyrics.py
Normal file
59
backend/routers/lyrics.py
Normal file
@ -0,0 +1,59 @@
|
||||
from __future__ import annotations

from fastapi import APIRouter, HTTPException, status

from ..db import db_session
from ..models import Lyrics, Project
|
||||
|
||||
router = APIRouter(prefix="/projects/{project_id}/lyrics", tags=["lyrics"])
|
||||
|
||||
|
||||
@router.get("")
def get_lyrics(project_id: str) -> dict:
    """Return stored lyrics (raw text plus word/line timings) for a project.

    Raises 404 when either the project or its lyrics row is missing.
    """
    with db_session() as session:
        if session.query(Project).filter_by(id=project_id).one_or_none() is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Project not found")

        lyrics = session.query(Lyrics).filter_by(project_id=project_id).one_or_none()
        if lyrics is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Lyrics not found")

        return {
            "project_id": lyrics.project_id,
            "source": lyrics.source,
            "raw_text": lyrics.raw_text,
            "timed_lines": lyrics.timed_lines or [],
            "timed_words": lyrics.timed_words or [],
            "created_at": lyrics.created_at,
            "updated_at": lyrics.updated_at,
        }
|
||||
|
||||
|
||||
@router.put("")
def update_lyrics(project_id: str, payload: dict) -> dict:
    """Replace the raw lyrics text for a project and return the updated record.

    Raises 400 when "raw_text" is absent/blank, 404 when the project or its
    lyrics row is missing. Timed data is returned unchanged.
    """
    new_text = payload.get("raw_text")
    if not isinstance(new_text, str) or not new_text.strip():
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="raw_text must be provided")

    with db_session() as session:
        if session.query(Project).filter_by(id=project_id).one_or_none() is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Project not found")

        lyrics = session.query(Lyrics).filter_by(project_id=project_id).one_or_none()
        if lyrics is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Lyrics not found")

        lyrics.raw_text = new_text.strip()
        session.commit()

        return {
            "project_id": lyrics.project_id,
            "source": lyrics.source,
            "raw_text": lyrics.raw_text,
            "timed_lines": lyrics.timed_lines or [],
            "timed_words": lyrics.timed_words or [],
            "created_at": lyrics.created_at,
            "updated_at": lyrics.updated_at,
        }
|
||||
108
backend/routers/media.py
Normal file
108
backend/routers/media.py
Normal file
@ -0,0 +1,108 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter, File, HTTPException, UploadFile, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..db import db_session
|
||||
from ..models import Project, SourceClip
|
||||
from ..services import storage
|
||||
from ..workers.tasks import task_ingest_url, task_process_uploaded_video
|
||||
|
||||
router = APIRouter(prefix="/projects/{project_id}/source-clips", tags=["source-clips"])
|
||||
|
||||
|
||||
def get_project(session: Session, project_id: str) -> Project:
    """Fetch a project by id or abort the request with a 404."""
    found = session.query(Project).filter_by(id=project_id).one_or_none()
    if found is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Project not found")
    return found
|
||||
|
||||
|
||||
@router.post("/urls")
def enqueue_url_ingest(project_id: str, payload: dict) -> dict:
    """Validate a batch of URLs and queue one Celery ingest task per URL.

    Raises 400 on an empty/non-list "urls" payload or any blank entry,
    404 when the project does not exist.
    """
    urls = payload.get("urls") or []
    if not isinstance(urls, list) or not urls:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="urls must be a non-empty list")
    origin = payload.get("origin") or "url"

    if any(not isinstance(entry, str) or not entry.strip() for entry in urls):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="All URLs must be non-empty strings")

    # Only verify the project exists; the heavy lifting happens in workers.
    with db_session() as session:
        get_project(session, project_id)

    for url in urls:
        task_ingest_url.delay(project_id=project_id, input_url=url, origin=origin)

    return {"status": "queued", "count": len(urls)}
|
||||
|
||||
|
||||
@router.post("/upload", status_code=status.HTTP_201_CREATED)
async def upload_source_clip(project_id: str, file: UploadFile = File(...)) -> dict:
    """Store an uploaded video and queue background processing.

    Returns the new clip id and storage path. Raises 400 for non-video
    uploads and 404 for unknown projects.
    """
    content_type = file.content_type or ""
    if not content_type.startswith("video/"):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Uploaded file must be a video")

    clip_id = str(uuid.uuid4())
    if file.filename and "." in file.filename:
        extension = "." + file.filename.rsplit(".", 1)[-1]
    else:
        extension = ".mp4"
    storage_dest = f"videos/{project_id}/{clip_id}{extension}"
    storage_path = ""

    try:
        with db_session() as session:
            get_project(session, project_id)

            # Rewind: FastAPI may have consumed part of the spooled file.
            file.file.seek(0)
            storage_path = storage.upload_file(file.file, storage_dest)

            session.add(
                SourceClip(
                    id=clip_id,
                    project_id=project_id,
                    origin="upload",
                    original_url=None,
                    storage_path=storage_path,
                )
            )
            session.commit()
    finally:
        file.file.close()

    if not storage_path:
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to store uploaded file")

    # Metadata/thumbnail extraction happens asynchronously.
    task_process_uploaded_video.delay(source_clip_id=clip_id)

    return {
        "id": clip_id,
        "project_id": project_id,
        "storage_path": storage_path,
        "origin": "upload",
        "status": "processing",
    }
|
||||
|
||||
|
||||
@router.get("")
def list_source_clips(project_id: str) -> List[dict]:
    """List a project's source clips, oldest first, as plain dicts."""
    with db_session() as session:
        get_project(session, project_id)
        clips = (
            session.query(SourceClip)
            .filter_by(project_id=project_id)
            .order_by(SourceClip.created_at.asc())
            .all()
        )

        def _serialize(clip: SourceClip) -> dict:
            # Flatten the ORM row for the JSON response.
            return {
                "id": clip.id,
                "origin": clip.origin,
                "original_url": clip.original_url,
                "storage_path": clip.storage_path,
                "thumbnail_path": clip.thumbnail_path,
                "duration_seconds": clip.duration_seconds,
                "width": clip.width,
                "height": clip.height,
                "fps": clip.fps,
                "created_at": clip.created_at,
                "updated_at": clip.updated_at,
            }

        return [_serialize(clip) for clip in clips]
|
||||
34
backend/services/audio_analysis.py
Normal file
34
backend/services/audio_analysis.py
Normal file
@ -0,0 +1,34 @@
|
||||
from __future__ import annotations

import logging
from typing import Dict, List

try:
    import librosa
except ImportError:  # pragma: no cover - optional dependency
    librosa = None

logger = logging.getLogger(__name__)


def analyze_audio(local_path: str) -> Dict[str, object]:
    """Analyze an audio file for duration, BPM, and beat grid.

    When librosa is unavailable the function degrades to deterministic stub
    values so the rest of the pipeline keeps working.
    """
    if librosa is None:
        logger.warning("librosa not available; returning stubbed audio analysis values")
        # Fixed 120 BPM grid: a beat every half second.
        return {
            "duration_seconds": 0.0,
            "bpm": 120.0,
            "beat_grid": [beat * 0.5 for beat in range(16)],
        }

    samples, sample_rate = librosa.load(local_path)
    tempo, beat_frames = librosa.beat.beat_track(y=samples, sr=sample_rate)
    grid: List[float] = librosa.frames_to_time(beat_frames, sr=sample_rate).tolist()

    return {
        "duration_seconds": float(librosa.get_duration(y=samples, sr=sample_rate)),
        "bpm": float(tempo),
        "beat_grid": grid,
    }
|
||||
128
backend/services/lyrics_from_audio.py
Normal file
128
backend/services/lyrics_from_audio.py
Normal file
@ -0,0 +1,128 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import requests
|
||||
|
||||
from ..config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class TranscriptionResult:
    """Result of a transcription call: full text plus word timings."""

    # Full transcript text, stripped of surrounding whitespace.
    raw_text: str
    # Word-level entries: {"start": float, "end": float, "word": str}.
    words: List[Dict[str, Any]]
|
||||
|
||||
|
||||
class TranscriptionClient:
    """Client for an external transcription API that returns word timestamps.

    Configuration falls back to settings.transcription_api_url / _api_key
    when not supplied explicitly.
    """

    def __init__(
        self,
        api_url: Optional[str] = None,
        api_key: Optional[str] = None,
        timeout: int = 300,  # seconds; transcription of long audio is slow
    ) -> None:
        self.api_url = api_url or settings.transcription_api_url
        self.api_key = api_key or settings.transcription_api_key
        self.timeout = timeout

    def transcribe(self, audio_path: str) -> TranscriptionResult:
        """Upload *audio_path* to the service and normalize the response.

        Raises:
            RuntimeError: when no API URL is configured.
            FileNotFoundError: when the audio file is missing.
            requests.HTTPError: on a non-2xx API response.
            ValueError: when the response has no usable text field.
        """
        if not self.api_url:
            raise RuntimeError(
                "Transcription API URL is not configured. Set TRANSCRIPTION_API_URL to enable lyrics extraction."
            )

        file_path = Path(audio_path)
        if not file_path.exists():
            raise FileNotFoundError(f"Audio file not found: {audio_path}")

        headers = {}
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"

        with file_path.open("rb") as audio_file:
            files = {"file": (file_path.name, audio_file, "application/octet-stream")}
            # Ask the service for word-granularity timestamps.
            data = {"timestamps": "word"}
            response = requests.post(
                self.api_url,
                headers=headers,
                data=data,
                files=files,
                timeout=self.timeout,
            )
        response.raise_for_status()

        payload = response.json()
        # Accept either response schema; note an empty string in "text"
        # also falls through to "raw_text" because of the `or`.
        raw_text = payload.get("text") or payload.get("raw_text")
        if raw_text is None:
            raise ValueError("Transcription response missing 'text' field")

        words = payload.get("words") or []
        normalized_words: List[Dict[str, Any]] = []
        for word in words:
            try:
                # Coerce to the canonical {"start", "end", "word"} shape;
                # the service may use "word" or "text" for the token.
                normalized_words.append(
                    {
                        "start": float(word["start"]),
                        "end": float(word["end"]),
                        "word": str(word.get("word") or word.get("text") or "").strip(),
                    }
                )
            except (KeyError, TypeError, ValueError) as exc:
                # Best-effort: drop malformed entries rather than failing the job.
                logger.warning("Skipping malformed word entry %s: %s", word, exc)
        return TranscriptionResult(raw_text=raw_text.strip(), words=normalized_words)
|
||||
|
||||
|
||||
def words_to_lines(words: List[Dict[str, Any]], max_silence_gap: float = 0.7) -> List[Dict[str, Any]]:
    """Group word-level timestamps into line-level segments.

    Words are sorted by start time; a new line begins whenever the silence
    between consecutive words exceeds ``max_silence_gap`` seconds. Each line
    is {"start", "end", "text"}.
    """
    if not words:
        return []

    ordered = sorted(words, key=lambda entry: entry["start"])

    def _flush(chunk: List[Dict[str, Any]]) -> Dict[str, Any]:
        # Collapse a run of words into one line entry.
        return {
            "start": float(chunk[0]["start"]),
            "end": float(chunk[-1]["end"]),
            "text": " ".join(item["word"] for item in chunk).strip(),
        }

    lines: List[Dict[str, Any]] = []
    chunk: List[Dict[str, Any]] = [ordered[0]]
    for prev, curr in zip(ordered, ordered[1:]):
        if float(curr["start"]) - float(prev["end"]) > max_silence_gap:
            lines.append(_flush(chunk))
            chunk = [curr]
        else:
            chunk.append(curr)
    # The running chunk is never empty here, so flush unconditionally.
    lines.append(_flush(chunk))
    return lines
|
||||
|
||||
|
||||
def transcribe_audio_to_lyrics(local_audio_path: str) -> Dict[str, Any]:
    """Transcribe an audio file into lyrics with word and line timings.

    Returns {"raw_text", "words", "lines"} where lines are derived from the
    word timestamps via words_to_lines.
    """
    transcription = TranscriptionClient().transcribe(local_audio_path)
    return {
        "raw_text": transcription.raw_text,
        "words": transcription.words,
        "lines": words_to_lines(transcription.words),
    }
|
||||
220
backend/services/media_ingest.py
Normal file
220
backend/services/media_ingest.py
Normal file
@ -0,0 +1,220 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, List
|
||||
|
||||
import requests
|
||||
from PIL import Image
|
||||
|
||||
from ..db import db_session
|
||||
from ..models import Project, SourceClip
|
||||
from . import storage
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def resolve_media_urls_from_input(url: str) -> List[str]:
    """Resolve direct media URLs for a user-provided URL using yt-dlp.

    Falls back to returning the input URL unchanged when yt-dlp is missing
    or fails, so ingestion can still attempt a direct download.
    """
    command = ["yt-dlp", "--dump-json", "--skip-download", url]
    try:
        completed = subprocess.run(command, check=False, capture_output=True, text=True)
    except FileNotFoundError:
        logger.warning("yt-dlp not installed; returning provided URL directly")
        return [url]

    if completed.returncode != 0:
        logger.error("yt-dlp failed for %s: %s", url, completed.stderr.strip())
        return [url]

    resolved: List[str] = []
    # yt-dlp emits one JSON document per line (one per playlist entry).
    for raw_line in completed.stdout.strip().splitlines():
        try:
            entry = json.loads(raw_line)
        except json.JSONDecodeError:
            continue
        candidate = entry.get("url") or entry.get("webpage_url")
        if candidate:
            resolved.append(candidate)
    return resolved or [url]
|
||||
|
||||
|
||||
def download_media_file(media_url: str) -> str:
    """Stream a remote media file into a temp file; return the local path.

    The caller owns the file and must delete it when done (delete=False).
    """
    response = requests.get(media_url, stream=True, timeout=60)
    response.raise_for_status()

    # Keep the URL's suffix so downstream tools can sniff the container.
    suffix = Path(media_url).suffix or ".mp4"
    with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as handle:
        for chunk in response.iter_content(chunk_size=8192):
            if chunk:
                handle.write(chunk)
        return handle.name
|
||||
|
||||
|
||||
def extract_video_metadata(local_path: str) -> Dict[str, float | int | None]:
    """Probe duration, dimensions, and frame rate of a video via ffprobe.

    Raises:
        RuntimeError: if ffprobe exits non-zero.
    """
    probe = subprocess.run(
        [
            "ffprobe",
            "-v",
            "error",
            "-select_streams",
            "v:0",
            "-show_entries",
            "stream=width,height,r_frame_rate:format=duration",
            "-of",
            "json",
            local_path,
        ],
        capture_output=True,
        text=True,
        check=False,
    )
    if probe.returncode != 0:
        raise RuntimeError(f"ffprobe failed: {probe.stderr}")

    parsed = json.loads(probe.stdout)
    stream = (parsed.get("streams") or [{}])[0]
    container = parsed.get("format") or {}

    # r_frame_rate arrives as a fraction string such as "30000/1001".
    try:
        num, den = stream.get("r_frame_rate", "0/1").split("/")
        fps = float(num) / float(den) if float(den) else None
    except (ValueError, ZeroDivisionError):
        fps = None

    duration_raw = container.get("duration")
    return {
        "duration_seconds": float(duration_raw) if duration_raw else None,
        "width": stream.get("width"),
        "height": stream.get("height"),
        "fps": fps,
    }
|
||||
|
||||
|
||||
def generate_thumbnail(local_video_path: str, time_seconds: float = 0.5) -> bytes:
    """Generate a JPEG thumbnail for a video clip using ffmpeg.

    Grabs one frame at ``time_seconds``, scales it to 480px wide with PIL,
    and returns the JPEG bytes. Temporary files are always cleaned up.

    Raises:
        RuntimeError: if ffmpeg fails or the resized image cannot be created.
    """
    resized_path: str | None = None
    # Reserve a temp path for the raw ffmpeg frame; the handle closes on
    # leaving the with-block so ffmpeg can write to the path.
    with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as temp_image:
        temp_image_path = temp_image.name

    try:
        command = [
            "ffmpeg",
            # BUGFIX: the output path already exists (created just above);
            # without -y ffmpeg refuses to overwrite it and always fails.
            "-y",
            "-ss",
            str(time_seconds),
            "-i",
            local_video_path,
            "-frames:v",
            "1",
            "-q:v",
            "2",
            temp_image_path,
        ]
        result = subprocess.run(command, capture_output=True, check=False)
        if result.returncode != 0:
            # stderr is bytes (no text=True); decode for a readable message.
            raise RuntimeError(
                f"ffmpeg thumbnail generation failed: {result.stderr.decode(errors='replace')}"
            )

        with Image.open(temp_image_path) as img:
            # Fixed 480px width; height scaled to preserve aspect ratio.
            width = 480
            ratio = width / float(img.width)
            resized = img.resize((width, int(img.height * ratio)))
            with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as resized_file:
                resized.save(resized_file.name, format="JPEG", quality=90)
                resized_path = resized_file.name
        if resized_path is None:
            raise RuntimeError("Failed to create thumbnail image")
        with open(resized_path, "rb") as thumbnail_file:
            data = thumbnail_file.read()
    finally:
        # Best-effort cleanup of both temp files.
        if os.path.exists(temp_image_path):
            os.remove(temp_image_path)
        if resized_path and os.path.exists(resized_path):
            os.remove(resized_path)
    return data
|
||||
|
||||
|
||||
def ingest_single_media_url(project_id: str, input_url: str, origin: str = "url") -> List[Dict]:
    """Ingest media from a URL and persist SourceClip entries.

    Resolves the input URL to one or more direct media URLs, downloads each,
    extracts metadata and a thumbnail, uploads both to storage, and records
    one SourceClip row per media file. Returns plain-dict summaries.

    Raises:
        ValueError: if the project does not exist.

    NOTE(review): the DB session stays open across downloads and uploads, so
    a slow ingest holds a connection for its full duration — confirm this is
    acceptable for the configured pool.
    """
    media_urls = resolve_media_urls_from_input(input_url)
    created_clips: List[Dict] = []

    with db_session() as session:
        project = session.query(Project).filter_by(id=project_id).one_or_none()
        if project is None:
            raise ValueError(f"Project {project_id} does not exist")

        for media_url in media_urls:
            local_path = download_media_file(media_url)
            try:
                metadata = extract_video_metadata(local_path)
                thumbnail_bytes = generate_thumbnail(local_path)

                clip_id = str(uuid.uuid4())
                extension = Path(local_path).suffix or ".mp4"
                storage_dest = f"videos/{project_id}/{clip_id}{extension}"
                thumb_dest = f"thumbnails/{project_id}/{clip_id}.jpg"

                with open(local_path, "rb") as infile:
                    storage_path = storage.upload_file(infile, storage_dest)
                thumbnail_path = storage.upload_bytes(thumbnail_bytes, thumb_dest, content_type="image/jpeg")

                # Set both timestamps explicitly so they match exactly.
                now = datetime.utcnow()
                clip = SourceClip(
                    id=clip_id,
                    project_id=project_id,
                    origin=origin,
                    original_url=input_url,
                    storage_path=storage_path,
                    thumbnail_path=thumbnail_path,
                    duration_seconds=metadata.get("duration_seconds"),
                    width=metadata.get("width"),
                    height=metadata.get("height"),
                    fps=metadata.get("fps"),
                    created_at=now,
                    updated_at=now,
                )
                session.add(clip)
                # Flush so the row is written (pre-commit) and readable below.
                session.flush()

                created_clips.append(
                    {
                        "id": clip.id,
                        "project_id": clip.project_id,
                        "storage_path": clip.storage_path,
                        "thumbnail_path": clip.thumbnail_path,
                        "duration_seconds": clip.duration_seconds,
                        "width": clip.width,
                        "height": clip.height,
                        "fps": clip.fps,
                        "origin": clip.origin,
                        "original_url": clip.original_url,
                    }
                )
            finally:
                # Always remove the temp download, even if processing failed.
                if os.path.exists(local_path):
                    os.remove(local_path)
        # One commit covers every clip ingested from this input URL.
        session.commit()

    return created_clips
|
||||
97
backend/services/rendering.py
Normal file
97
backend/services/rendering.py
Normal file
@ -0,0 +1,97 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Dict, List
|
||||
|
||||
import moviepy.editor as mpe
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _parse_resolution(resolution: str) -> tuple[int, int]:
|
||||
try:
|
||||
width_str, height_str = resolution.lower().split("x")
|
||||
return int(width_str), int(height_str)
|
||||
except (ValueError, AttributeError) as exc:
|
||||
raise ValueError(f"Invalid resolution format: {resolution}") from exc
|
||||
|
||||
|
||||
def render_video(
    audio_path: str,
    timeline: List[Dict],
    lyrics_timed_lines: List[Dict],
    output_path: str,
    resolution: str = "1080x1920",
    fps: int = 30,
) -> None:
    """Render the final video composition with lyrics overlays.

    Args:
        audio_path: Song audio laid under the whole composition.
        timeline: Segments with "clip_path", optional "video_start"/"video_end"
            (trim window within the clip) and "song_start" (placement time).
        lyrics_timed_lines: Dicts with "start", "end", "text" for captions.
        output_path: Destination file, encoded H.264 video / AAC audio.
        resolution: "WIDTHxHEIGHT" string; portrait 1080x1920 by default.
        fps: Output frame rate.

    Raises:
        ValueError: on a bad resolution string or an empty timeline.
    """
    width, height = _parse_resolution(resolution)
    video_segments: List[mpe.VideoClip] = []

    try:
        for segment in timeline:
            clip_path = segment["clip_path"]
            video_start = float(segment.get("video_start", 0.0))
            video_end = float(segment.get("video_end")) if segment.get("video_end") is not None else None
            song_start = float(segment.get("song_start", 0.0))

            clip = mpe.VideoFileClip(clip_path)
            # Trim to the requested window; open-ended when video_end is None.
            if video_end is not None:
                clip = clip.subclip(video_start, video_end)
            else:
                clip = clip.subclip(video_start)
            # NOTE(review): resize stretches to the target size without
            # preserving aspect ratio — confirm that is intended.
            clip = clip.resize(newsize=(width, height)).set_start(song_start)
            video_segments.append(clip)

        if not video_segments:
            raise ValueError("Timeline is empty; cannot render video")

        base_video = mpe.concatenate_videoclips(video_segments, method="compose")

        # One caption TextClip per lyrics line, anchored near the bottom.
        text_clips: List[mpe.VideoClip] = []
        for line in lyrics_timed_lines:
            text = line.get("text", "").strip()
            if not text:
                continue
            start = float(line["start"])
            end = float(line["end"])
            # Clamp to a minimum duration so zero-length lines still render.
            duration = max(end - start, 0.1)
            text_clip = (
                mpe.TextClip(
                    txt=text,
                    fontsize=60,
                    color="white",
                    stroke_color="black",
                    stroke_width=2,
                    method="caption",
                    size=(width - 200, None),
                )
                .set_duration(duration)
                .set_start(start)
                .set_position(("center", height - 150))
            )
            text_clips.append(text_clip)

        composite = mpe.CompositeVideoClip([base_video, *text_clips], size=(width, height))
        audio_clip = mpe.AudioFileClip(audio_path)
        composite = composite.set_audio(audio_clip)
        composite.write_videofile(
            output_path,
            codec="libx264",
            audio_codec="aac",
            fps=fps,
            preset="medium",
        )
    finally:
        # moviepy clips hold ffmpeg readers open; close everything that was
        # created, guarding with locals() because failures can occur before
        # any given name is bound.
        for clip in video_segments:
            clip.close()
        if "base_video" in locals():
            base_video.close()  # type: ignore[union-attr]
        if "text_clips" in locals():
            for clip in text_clips:
                clip.close()
        if "composite" in locals():
            composite.close()  # type: ignore[union-attr]
        if "audio_clip" in locals():
            audio_clip.close()  # type: ignore[union-attr]
|
||||
48
backend/services/storage.py
Normal file
48
backend/services/storage.py
Normal file
@ -0,0 +1,48 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import shutil
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import BinaryIO
|
||||
|
||||
from ..config import settings
|
||||
|
||||
|
||||
def _resolve_destination(dest_path: str) -> Path:
    """Resolve *dest_path* against the configured storage root.

    Creates any missing parent directories so callers can write to the
    returned path immediately.

    Args:
        dest_path: Storage key, expressed as a path relative to
            ``settings.storage_base_path``.

    Returns:
        The absolute-ish ``Path`` under the storage root.

    Raises:
        ValueError: if *dest_path* is absolute or escapes the storage root
            (e.g. via ``..`` components). ``Path("/base") / "/etc/x"``
            silently discards the base, and ``"a/../../x"`` walks out of
            it, so unchecked keys could write anywhere on disk.
    """
    base_path = Path(settings.storage_base_path)
    candidate = base_path / dest_path
    # resolve() works on non-existent paths (strict=False by default),
    # so we can validate before creating directories.
    if not candidate.resolve().is_relative_to(base_path.resolve()):
        raise ValueError(f"Destination escapes storage root: {dest_path}")
    candidate.parent.mkdir(parents=True, exist_ok=True)
    return candidate
|
||||
|
||||
|
||||
def upload_file(file_obj: BinaryIO, dest_path: str) -> str:
    """Copy a readable binary stream into object storage at *dest_path*.

    Returns the storage key (*dest_path*) unchanged so callers can persist it.
    """
    target = _resolve_destination(dest_path)
    with target.open("wb") as sink:
        shutil.copyfileobj(file_obj, sink)
    return dest_path
|
||||
|
||||
|
||||
def upload_bytes(data: bytes, dest_path: str, content_type: str | None = None) -> str:
    """Persist raw *data* to object storage at *dest_path*.

    ``content_type`` is accepted for interface parity but is unused by
    this filesystem-backed implementation.

    Returns the storage key (*dest_path*) unchanged.
    """
    target = _resolve_destination(dest_path)
    target.write_bytes(data)
    return dest_path
|
||||
|
||||
|
||||
def download_to_temp(path: str) -> str:
    """Copy a stored file to a fresh temporary file and return its path.

    The temporary file keeps the source's suffix so downstream tools that
    sniff extensions keep working. The caller owns the file and must
    delete it when done (``delete=False``).

    Raises:
        FileNotFoundError: if *path* does not exist under the storage root.
    """
    source = Path(settings.storage_base_path) / path
    if not source.exists():
        raise FileNotFoundError(f"Storage path does not exist: {path}")

    with tempfile.NamedTemporaryFile(delete=False, suffix=source.suffix) as tmp:
        with source.open("rb") as src:
            shutil.copyfileobj(src, tmp)
        return tmp.name
|
||||
5
backend/workers/__init__.py
Normal file
5
backend/workers/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
"""Background workers and Celery tasks."""
|
||||
|
||||
from .tasks import celery_app
|
||||
|
||||
__all__ = ["celery_app"]
|
||||
114
backend/workers/tasks.py
Normal file
114
backend/workers/tasks.py
Normal file
@ -0,0 +1,114 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from celery import Celery
|
||||
|
||||
from ..config import settings
|
||||
from ..db import db_session
|
||||
from ..models import AudioTrack, Lyrics, SourceClip
|
||||
from ..services import audio_analysis, lyrics_from_audio, media_ingest, storage
|
||||
|
||||
# Celery application shared by every background task in this module.
# Broker and result-backend URLs come from the environment-driven settings
# object imported from ..config.
celery_app = Celery(
    "beatmatchr",
    broker=settings.celery_broker_url,
    backend=settings.celery_result_backend,
)
|
||||
|
||||
|
||||
@celery_app.task(name="media.ingest_url")
def task_ingest_url(project_id: str, input_url: str, origin: str = "url") -> None:
    """Ingest media from a URL for the specified project.

    Thin Celery wrapper that delegates straight to
    ``media_ingest.ingest_single_media_url``.
    """
    media_ingest.ingest_single_media_url(
        project_id=project_id,
        input_url=input_url,
        origin=origin,
    )
|
||||
|
||||
|
||||
@celery_app.task(name="media.process_uploaded_video")
def task_process_uploaded_video(source_clip_id: str) -> None:
    """Extract metadata and thumbnails for an uploaded video clip.

    Downloads the clip to a local temp file, probes it for metadata,
    renders a thumbnail at the 0.5s mark, uploads the thumbnail back to
    storage, and persists the results on the ``SourceClip`` row.
    """
    with db_session() as session:
        clip = session.query(SourceClip).filter_by(id=source_clip_id).one()
        # Media tooling needs a real local file path, not a storage key.
        local_video = storage.download_to_temp(clip.storage_path)
        try:
            meta = media_ingest.extract_video_metadata(local_video)
            thumb_bytes = media_ingest.generate_thumbnail(local_video, time_seconds=0.5)

            thumb_dest = f"thumbnails/{clip.project_id}/{clip.id}.jpg"
            thumb_path = storage.upload_bytes(thumb_bytes, thumb_dest, content_type="image/jpeg")

            clip.duration_seconds = meta.get("duration_seconds")
            clip.width = meta.get("width")
            clip.height = meta.get("height")
            clip.fps = meta.get("fps")
            clip.thumbnail_path = thumb_path
            # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
            # consider datetime.now(timezone.utc) project-wide.
            clip.updated_at = datetime.utcnow()

            session.commit()
        finally:
            # Always remove the temp copy, even when processing fails.
            if os.path.exists(local_video):
                os.remove(local_video)
|
||||
|
||||
|
||||
@celery_app.task(name="audio.analyze")
def task_analyze_audio(audio_track_id: str) -> None:
    """Analyze an audio track to compute duration, BPM, and beat grid.

    Downloads the track to a temp file, runs the analysis service, writes
    the results back onto the ``AudioTrack`` row, and cleans up the copy.
    """
    with db_session() as session:
        track = session.query(AudioTrack).filter_by(id=audio_track_id).one()
        # Analysis needs a real local file path, not a storage key.
        local_copy = storage.download_to_temp(track.storage_path)
        try:
            analysis = audio_analysis.analyze_audio(local_copy)

            track.duration_seconds = analysis.get("duration_seconds")
            track.bpm = analysis.get("bpm")
            track.beat_grid = analysis.get("beat_grid")
            track.updated_at = datetime.utcnow()

            session.commit()
        finally:
            # Remove the temp copy whether or not analysis succeeded.
            if os.path.exists(local_copy):
                os.remove(local_copy)
|
||||
|
||||
|
||||
@celery_app.task(name="lyrics.transcribe")
def task_transcribe_lyrics(project_id: str, audio_track_id: str) -> None:
    """Transcribe lyrics from the project's audio track.

    Downloads the track locally, runs speech-to-text, then upserts the
    project's ``Lyrics`` row with the raw text plus word- and line-level
    timings.
    """
    with db_session() as session:
        audio = session.query(AudioTrack).filter_by(id=audio_track_id).one()
        # Transcription needs a real local file path, not a storage key.
        local_path = storage.download_to_temp(audio.storage_path)
        try:
            result = lyrics_from_audio.transcribe_audio_to_lyrics(local_path)
            raw_text = result["raw_text"]
            words = result.get("words", [])
            lines = result.get("lines", [])

            # One Lyrics row per project: update in place when present.
            existing = session.query(Lyrics).filter_by(project_id=project_id).one_or_none()
            # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
            # consider datetime.now(timezone.utc) project-wide.
            now = datetime.utcnow()

            if existing is None:
                lyrics = Lyrics(
                    id=str(uuid.uuid4()),
                    project_id=project_id,
                    source="audio_transcription",
                    raw_text=raw_text,
                    timed_words=words,
                    timed_lines=lines,
                    created_at=now,
                    updated_at=now,
                )
                session.add(lyrics)
            else:
                existing.source = "audio_transcription"
                existing.raw_text = raw_text
                existing.timed_words = words
                existing.timed_lines = lines
                existing.updated_at = now

            session.commit()
        finally:
            # Remove the temp copy whether or not transcription succeeded.
            if os.path.exists(local_path):
                os.remove(local_path)
|
||||
12
frontend/app/globals.css
Normal file
12
frontend/app/globals.css
Normal file
@ -0,0 +1,12 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
:root {
|
||||
color-scheme: light;
|
||||
}
|
||||
|
||||
body {
|
||||
min-height: 100vh;
|
||||
background-color: #f8fafc;
|
||||
}
|
||||
44
frontend/app/layout.tsx
Normal file
44
frontend/app/layout.tsx
Normal file
@ -0,0 +1,44 @@
|
||||
import './globals.css';
|
||||
import type { Metadata } from 'next';
|
||||
import { Providers } from '../components/providers';
|
||||
import Link from 'next/link';
|
||||
|
||||
// Site-wide metadata consumed by the Next.js App Router <head> handling.
export const metadata: Metadata = {
  title: 'Beatmatchr',
  description: 'Frontend for Beatmatchr project editor'
};
|
||||
|
||||
/**
 * Root layout for the App Router: renders the global chrome (header with
 * Home/Projects navigation) and wraps every page in the React Query
 * provider stack from `Providers`.
 */
export default function RootLayout({
  children
}: {
  children: React.ReactNode;
}) {
  return (
    <html lang="en">
      <body className="bg-slate-50 text-slate-900">
        <Providers>
          <div className="min-h-screen flex flex-col">
            <header className="border-b border-slate-200 bg-white">
              <div className="mx-auto flex w-full max-w-5xl items-center justify-between px-6 py-4">
                <Link href="/" className="text-lg font-semibold">
                  Beatmatchr
                </Link>
                <nav className="flex items-center gap-4 text-sm">
                  <Link href="/" className="hover:text-slate-600">
                    Home
                  </Link>
                  <Link href="/projects" className="hover:text-slate-600">
                    Projects
                  </Link>
                </nav>
              </div>
            </header>
            <main className="flex-1">
              <div className="mx-auto w-full max-w-5xl px-6 py-8">{children}</div>
            </main>
          </div>
        </Providers>
      </body>
    </html>
  );
}
|
||||
18
frontend/app/page.tsx
Normal file
18
frontend/app/page.tsx
Normal file
@ -0,0 +1,18 @@
|
||||
import Link from 'next/link';
|
||||
|
||||
/**
 * Static landing page: headline, blurb, and a call-to-action link into
 * the projects list.
 */
export default function HomePage() {
  return (
    <div className="flex flex-col items-center justify-center gap-6 py-24 text-center">
      <h1 className="text-4xl font-bold text-slate-900">Welcome to Beatmatchr</h1>
      <p className="max-w-xl text-lg text-slate-600">
        Manage your audio projects, organize source clips, and fine-tune lyrics in a clean, focused editor.
      </p>
      <Link
        href="/projects"
        className="rounded-md bg-indigo-600 px-6 py-3 text-white shadow-sm transition hover:bg-indigo-500"
      >
        Create Project
      </Link>
    </div>
  );
}
|
||||
57
frontend/app/projects/[id]/page.tsx
Normal file
57
frontend/app/projects/[id]/page.tsx
Normal file
@ -0,0 +1,57 @@
|
||||
'use client';
|
||||
|
||||
import { useParams } from 'next/navigation';
|
||||
import { useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { getProject, Project } from '../../../lib/api';
|
||||
import { AudioUpload } from '../../../components/AudioUpload';
|
||||
import { ClipGrid } from '../../../components/ClipGrid';
|
||||
import { LyricsEditor } from '../../../components/LyricsEditor';
|
||||
|
||||
/**
 * Project detail page: loads the project by the route's `[id]` param and
 * renders audio upload, lyrics editing, and the source-clip grid.
 */
export default function ProjectDetailPage() {
  const params = useParams<{ id: string }>();
  const projectId = params?.id;
  const queryClient = useQueryClient();

  // Query is declared before the early return so the hook order stays
  // stable; `enabled` keeps it idle until the param is available.
  const {
    data: project,
    isLoading,
    isError,
    error
  } = useQuery<Project, Error>({
    queryKey: ['projects', projectId],
    queryFn: () => getProject(projectId as string),
    enabled: Boolean(projectId)
  });

  if (!projectId) {
    return <p className="text-sm text-red-600">Project ID is missing.</p>;
  }

  return (
    <div className="space-y-8">
      {isLoading && <p className="text-slate-500">Loading project...</p>}
      {isError && <p className="text-red-600">{error.message}</p>}
      {project && (
        <>
          <div className="space-y-2">
            <h1 className="text-3xl font-semibold text-slate-900">{project.name}</h1>
            {project.description && <p className="text-slate-600">{project.description}</p>}
          </div>
          <div className="grid gap-6 lg:grid-cols-2">
            {/* NOTE(review): a successful audio upload invalidates the
                source-clips query, not an audio query — confirm intended. */}
            <AudioUpload
              projectId={projectId}
              onSuccess={() =>
                queryClient.invalidateQueries({ queryKey: ['projects', projectId, 'source-clips'] })
              }
            />
            <LyricsEditor projectId={projectId} />
          </div>
          <section className="space-y-4">
            <h2 className="text-xl font-semibold text-slate-900">Source Clips</h2>
            <ClipGrid projectId={projectId} />
          </section>
        </>
      )}
    </div>
  );
}
|
||||
60
frontend/app/projects/page.tsx
Normal file
60
frontend/app/projects/page.tsx
Normal file
@ -0,0 +1,60 @@
|
||||
'use client';
|
||||
|
||||
import Link from 'next/link';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { getProjects, Project } from '../../lib/api';
|
||||
|
||||
/**
 * Projects index: fetches all projects and renders loading/error states,
 * an empty-state card, or a two-column card grid with links into each
 * project's detail page.
 */
export default function ProjectsPage() {
  const {
    data: projects,
    isLoading,
    isError,
    error
  } = useQuery<Project[], Error>({
    queryKey: ['projects'],
    queryFn: getProjects
  });

  return (
    <div className="space-y-6">
      <div className="flex items-center justify-between">
        <div>
          <h1 className="text-2xl font-semibold text-slate-900">Projects</h1>
          <p className="text-sm text-slate-600">Browse and open your existing audio projects.</p>
        </div>
        {/* NOTE(review): "New Project" links to the home page, which has no
            creation form — confirm the intended destination. */}
        <Link
          href="/"
          className="rounded-md border border-indigo-200 px-3 py-2 text-sm text-indigo-600 hover:bg-indigo-50"
        >
          New Project
        </Link>
      </div>

      {isLoading && <p className="text-slate-500">Loading projects...</p>}
      {isError && <p className="text-red-600">{error.message}</p>}

      {projects && projects.length === 0 && (
        <div className="rounded-lg border border-dashed border-slate-300 bg-white p-8 text-center text-slate-500">
          No projects yet. Create one from the backend or CLI.
        </div>
      )}

      {projects && projects.length > 0 && (
        <ul className="grid gap-4 sm:grid-cols-2">
          {projects.map((project) => (
            <li key={project.id} className="rounded-lg border border-slate-200 bg-white p-4 shadow-sm">
              <h2 className="text-lg font-medium text-slate-900">{project.name}</h2>
              {project.description && <p className="mt-1 text-sm text-slate-600">{project.description}</p>}
              <div className="mt-4 flex items-center justify-between text-sm text-slate-500">
                {project.created_at && <span>Created {new Date(project.created_at).toLocaleDateString()}</span>}
                <Link href={`/projects/${project.id}`} className="text-indigo-600 hover:text-indigo-500">
                  Open
                </Link>
              </div>
            </li>
          ))}
        </ul>
      )}
    </div>
  );
}
|
||||
55
frontend/components/AudioUpload.tsx
Normal file
55
frontend/components/AudioUpload.tsx
Normal file
@ -0,0 +1,55 @@
|
||||
'use client';
|
||||
|
||||
import { useMutation } from '@tanstack/react-query';
|
||||
import { FormEvent, useState } from 'react';
|
||||
import { uploadProjectAudio } from '../lib/api';
|
||||
|
||||
interface AudioUploadProps {
|
||||
projectId: string;
|
||||
onSuccess?: () => void;
|
||||
}
|
||||
|
||||
/**
 * File-input form that uploads one audio file to the given project.
 * Invokes `onSuccess` after the upload mutation resolves.
 */
export function AudioUpload({ projectId, onSuccess }: AudioUploadProps) {
  const [file, setFile] = useState<File | null>(null);

  const mutation = useMutation({
    mutationFn: async () => {
      // Surface a user-facing error instead of posting an empty form.
      if (!file) {
        throw new Error('Please select an audio file to upload.');
      }
      await uploadProjectAudio(projectId, file);
    },
    onSuccess
  });

  const handleSubmit = (event: FormEvent<HTMLFormElement>) => {
    event.preventDefault();
    mutation.mutate();
  };

  return (
    <form onSubmit={handleSubmit} className="space-y-3 rounded-lg border border-slate-200 bg-white p-4 shadow-sm">
      <div>
        <h2 className="text-lg font-semibold text-slate-900">Upload Audio</h2>
        <p className="text-sm text-slate-600">Attach a new audio track to this project.</p>
      </div>
      <input
        type="file"
        accept="audio/*"
        onChange={(event) => setFile(event.target.files?.[0] ?? null)}
        className="block w-full rounded-md border border-slate-300 px-3 py-2 text-sm"
      />
      <button
        type="submit"
        disabled={mutation.isPending}
        className="rounded-md bg-indigo-600 px-4 py-2 text-sm font-medium text-white transition hover:bg-indigo-500 disabled:cursor-not-allowed disabled:opacity-60"
      >
        {mutation.isPending ? 'Uploading...' : 'Upload'}
      </button>
      {mutation.isError && (
        <p className="text-sm text-red-600">{(mutation.error as Error).message}</p>
      )}
      {mutation.isSuccess && <p className="text-sm text-green-600">Audio uploaded successfully!</p>}
    </form>
  );
}
|
||||
52
frontend/components/ClipGrid.tsx
Normal file
52
frontend/components/ClipGrid.tsx
Normal file
@ -0,0 +1,52 @@
|
||||
'use client';
|
||||
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { getSourceClips, SourceClip } from '../lib/api';
|
||||
|
||||
interface ClipGridProps {
|
||||
projectId: string;
|
||||
}
|
||||
|
||||
/**
 * Read-only grid of a project's source clips. Handles loading, error,
 * and empty states with early returns before rendering the card grid.
 */
export function ClipGrid({ projectId }: ClipGridProps) {
  const {
    data: clips,
    isLoading,
    isError,
    error
  } = useQuery<SourceClip[], Error>({
    queryKey: ['projects', projectId, 'source-clips'],
    queryFn: () => getSourceClips(projectId)
  });

  if (isLoading) {
    return <p className="text-sm text-slate-500">Loading source clips...</p>;
  }

  if (isError) {
    return <p className="text-sm text-red-600">{error.message}</p>;
  }

  if (!clips || clips.length === 0) {
    return (
      <div className="rounded-lg border border-dashed border-slate-300 bg-white p-6 text-center text-sm text-slate-500">
        No source clips found yet.
      </div>
    );
  }

  return (
    <div className="grid gap-4 sm:grid-cols-2">
      {clips.map((clip) => (
        <div key={clip.id} className="rounded-lg border border-slate-200 bg-white p-4 shadow-sm">
          <h3 className="text-base font-medium text-slate-900">{clip.name}</h3>
          {/* != null covers both null and undefined durations */}
          {clip.duration != null && (
            <p className="text-sm text-slate-500">Duration: {clip.duration.toFixed(1)}s</p>
          )}
          {clip.waveform_url && (
            <p className="truncate text-sm text-indigo-600">Waveform: {clip.waveform_url}</p>
          )}
        </div>
      ))}
    </div>
  );
}
|
||||
65
frontend/components/LyricsEditor.tsx
Normal file
65
frontend/components/LyricsEditor.tsx
Normal file
@ -0,0 +1,65 @@
|
||||
'use client';
|
||||
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { getLyrics, Lyrics, updateLyrics } from '../lib/api';
|
||||
import { useEffect, useState } from 'react';
|
||||
|
||||
interface LyricsEditorProps {
|
||||
projectId: string;
|
||||
}
|
||||
|
||||
/**
 * Editable textarea bound to the project's lyrics, with an explicit
 * save button that PUTs the raw text and refreshes the query cache.
 */
export function LyricsEditor({ projectId }: LyricsEditorProps) {
  const queryClient = useQueryClient();
  const { data, isLoading, isError, error } = useQuery<Lyrics, Error>({
    queryKey: ['projects', projectId, 'lyrics'],
    queryFn: () => getLyrics(projectId)
  });

  const [text, setText] = useState('');

  // Seed the textarea from the fetched lyrics.
  // NOTE(review): this also re-runs on any refetch, overwriting unsaved
  // local edits with server state — confirm that is acceptable.
  useEffect(() => {
    if (data) {
      setText(data.raw_text ?? '');
    }
  }, [data]);

  const mutation = useMutation({
    mutationFn: () => updateLyrics(projectId, { raw_text: text }),
    onSuccess: (updated) => {
      // Write the server's response straight into the cache — no refetch.
      queryClient.setQueryData(['projects', projectId, 'lyrics'], updated);
    }
  });

  return (
    <div className="space-y-3 rounded-lg border border-slate-200 bg-white p-4 shadow-sm">
      <div>
        <h2 className="text-lg font-semibold text-slate-900">Lyrics</h2>
        <p className="text-sm text-slate-600">Review and edit the current lyrics.</p>
      </div>
      {isLoading && <p className="text-sm text-slate-500">Loading lyrics...</p>}
      {isError && <p className="text-sm text-red-600">{error.message}</p>}
      {!isLoading && !isError && (
        <>
          <textarea
            value={text}
            onChange={(event) => setText(event.target.value)}
            rows={8}
            className="w-full rounded-md border border-slate-300 px-3 py-2 text-sm focus:border-indigo-500 focus:outline-none focus:ring-1 focus:ring-indigo-500"
          />
          <button
            type="button"
            onClick={() => mutation.mutate()}
            disabled={mutation.isPending}
            className="rounded-md bg-indigo-600 px-4 py-2 text-sm font-medium text-white transition hover:bg-indigo-500 disabled:cursor-not-allowed disabled:opacity-60"
          >
            {mutation.isPending ? 'Saving...' : 'Save Lyrics'}
          </button>
          {mutation.isError && (
            <p className="text-sm text-red-600">{(mutation.error as Error).message}</p>
          )}
          {mutation.isSuccess && <p className="text-sm text-green-600">Lyrics saved.</p>}
        </>
      )}
    </div>
  );
}
|
||||
14
frontend/components/providers.tsx
Normal file
14
frontend/components/providers.tsx
Normal file
@ -0,0 +1,14 @@
|
||||
'use client';
|
||||
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import { ReactNode, useState } from 'react';
|
||||
|
||||
interface ProvidersProps {
|
||||
children: ReactNode;
|
||||
}
|
||||
|
||||
/**
 * Client-side provider stack. The QueryClient is created inside useState
 * with a lazy initializer so a single stable instance survives re-renders.
 */
export function Providers({ children }: ProvidersProps) {
  const [queryClient] = useState(() => new QueryClient());

  return <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>;
}
|
||||
78
frontend/lib/api.ts
Normal file
78
frontend/lib/api.ts
Normal file
@ -0,0 +1,78 @@
|
||||
// Backend API root; override via NEXT_PUBLIC_API_BASE_URL at build time.
const API_BASE_URL = process.env.NEXT_PUBLIC_API_BASE_URL ?? 'http://localhost:8000/api';
||||
|
||||
// Shape of a project as returned by the backend's /projects endpoints.
export interface Project {
  id: string;
  name: string;
  description?: string | null;
  // ISO-8601 timestamp string, if the backend supplies it
  created_at?: string;
}
|
||||
|
||||
// Shape of a source clip entry from /projects/{id}/source-clips.
export interface SourceClip {
  id: string;
  name: string;
  // seconds — presumably; confirm against backend serializer
  duration?: number;
  waveform_url?: string;
}
|
||||
|
||||
// Lyrics payload exchanged with /projects/{id}/lyrics (GET and PUT).
export interface Lyrics {
  raw_text: string;
  updated_at?: string;
}
|
||||
|
||||
async function handleResponse<T>(response: Response): Promise<T> {
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(errorText || response.statusText);
|
||||
}
|
||||
if (response.status === 204) {
|
||||
return undefined as T;
|
||||
}
|
||||
return (await response.json()) as T;
|
||||
}
|
||||
|
||||
/** Fetch all projects; `no-store` bypasses Next.js fetch caching. */
export async function getProjects(): Promise<Project[]> {
  const response = await fetch(`${API_BASE_URL}/projects`, { cache: 'no-store' });
  return handleResponse<Project[]>(response);
}
|
||||
|
||||
/** Fetch a single project by id; `no-store` bypasses fetch caching. */
export async function getProject(projectId: string): Promise<Project> {
  const response = await fetch(`${API_BASE_URL}/projects/${projectId}`, { cache: 'no-store' });
  return handleResponse<Project>(response);
}
|
||||
|
||||
/**
 * POST an audio file to a project as multipart form data.
 * No Content-Type header is set: the browser adds the multipart
 * boundary automatically for FormData bodies.
 */
export async function uploadProjectAudio(projectId: string, file: File): Promise<void> {
  const formData = new FormData();
  formData.append('file', file);

  const response = await fetch(`${API_BASE_URL}/projects/${projectId}/audio`, {
    method: 'POST',
    body: formData
  });

  await handleResponse<void>(response);
}
|
||||
|
||||
/** Fetch a project's source clips; `no-store` bypasses fetch caching. */
export async function getSourceClips(projectId: string): Promise<SourceClip[]> {
  const response = await fetch(`${API_BASE_URL}/projects/${projectId}/source-clips`, {
    cache: 'no-store'
  });
  return handleResponse<SourceClip[]>(response);
}
|
||||
|
||||
/** Fetch a project's lyrics; `no-store` bypasses fetch caching. */
export async function getLyrics(projectId: string): Promise<Lyrics> {
  const response = await fetch(`${API_BASE_URL}/projects/${projectId}/lyrics`, {
    cache: 'no-store'
  });
  return handleResponse<Lyrics>(response);
}
|
||||
|
||||
/** PUT the full lyrics payload and return the server's updated copy. */
export async function updateLyrics(projectId: string, lyrics: Lyrics): Promise<Lyrics> {
  const response = await fetch(`${API_BASE_URL}/projects/${projectId}/lyrics`, {
    method: 'PUT',
    headers: {
      'Content-Type': 'application/json'
    },
    body: JSON.stringify(lyrics)
  });
  return handleResponse<Lyrics>(response);
}
|
||||
5
frontend/next-env.d.ts
vendored
Normal file
5
frontend/next-env.d.ts
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/basic-features/typescript for more information.
|
||||
8
frontend/next.config.mjs
Normal file
8
frontend/next.config.mjs
Normal file
@ -0,0 +1,8 @@
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {
|
||||
experimental: {
|
||||
typedRoutes: true
|
||||
}
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
28
frontend/package.json
Normal file
28
frontend/package.json
Normal file
@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "beatmatchr-frontend",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"lint": "next lint"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tanstack/react-query": "^5.25.4",
|
||||
"next": "14.1.0",
|
||||
"react": "18.2.0",
|
||||
"react-dom": "18.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "20.11.30",
|
||||
"@types/react": "18.2.64",
|
||||
"@types/react-dom": "18.2.19",
|
||||
"autoprefixer": "10.4.17",
|
||||
"eslint": "8.57.0",
|
||||
"eslint-config-next": "14.1.0",
|
||||
"postcss": "8.4.35",
|
||||
"tailwindcss": "3.4.1",
|
||||
"typescript": "5.3.3"
|
||||
}
|
||||
}
|
||||
6
frontend/postcss.config.mjs
Normal file
6
frontend/postcss.config.mjs
Normal file
@ -0,0 +1,6 @@
|
||||
export default {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {}
|
||||
}
|
||||
};
|
||||
14
frontend/tailwind.config.ts
Normal file
14
frontend/tailwind.config.ts
Normal file
@ -0,0 +1,14 @@
|
||||
import type { Config } from 'tailwindcss';
|
||||
|
||||
const config: Config = {
|
||||
content: [
|
||||
'./app/**/*.{js,ts,jsx,tsx}',
|
||||
'./components/**/*.{js,ts,jsx,tsx}'
|
||||
],
|
||||
theme: {
|
||||
extend: {}
|
||||
},
|
||||
plugins: []
|
||||
};
|
||||
|
||||
export default config;
|
||||
21
frontend/tsconfig.json
Normal file
21
frontend/tsconfig.json
Normal file
@ -0,0 +1,21 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"lib": ["DOM", "DOM.Iterable", "ES2020"],
|
||||
"allowJs": false,
|
||||
"skipLibCheck": true,
|
||||
"esModuleInterop": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"strict": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "Bundler",
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"jsx": "preserve",
|
||||
"incremental": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
Loading…
x
Reference in New Issue
Block a user