Skip to content

Commit

Permalink
feat(media-annotation): add local scrobble
Browse files Browse the repository at this point in the history
  • Loading branch information
vnghia committed Apr 10, 2024
1 parent 86c4e01 commit 2cf2390
Show file tree
Hide file tree
Showing 12 changed files with 289 additions and 2 deletions.
2 changes: 2 additions & 0 deletions migrations/2024-04-10-145416_create_playbacks/down.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
-- Removes the per-user playback (scrobble) counter table.
drop table playbacks;
11 changes: 11 additions & 0 deletions migrations/2024-04-10-145416_create_playbacks/up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
-- Your SQL goes here
-- Per-user playback (scrobble) counters: one row per (user, song).
create table
  playbacks (
    user_id uuid not null,
    song_id uuid not null,
    -- Total recorded plays; rows start at 1 and the count must stay positive.
    count integer not null default 1 check (count > 0),
    -- Time of the most recent play (defaults to insertion time).
    updated_at timestamptz not null default now(),
    constraint playbacks_pkey primary key (user_id, song_id)
  );

-- NOTE(review): presumably installs a trigger that refreshes `updated_at`
-- on update, as for the other tables — confirm against the helper's
-- definition in an earlier migration.
select add_updated_at('playbacks');
5 changes: 3 additions & 2 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@ use axum::Router;
use nghe::config::Config;
use nghe::open_subsonic::browsing::refresh_music_folders;
use nghe::open_subsonic::{
bookmarks, browsing, extension, media_list, media_retrieval, permission, scan, searching,
system, user,
bookmarks, browsing, extension, media_annotation, media_list, media_retrieval, permission,
scan, searching, system, user,
};
use nghe::Database;
use nghe_types::constant::{SERVER_NAME, SERVER_VERSION};
Expand Down Expand Up @@ -73,6 +73,7 @@ fn app(database: Database, config: Config) -> Router {
.merge(bookmarks::router())
.merge(searching::router())
.merge(scan::router(config.artist_index, config.parsing, config.scan, config.art))
.merge(media_annotation::router())
.layer(
ServiceBuilder::new().layer(TraceLayer::new_for_http()).layer(CorsLayer::permissive()),
)
Expand Down
1 change: 1 addition & 0 deletions src/models/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ pub mod configs;
pub mod genres;
pub mod lyrics;
pub mod music_folders;
pub mod playbacks;
pub mod scans;
pub mod song_cover_arts;
pub mod songs;
Expand Down
15 changes: 15 additions & 0 deletions src/models/playbacks.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
use diesel::prelude::*;
pub use playbacks::*;
use time::OffsetDateTime;
use uuid::Uuid;

pub use crate::schema::playbacks;

/// Insertable row for the `playbacks` table, recording one scrobble for a
/// `(user_id, song_id)` pair.
#[derive(Insertable)]
#[diesel(table_name = playbacks)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct NewScrobble {
    pub user_id: Uuid,
    pub song_id: Uuid,
    // `None` leaves the column to the database default (`now()` per the
    // migration); `Some` carries a client-supplied playback time.
    pub updated_at: Option<OffsetDateTime>,
}
10 changes: 10 additions & 0 deletions src/open_subsonic/media_annotation/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
mod scrobble;

use axum::routing::get;
use axum::Router;

/// Build the router for the media-annotation endpoints.
///
/// Both the bare and the `.view` form of `scrobble` are served by the same
/// handler, consistent with the other OpenSubsonic routes in this crate.
pub fn router() -> Router<crate::Database> {
    ["/rest/scrobble", "/rest/scrobble.view"]
        .into_iter()
        .fold(Router::new(), |router, path| {
            router.route(path, get(scrobble::scrobble_handler))
        })
}
220 changes: 220 additions & 0 deletions src/open_subsonic/media_annotation/scrobble.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,220 @@
use std::ops::Add;

use anyhow::Result;
use axum::extract::State;
use diesel::upsert::excluded;
use diesel::ExpressionMethods;
use diesel_async::RunQueryDsl;
use itertools::Itertools;
use nghe_proc_macros::{add_axum_response, add_common_validate};
use time::OffsetDateTime;
use uuid::Uuid;

use crate::models::*;
use crate::{Database, DatabasePool, OSError};

add_common_validate!(ScrobbleParams);
add_axum_response!(ScrobbleBody);

async fn scrobble(pool: &DatabasePool, user_id: Uuid, params: &ScrobbleParams) -> Result<()> {
if params.submission {
if let Some(ref times) = params.times {
if params.ids.len() != times.len() {
anyhow::bail!(OSError::InvalidParameter(
"song ids and times must have the same size".into()
))
} else {
// convert milliseconds to nanoseconds
let updated_ats: Vec<_> = times
.iter()
.map(|t| OffsetDateTime::from_unix_timestamp_nanos(t * 1000000))
.try_collect()?;
diesel::insert_into(playbacks::table)
.values(
params
.ids
.iter()
.copied()
.zip(updated_ats)
.map(|(song_id, updated_at)| playbacks::NewScrobble {
user_id,
song_id,
updated_at: Some(updated_at),
})
.collect_vec(),
)
.on_conflict((playbacks::user_id, playbacks::song_id))
.do_update()
.set((
playbacks::count.eq(playbacks::count.add(1)),
playbacks::updated_at.eq(excluded(playbacks::updated_at)),
))
.execute(&mut pool.get().await?)
.await?;
}
} else {
diesel::insert_into(playbacks::table)
.values(
params
.ids
.iter()
.copied()
.map(|song_id| playbacks::NewScrobble {
user_id,
song_id,
updated_at: None,
})
.collect_vec(),
)
.on_conflict((playbacks::user_id, playbacks::song_id))
.do_update()
.set(playbacks::count.eq(playbacks::count.add(1)))
.execute(&mut pool.get().await?)
.await?;
}
}
Ok(())
}

/// Axum entry point for the `scrobble` endpoint.
///
/// Extracts the authenticated user and validated parameters from the
/// generated `ScrobbleRequest`, records the playbacks, and replies with an
/// empty Subsonic body.
pub async fn scrobble_handler(
    State(database): State<Database>,
    req: ScrobbleRequest,
) -> ScrobbleJsonResponse {
    scrobble(&database.pool, req.user_id, &req.params).await?;
    let body = ScrobbleBody {};
    Ok(axum::Json(body.into()))
}

#[cfg(test)]
mod tests {
    use diesel::QueryDsl;
    use fake::faker::time::en::*;
    use fake::Fake;
    use time::macros::datetime;

    use super::*;
    use crate::utils::test::Infra;

    #[tokio::test]
    async fn test_scrobble() {
        // Scrobbling the same song repeatedly must bump the upserted play
        // count by exactly one per call.
        let mut infra = Infra::new().await.n_folder(1).await.add_user(None).await;
        infra.add_n_song(0, 1).scan(.., None).await;
        let user_id = infra.user_id(0);
        let song_id = infra.song_ids(..).await[0];

        for _ in 0..50 {
            scrobble(
                infra.pool(),
                user_id,
                &ScrobbleParams { ids: vec![song_id], times: None, submission: true },
            )
            .await
            .unwrap();
        }
        // Read the counter straight from the table and compare.
        let play_count = playbacks::table
            .filter(playbacks::user_id.eq(user_id))
            .filter(playbacks::song_id.eq(song_id))
            .select(playbacks::count)
            .get_result::<i32>(&mut infra.pool().get().await.unwrap())
            .await
            .unwrap();
        assert_eq!(50, play_count);
    }

    #[tokio::test]
    async fn test_scrobble_multiples() {
        // Mix per-song scrobbles with batched multi-song scrobbles and check
        // that every song ends up with its own expected count.
        let n_song = 10_usize;
        let more_play_count = 10_usize;
        let mut infra = Infra::new().await.n_folder(1).await.add_user(None).await;
        infra.add_n_song(0, n_song).scan(.., None).await;
        let user_id = infra.user_id(0);
        let song_ids = infra.song_ids(..).await;
        let mut play_counts = vec![0; n_song];

        // Scrobble each song individually a random (5..10) number of times.
        for (i, song_id) in song_ids.iter().copied().enumerate() {
            let play_count = (5..10).fake();
            for _ in 0..play_count {
                scrobble(
                    infra.pool(),
                    user_id,
                    &ScrobbleParams { ids: vec![song_id], times: None, submission: true },
                )
                .await
                .unwrap();
            }
            play_counts[i] = play_count + more_play_count;
        }

        // Then scrobble all songs together `more_play_count` more times.
        for _ in 0..more_play_count {
            scrobble(
                infra.pool(),
                user_id,
                &ScrobbleParams { ids: song_ids.clone(), times: None, submission: true },
            )
            .await
            .unwrap();
        }

        for (i, song_id) in song_ids.iter().copied().enumerate() {
            let play_count = playbacks::table
                .filter(playbacks::user_id.eq(user_id))
                .filter(playbacks::song_id.eq(song_id))
                .select(playbacks::count)
                .get_result::<i32>(&mut infra.pool().get().await.unwrap())
                .await
                .unwrap();
            assert_eq!(play_counts[i], play_count as usize);
        }
    }

    #[tokio::test]
    async fn test_scrobble_multiples_time() {
        // A batched scrobble with explicit client times must both bump the
        // counters and overwrite `updated_at` with the supplied timestamps.
        let n_song = 10_usize;
        let mut infra = Infra::new().await.n_folder(1).await.add_user(None).await;
        infra.add_n_song(0, n_song).scan(.., None).await;
        let user_id = infra.user_id(0);
        let song_ids = infra.song_ids(..).await;
        // Random timestamps inside a fixed window, one per song.
        let start_dt = datetime!(1000-01-01 0:00 UTC);
        let end_dt = datetime!(2000-01-01 0:00 UTC);
        let times = (0..n_song).map(|_| DateTimeBetween(start_dt, end_dt).fake()).collect_vec();

        // Seed every song with ten time-less scrobbles first.
        for song_id in song_ids.iter().copied() {
            for _ in 0..10 {
                scrobble(
                    infra.pool(),
                    user_id,
                    &ScrobbleParams { ids: vec![song_id], times: None, submission: true },
                )
                .await
                .unwrap();
            }
        }
        // One final batched scrobble carrying the explicit times
        // (converted from nanoseconds to the API's milliseconds).
        scrobble(
            infra.pool(),
            user_id,
            &ScrobbleParams {
                ids: song_ids.clone(),
                times: Some(
                    times
                        .iter()
                        .map(|t: &OffsetDateTime| t.unix_timestamp_nanos() / 1000000)
                        .collect(),
                ),
                submission: true,
            },
        )
        .await
        .unwrap();

        for (i, song_id) in song_ids.iter().copied().enumerate() {
            let (play_count, time) = playbacks::table
                .filter(playbacks::user_id.eq(user_id))
                .filter(playbacks::song_id.eq(song_id))
                .select((playbacks::count, playbacks::updated_at))
                .get_result::<(i32, OffsetDateTime)>(&mut infra.pool().get().await.unwrap())
                .await
                .unwrap();
            // 10 seeded plays + 1 from the batched call.
            assert_eq!(11, play_count);
            assert_eq!(times[i], time);
        }
    }
}
1 change: 1 addition & 0 deletions src/open_subsonic/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ pub mod bookmarks;
pub mod browsing;
mod common;
pub mod extension;
pub mod media_annotation;
pub mod media_list;
pub mod media_retrieval;
pub mod permission;
Expand Down
10 changes: 10 additions & 0 deletions src/schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,15 @@ diesel::table! {
}
}

diesel::table! {
    /// Per-user playback (scrobble) counters, keyed by `(user_id, song_id)`.
    playbacks (user_id, song_id) {
        user_id -> Uuid,
        song_id -> Uuid,
        /// Number of recorded plays (positive; starts at 1 per the migration).
        count -> Int4,
        /// Time of the most recent play.
        updated_at -> Timestamptz,
    }
}

diesel::table! {
scans (started_at) {
started_at -> Timestamptz,
Expand Down Expand Up @@ -196,6 +205,7 @@ diesel::allow_tables_to_appear_in_same_query!(
genres,
lyrics,
music_folders,
playbacks,
scans,
song_cover_arts,
songs,
Expand Down
1 change: 1 addition & 0 deletions types/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ pub mod bookmarks;
pub mod browsing;
pub mod common;
pub mod extension;
pub mod media_annotation;
pub mod media_list;
pub mod media_retrieval;
pub mod scan;
Expand Down
1 change: 1 addition & 0 deletions types/src/media_annotation/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
pub mod scrobble;
14 changes: 14 additions & 0 deletions types/src/media_annotation/scrobble.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
use nghe_proc_macros::{add_common_convert, add_subsonic_response};
use uuid::Uuid;

/// Parameters of the OpenSubsonic `scrobble` endpoint.
#[add_common_convert]
pub struct ScrobbleParams {
    /// Ids of the songs being scrobbled (query parameter `id`).
    #[serde(rename = "id")]
    pub ids: Vec<Uuid>,
    /// Client playback timestamps in unix milliseconds (query parameter
    /// `time`); when present it must have the same length as `ids`.
    #[serde(rename = "time")]
    pub times: Option<Vec<i128>>,
    /// `true` persists the plays; `false` presumably marks a "now playing"
    /// notification, which the handler ignores — TODO confirm intent.
    pub submission: bool,
}

/// Empty response body for the `scrobble` endpoint.
#[add_subsonic_response]
pub struct ScrobbleBody {}

0 comments on commit 2cf2390

Please sign in to comment.