From 32a4949f25571c8af1bb0b39bc837da43f0e93d6 Mon Sep 17 00:00:00 2001
From: Bat
Date: Mon, 3 Sep 2018 19:53:20 +0100
Subject: [PATCH] Update user information if needed

When a remote user's profile is displayed and it has not been updated for at
least 24 hours, newer information is fetched.

Fixes #135
---
 .../down.sql               |  2 ++
 .../up.sql                 |  2 ++
 plume-models/src/schema.rs |  1 +
 plume-models/src/users.rs  | 35 +++++++++++++++++--
 src/main.rs                |  3 ++
 src/routes/user.rs         | 16 ++++++---
 6 files changed, 52 insertions(+), 7 deletions(-)
 create mode 100644 migrations/2018-09-03-170848_user_add_last_fetched_date/down.sql
 create mode 100644 migrations/2018-09-03-170848_user_add_last_fetched_date/up.sql

diff --git a/migrations/2018-09-03-170848_user_add_last_fetched_date/down.sql b/migrations/2018-09-03-170848_user_add_last_fetched_date/down.sql
new file mode 100644
index 0000000..2b3eb2d
--- /dev/null
+++ b/migrations/2018-09-03-170848_user_add_last_fetched_date/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+ALTER TABLE users DROP COLUMN last_fetched_date;
diff --git a/migrations/2018-09-03-170848_user_add_last_fetched_date/up.sql b/migrations/2018-09-03-170848_user_add_last_fetched_date/up.sql
new file mode 100644
index 0000000..9f823f0
--- /dev/null
+++ b/migrations/2018-09-03-170848_user_add_last_fetched_date/up.sql
@@ -0,0 +1,2 @@
+-- Your SQL goes here
+ALTER TABLE users ADD COLUMN last_fetched_date TIMESTAMP NOT NULL DEFAULT '2000-01-01 00:00:01';
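Note (illustrative, not part of the patch): because the new column is NOT NULL, the DEFAULT of '2000-01-01 00:00:01' also back-fills every existing row with a date far in the past, so remote users that are already in the database count as stale and will be refreshed the first time their profile is displayed after this migration runs.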
diff --git a/plume-models/src/schema.rs b/plume-models/src/schema.rs
index eacc19c..6a625bb 100644
--- a/plume-models/src/schema.rs
+++ b/plume-models/src/schema.rs
@@ -156,6 +156,7 @@ table! {
         shared_inbox_url -> Nullable<Varchar>,
         followers_endpoint -> Varchar,
         avatar_id -> Nullable<Int4>,
+        last_fetched_date -> Timestamp,
     }
 }
 
diff --git a/plume-models/src/users.rs b/plume-models/src/users.rs
index 9144812..418a988 100644
--- a/plume-models/src/users.rs
+++ b/plume-models/src/users.rs
@@ -5,7 +5,7 @@ use activitypub::{
     object::Image,
 };
 use bcrypt;
-use chrono::NaiveDateTime;
+use chrono::{NaiveDateTime, Utc};
 use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods, BelongingToDsl, PgConnection, dsl::any};
 use openssl::{
     hash::MessageDigest,
@@ -68,6 +68,7 @@ pub struct User {
     pub shared_inbox_url: Option<String>,
     pub followers_endpoint: String,
     pub avatar_id: Option<i32>,
+    pub last_fetched_date: NaiveDateTime
 }
 
 #[derive(Insertable)]
@@ -158,7 +159,7 @@ impl User {
         }
     }
 
-    pub fn fetch_from_url(conn: &PgConnection, url: String) -> Option<User> {
+    fn fetch(url: String) -> Option<CustomPerson> {
         let req = Client::new()
             .get(&url[..])
             .header(Accept(ap_accept_header().into_iter().map(|h| qitem(h.parse::<Mime>().expect("Invalid Content-Type"))).collect()))
@@ -169,7 +170,7 @@
                 if let Ok(ap_sign) = serde_json::from_str::<ApSignature>(text) {
                     if let Ok(mut json) = serde_json::from_str::<CustomPerson>(text) {
                         json.custom_props = ap_sign; // without this workaround, publicKey is not correctly deserialized
-                        Some(User::from_activity(conn, json, Url::parse(url.as_ref()).unwrap().host_str().unwrap().to_string()))
+                        Some(json)
                     } else { None }
                 } else { None }
             } else { None }
@@ -181,6 +182,10 @@
         }
     }
 
+    pub fn fetch_from_url(conn: &PgConnection, url: String) -> Option<User> {
+        User::fetch(url.clone()).map(|json| (User::from_activity(conn, json, Url::parse(url.as_ref()).unwrap().host_str().unwrap().to_string())))
+    }
+
     fn from_activity(conn: &PgConnection, acct: CustomPerson, inst: String) -> User {
         let instance = match Instance::find_by_domain(conn, inst.clone()) {
             Some(instance) => instance,
@@ -227,6 +232,26 @@ impl User {
         user
     }
 
+    pub fn refetch(&self, conn: &PgConnection) {
+        User::fetch(self.ap_url.clone()).map(|json| {
+            let avatar = Media::save_remote(conn, json.object.object_props.icon_image().expect("User::refetch: icon error")
+                .object_props.url_string().expect("User::refetch: icon.url error"));
+
+            diesel::update(self)
+                .set((
+                    users::username.eq(json.object.ap_actor_props.preferred_username_string().expect("User::refetch: preferredUsername error")),
+                    users::display_name.eq(json.object.object_props.name_string().expect("User::refetch: name error")),
+                    users::outbox_url.eq(json.object.ap_actor_props.outbox_string().expect("User::refetch: outbox error")),
+                    users::inbox_url.eq(json.object.ap_actor_props.inbox_string().expect("User::refetch: inbox error")),
+                    users::summary.eq(SafeString::new(&json.object.object_props.summary_string().unwrap_or(String::new()))),
+                    users::followers_endpoint.eq(json.object.ap_actor_props.followers_string().expect("User::refetch: followers error")),
+                    users::avatar_id.eq(Some(avatar.id)),
+                    users::last_fetched_date.eq(Utc::now().naive_utc())
+                )).execute(conn)
+                .expect("Couldn't update user")
+        });
+    }
+
     pub fn hash_pass(pass: String) -> String {
         bcrypt::hash(pass.as_str(), 10).unwrap()
     }
@@ -504,6 +529,10 @@ impl User {
             .execute(conn)
             .expect("Couldn't update user avatar");
     }
+
+    pub fn needs_update(&self) -> bool {
+        (Utc::now().naive_utc() - self.last_fetched_date).num_days() > 1
+    }
 }
 
 impl<'a, 'r> FromRequest<'a, 'r> for User {
diff --git a/src/main.rs b/src/main.rs
index 07699b6..f0f9fce 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -29,6 +29,7 @@ extern crate validator_derive;
 extern crate webfinger;
 extern crate workerpool;
 
+use rocket::State;
 use rocket_contrib::Template;
 use rocket_csrf::CsrfFairingBuilder;
 use workerpool::{Pool, thunk::ThunkWorker};
@@ -37,6 +38,8 @@ mod inbox;
 mod setup;
 mod routes;
 
+type Worker<'a> = State<'a, Pool<ThunkWorker<()>>>;
+
 fn main() {
     let pool = setup::check();
     rocket::ignite()
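Note (illustrative, not part of the patch): a minimal, self-contained sketch of how a workerpool thunk pool like the `Worker` alias above runs a job without blocking the caller; the pool size and the printed message are example values, not taken from Plume.

    use workerpool::{Pool, thunk::{Thunk, ThunkWorker}};

    fn main() {
        // Same worker type as the `Worker` alias above: each job is a closure returning ().
        let pool: Pool<ThunkWorker<()>> = Pool::new(4); // example pool size

        // `execute` queues the closure and returns immediately; this is what lets the
        // details() route below trigger a profile refresh without blocking the request.
        pool.execute(Thunk::of(move || {
            println!("refetching a remote profile in the background");
        }));

        pool.join(); // only needed in this standalone example, to wait for the job
    }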
diff --git a/src/routes/user.rs b/src/routes/user.rs
index f4982ff..ae3eb6d 100644
--- a/src/routes/user.rs
+++ b/src/routes/user.rs
@@ -5,7 +5,6 @@ use activitypub::{
 };
 use atom_syndication::{Entry, FeedBuilder};
 use rocket::{
-    State,
     request::LenientForm,
     response::{Redirect, Flash, Content},
     http::ContentType
@@ -13,7 +12,7 @@ use rocket::{
 use rocket_contrib::Template;
 use serde_json;
 use validator::{Validate, ValidationError};
-use workerpool::{Pool, thunk::*};
+use workerpool::thunk::*;
 use plume_common::activity_pub::{
     ActivityStream, broadcast, Id, IntoId, ApRequest,
@@ -31,6 +30,7 @@ use plume_models::{
 };
 use inbox::Inbox;
 use routes::Page;
+use Worker;
 
 #[get("/me")]
 fn me(user: Option<User>) -> Result<Redirect, Flash<Redirect>> {
@@ -41,7 +41,7 @@ fn me(user: Option<User>) -> Result<Redirect, Flash<Redirect>> {
 }
 
 #[get("/@/<name>", rank = 2)]
-fn details<'r>(name: String, conn: DbConn, account: Option<User>, worker: State<Pool<ThunkWorker<()>>>, fecth_articles_conn: DbConn, fecth_followers_conn: DbConn) -> Template {
+fn details(name: String, conn: DbConn, account: Option<User>, worker: Worker, fecth_articles_conn: DbConn, fecth_followers_conn: DbConn, update_conn: DbConn) -> Template {
     may_fail!(account.map(|a| a.to_json(&*conn)), User::find_by_fqn(&*conn, name), "Couldn't find requested user", |user| {
         let recents = Post::get_recents_for_author(&*conn, &user, 6);
         let reshares = Reshare::get_recents_for_author(&*conn, &user, 6);
@@ -75,6 +75,14 @@ fn details<'r>(name: String, conn: DbConn, account: Option<User>, worker: State<
                     });
                 }
             }));
+
+            // Update profile information if needed
+            let user_clone = user.clone();
+            if user.needs_update() {
+                worker.execute(Thunk::of(move || {
+                    user_clone.refetch(&*update_conn);
+                }))
+            }
         }
 
         Template::render("users/details", json!({
@@ -106,7 +114,7 @@ fn dashboard_auth() -> Flash<Redirect> {
 }
 
 #[get("/@/<name>/follow")]
-fn follow(name: String, conn: DbConn, user: User, worker: State<Pool<ThunkWorker<()>>>) -> Redirect {
+fn follow(name: String, conn: DbConn, user: User, worker: Worker) -> Redirect {
     let target = User::find_by_fqn(&*conn, name.clone()).unwrap();
     let f = follows::Follow::insert(&*conn, follows::NewFollow {
         follower_id: user.id,
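Note (illustrative, not part of the patch): a standalone sketch of the staleness rule behind User::needs_update(), using only the chrono crate; the helper name is_stale and the example durations are assumptions made for the demonstration.

    use chrono::{Duration, NaiveDateTime, Utc};

    // Mirrors the check added in this patch: the profile counts as stale once the
    // elapsed time since the last fetch, truncated to whole days, exceeds 1.
    fn is_stale(last_fetched_date: NaiveDateTime) -> bool {
        (Utc::now().naive_utc() - last_fetched_date).num_days() > 1
    }

    fn main() {
        let fetched_recently = Utc::now().naive_utc() - Duration::hours(3);
        let fetched_long_ago = Utc::now().naive_utc() - Duration::days(3);

        assert!(!is_stale(fetched_recently)); // fresh enough, no refetch
        assert!(is_stale(fetched_long_ago));  // stale, refetched on next display
        println!("staleness checks passed");
    }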