Add support for generic timeline (#525)

* Begin adding support for timeline

* fix some bugs with parser

* fmt

* add error reporting for parser

* add tests for timeline query parser

* add rejection tests for parse

* begin adding support for lists

also run migration before compiling, so schema.rs is up to date

* add sqlite migration

* end adding lists

still miss tests and query integration

* cargo fmt

* try to add some tests

* Add some constraint to db, and fix list test

and refactor other tests to use begin_transaction

* add more tests for lists

* add support for lists in query executor

* add keywords for including/excluding boosts and likes

* cargo fmt

* add function to list lists used by query

will make it easier to warn users when creating timeline with unknown lists

* add lang support

* add timeline creation error message when using nonexistent lists

* Update .po files

* WIP: interface for timelines

* don't use diesel for migrations

not sure how it passed the ci on the other branch

* add some tests for timeline

add an int representing the order of timelines (first one will be on
top, second just under...)
use first() instead of limit(1).get().into_iter().nth(0)
remove migrations from build artifacts as they are now compiled in

* cargo fmt

* remove timeline order

* fix tests

* add tests for timeline creation failure

* cargo fmt

* add tests for timelines

* add test for matching direct lists and keywords

* add test for language filtering

* Add a more complex test for Timeline::matches, and fix TQ::matches for TQ::Or

* Make the main crate compile + FMT

* Use the new timeline system

- Replace the old "feed" system with timelines
- Display all timelines someone can access on their home page (either their personal ones, or instance timelines)
- Remove functions that were used to get user/local/federated feed
- Add new posts to timelines
- Create a default timeline called "My feed" for everyone, and "Local feed"/"Federated feed" with timelines

@fdb-hiroshima I don't know if that's how you pictured it? If you imagined it differently I can of course make changes.

I hope I didn't forget anything…

* Cargo fmt

* Try to fix the migration

* Fix tests

* Fix the test (for real this time ?)

* Fix the tests ? + fmt

* Use Kind::Like and Kind::Reshare when needed

* Forgot to run cargo fmt once again

* revert translations

* fix reviewed stuff

* reduce code duplication by macros

* cargo fmt
This commit is contained in:
fdb-hiroshima 2019-10-07 19:08:20 +02:00 committed by Ana Gelez
parent a0e3fe8c94
commit 006b44f580
42 changed files with 2691 additions and 359 deletions

View File

@ -317,6 +317,10 @@ p.error {
flex: 1; flex: 1;
margin: 0 1em; margin: 0 1em;
} }
.grow:first-child {
margin: 1em 0;
}
} }
.left-icon { .left-icon {

View File

@ -1,6 +1,6 @@
extern crate rsass; extern crate rsass;
extern crate ructe; extern crate ructe;
use ructe::*; use ructe::Ructe;
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
use std::{env, ffi::OsStr, fs::*, io::Write, path::*}; use std::{env, ffi::OsStr, fs::*, io::Write, path::*};
@ -37,9 +37,10 @@ fn compute_static_hash() -> String {
} }
fn main() { fn main() {
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); Ructe::from_env()
let in_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()).join("templates"); .expect("This must be run with cargo")
compile_templates(&in_dir, &out_dir).expect("compile templates"); .compile_templates("templates")
.expect("compile templates");
compile_themes().expect("Theme compilation error"); compile_themes().expect("Theme compilation error");
recursive_copy(&Path::new("assets").join("icons"), &Path::new("static")) recursive_copy(&Path::new("assets").join("icons"), &Path::new("static"))

View File

@ -0,0 +1,6 @@
-- This file should undo anything in `up.sql`
-- Drop referencing (child) tables before the tables they reference,
-- so no foreign-key target disappears while still pointed at.
DROP TABLE timeline;
DROP TABLE timeline_definition;
DROP TABLE list_elems;
DROP TABLE lists;

View File

@ -0,0 +1,31 @@
-- Your SQL goes here

-- A timeline definition: owned by a user, or instance-wide when
-- user_id is NULL. `query` is the textual timeline query.
CREATE TABLE timeline_definition(
id SERIAL PRIMARY KEY,
user_id integer REFERENCES users ON DELETE CASCADE,
name VARCHAR NOT NULL,
query VARCHAR NOT NULL,
CONSTRAINT timeline_unique_user_name UNIQUE(user_id, name)
);

-- Join table: which posts appear in which timeline.
CREATE TABLE timeline(
id SERIAL PRIMARY KEY,
post_id integer NOT NULL REFERENCES posts ON DELETE CASCADE,
timeline_id integer NOT NULL REFERENCES timeline_definition ON DELETE CASCADE
);

-- A named list (of users, blogs, words or prefixes — see `type`),
-- owned by a user or instance-wide when user_id is NULL.
CREATE TABLE lists(
id SERIAL PRIMARY KEY,
name VARCHAR NOT NULL,
user_id integer REFERENCES users ON DELETE CASCADE,
type integer NOT NULL,
CONSTRAINT list_unique_user_name UNIQUE(user_id, name)
);

-- One element of a list; which column is set depends on the list's type
-- (user_id, blog_id, or word — the latter also used for prefixes).
CREATE TABLE list_elems(
id SERIAL PRIMARY KEY,
list_id integer NOT NULL REFERENCES lists ON DELETE CASCADE,
user_id integer REFERENCES users ON DELETE CASCADE,
blog_id integer REFERENCES blogs ON DELETE CASCADE,
word VARCHAR
);

View File

@ -0,0 +1,4 @@
-- This file should undo anything in `up.sql`
DELETE FROM timeline_definition WHERE name = 'Your feed';
DELETE FROM timeline_definition WHERE name = 'Local feed' AND query = 'local';
-- Fixed typo ('Federared' -> 'Federated'): the up migration creates a
-- timeline named 'Federated feed', so the down migration must match that
-- exact name or the row is never removed.
DELETE FROM timeline_definition WHERE name = 'Federated feed' AND query = 'all';

View File

@ -0,0 +1,17 @@
-- Your SQL goes here

-- NOTE(review): the `--#!` lines below appear to be Rust executed by
-- Plume's custom migration runner (to the SQL engine they are plain
-- comments). They create the "Local feed"/"Federated feed" instance
-- timelines and a "Your feed" timeline per local user, paging users 20
-- at a time. Confirm against the migration runner before editing.
--#!|conn: &Connection, path: &Path| {
--#! super::timeline::Timeline::new_for_instance(conn, "Local feed".into(), "local".into()).expect("Local feed creation error");
--#! super::timeline::Timeline::new_for_instance(conn, "Federated feed".into(), "all".into()).expect("Federated feed creation error");
--#!
--#! for i in 0.. {
--#! if let Some(users) = super::users::User::get_local_page(conn, (i * 20, (i + 1) * 20)).ok().filter(|l| !l.is_empty()) {
--#! for u in users {
--#! super::timeline::Timeline::new_for_user(conn, u.id, "Your feed".into(), format!("followed or author in [ {} ]", u.fqn)).expect("User feed creation error");
--#! }
--#! } else {
--#! break;
--#! }
--#! }
--#!
--#! Ok(())
--#!}

View File

@ -0,0 +1,6 @@
-- This file should undo anything in `up.sql`
-- Drop referencing (child) tables before the tables they reference.
DROP TABLE timeline;
DROP TABLE timeline_definition;
DROP TABLE list_elems;
DROP TABLE lists;

View File

@ -0,0 +1,31 @@
-- Your SQL goes here

-- A timeline definition: owned by a user, or instance-wide when
-- user_id is NULL. `query` is the textual timeline query.
CREATE TABLE timeline_definition(
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
name VARCHAR NOT NULL,
query VARCHAR NOT NULL,
CONSTRAINT timeline_unique_user_name UNIQUE(user_id, name)
);

-- Join table: which posts appear in which timeline.
CREATE TABLE timeline(
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
post_id integer NOT NULL REFERENCES posts(id) ON DELETE CASCADE,
timeline_id integer NOT NULL REFERENCES timeline_definition(id) ON DELETE CASCADE
);

-- A named list (of users, blogs, words or prefixes — see `type`).
CREATE TABLE lists(
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name VARCHAR NOT NULL,
user_id integer REFERENCES users(id) ON DELETE CASCADE,
type integer NOT NULL,
-- Renamed from the copy-pasted `timeline_unique_user_name` to match the
-- PostgreSQL migration: this constraint belongs to `lists`.
CONSTRAINT list_unique_user_name UNIQUE(user_id, name)
);

-- One element of a list; which column is set depends on the list's type.
CREATE TABLE list_elems(
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
list_id integer NOT NULL REFERENCES lists(id) ON DELETE CASCADE,
user_id integer REFERENCES users(id) ON DELETE CASCADE,
blog_id integer REFERENCES blogs(id) ON DELETE CASCADE,
word VARCHAR
);

View File

@ -0,0 +1,4 @@
-- This file should undo anything in `up.sql`
DELETE FROM timeline_definition WHERE name = 'Your feed';
DELETE FROM timeline_definition WHERE name = 'Local feed' AND query = 'local';
-- Fixed typo ('Federared' -> 'Federated'): the up migration creates a
-- timeline named 'Federated feed', so the down migration must match that
-- exact name or the row is never removed.
DELETE FROM timeline_definition WHERE name = 'Federated feed' AND query = 'all';

View File

@ -0,0 +1,17 @@
-- Your SQL goes here

-- NOTE(review): the `--#!` lines below appear to be Rust executed by
-- Plume's custom migration runner (to the SQL engine they are plain
-- comments). They create the "Local feed"/"Federated feed" instance
-- timelines and a "Your feed" timeline per local user, paging users 20
-- at a time. Confirm against the migration runner before editing.
--#!|conn: &Connection, path: &Path| {
--#! super::timeline::Timeline::new_for_instance(conn, "Local feed".into(), "local".into()).expect("Local feed creation error");
--#! super::timeline::Timeline::new_for_instance(conn, "Federated feed".into(), "all".into()).expect("Federated feed creation error");
--#!
--#! for i in 0.. {
--#! if let Some(users) = super::users::User::get_local_page(conn, (i * 20, (i + 1) * 20)).ok().filter(|l| !l.is_empty()) {
--#! for u in users {
--#! super::timeline::Timeline::new_for_user(conn, u.id, "Your feed".into(), format!("followed or author in [ {} ]", u.fqn)).expect("User feed creation error");
--#! }
--#! } else {
--#! break;
--#! }
--#! }
--#!
--#! Ok(())
--#!}

View File

@ -1,6 +1,6 @@
use activitypub::{actor::Group, collection::OrderedCollection, object::Image, CustomObject}; use activitypub::{actor::Group, collection::OrderedCollection, object::Image, CustomObject};
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl, SaveChangesDsl}; use diesel::{self, ExpressionMethods, OptionalExtension, QueryDsl, RunQueryDsl, SaveChangesDsl};
use openssl::{ use openssl::{
hash::MessageDigest, hash::MessageDigest,
pkey::{PKey, Private}, pkey::{PKey, Private},
@ -135,10 +135,8 @@ impl Blog {
pub fn find_by_fqn(c: &PlumeRocket, fqn: &str) -> Result<Blog> { pub fn find_by_fqn(c: &PlumeRocket, fqn: &str) -> Result<Blog> {
let from_db = blogs::table let from_db = blogs::table
.filter(blogs::fqn.eq(fqn)) .filter(blogs::fqn.eq(fqn))
.limit(1) .first(&*c.conn)
.load::<Blog>(&*c.conn)? .optional()?;
.into_iter()
.next();
if let Some(from_db) = from_db { if let Some(from_db) = from_db {
Ok(from_db) Ok(from_db)
} else { } else {
@ -572,9 +570,8 @@ pub(crate) mod tests {
Instance::get_local().unwrap().id Instance::get_local().unwrap().id
); );
// TODO add tests for remote instance // TODO add tests for remote instance
Ok(()) Ok(())
}); })
} }
#[test] #[test]
@ -674,9 +671,8 @@ pub(crate) mod tests {
.unwrap() .unwrap()
.iter() .iter()
.any(|b| b.id == blog[1].id)); .any(|b| b.id == blog[1].id));
Ok(()) Ok(())
}); })
} }
#[test] #[test]
@ -699,9 +695,8 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
assert_eq!(Blog::find_by_fqn(&r, "SomeName").unwrap().id, blog.id); assert_eq!(Blog::find_by_fqn(&r, "SomeName").unwrap().id, blog.id);
Ok(()) Ok(())
}); })
} }
#[test] #[test]
@ -723,9 +718,8 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
assert_eq!(blog.fqn, "SomeName"); assert_eq!(blog.fqn, "SomeName");
Ok(()) Ok(())
}); })
} }
#[test] #[test]
@ -736,9 +730,8 @@ pub(crate) mod tests {
blogs[0].delete(conn, &get_searcher()).unwrap(); blogs[0].delete(conn, &get_searcher()).unwrap();
assert!(Blog::get(conn, blogs[0].id).is_err()); assert!(Blog::get(conn, blogs[0].id).is_err());
Ok(()) Ok(())
}); })
} }
#[test] #[test]
@ -807,9 +800,8 @@ pub(crate) mod tests {
assert!(Blog::get(conn, blog[1].id).is_err()); assert!(Blog::get(conn, blog[1].id).is_err());
user[1].delete(conn, &searcher).unwrap(); user[1].delete(conn, &searcher).unwrap();
assert!(Blog::get(conn, blog[0].id).is_err()); assert!(Blog::get(conn, blog[0].id).is_err());
Ok(()) Ok(())
}); })
} }
#[test] #[test]
@ -870,6 +862,6 @@ pub(crate) mod tests {
assert_eq!(blog.banner_url(conn), blogs[0].banner_url(conn)); assert_eq!(blog.banner_url(conn), blogs[0].banner_url(conn));
Ok(()) Ok(())
}); })
} }
} }

View File

@ -436,8 +436,7 @@ mod tests {
} }
_ => panic!("Unexpected result"), _ => panic!("Unexpected result"),
}; };
Ok(()) Ok(())
}); })
} }
} }

View File

@ -59,3 +59,19 @@ impl CustomizeConnection<Connection, ConnError> for PragmaForeignKey {
}) })
} }
} }
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use diesel::Connection as _;
#[derive(Debug)]
pub struct TestConnectionCustomizer;
impl CustomizeConnection<Connection, ConnError> for TestConnectionCustomizer {
fn on_acquire(&self, conn: &mut Connection) -> Result<(), ConnError> {
PragmaForeignKey.on_acquire(conn)?;
Ok(conn.begin_test_transaction().unwrap())
}
}
}

View File

@ -235,7 +235,8 @@ mod tests {
) )
.expect("Couldn't insert new follow"); .expect("Couldn't insert new follow");
assert_eq!(follow.ap_url, String::from("https://some.url/")); assert_eq!(follow.ap_url, String::from("https://some.url/"));
Ok(()) Ok(())
}); })
} }
} }

View File

@ -135,7 +135,6 @@ pub(crate) mod tests {
} }
_ => panic!("Unexpected result"), _ => panic!("Unexpected result"),
}; };
Ok(()) Ok(())
}); });
} }
@ -170,7 +169,6 @@ pub(crate) mod tests {
} }
_ => panic!("Unexpected result"), _ => panic!("Unexpected result"),
}; };
Ok(()) Ok(())
}); });
} }
@ -212,7 +210,6 @@ pub(crate) mod tests {
} }
_ => panic!("Unexpected result"), _ => panic!("Unexpected result"),
}; };
Ok(()) Ok(())
}); });
} }
@ -255,9 +252,8 @@ pub(crate) mod tests {
"type": "Delete", "type": "Delete",
}); });
assert!(super::inbox(&r, ok_act).is_ok()); assert!(super::inbox(&r, ok_act).is_ok());
Ok(()) Ok(())
}); })
} }
#[test] #[test]
@ -282,7 +278,6 @@ pub(crate) mod tests {
"type": "Delete", "type": "Delete",
}); });
assert!(super::inbox(&r, ok_act).is_ok()); assert!(super::inbox(&r, ok_act).is_ok());
Ok(()) Ok(())
}); });
} }
@ -336,7 +331,6 @@ pub(crate) mod tests {
} }
_ => panic!("Unexpected result"), _ => panic!("Unexpected result"),
} }
Ok(()) Ok(())
}); });
} }
@ -362,7 +356,6 @@ pub(crate) mod tests {
} }
_ => panic!("Unexpected result"), _ => panic!("Unexpected result"),
} }
Ok(()) Ok(())
}); });
} }
@ -401,7 +394,6 @@ pub(crate) mod tests {
"type": "Undo", "type": "Undo",
}); });
assert!(super::inbox(&r, ok_act).is_ok()); assert!(super::inbox(&r, ok_act).is_ok());
Ok(()) Ok(())
}); });
} }
@ -440,7 +432,6 @@ pub(crate) mod tests {
"type": "Undo", "type": "Undo",
}); });
assert!(super::inbox(&r, ok_act).is_ok()); assert!(super::inbox(&r, ok_act).is_ok());
Ok(()) Ok(())
}); });
} }
@ -479,7 +470,6 @@ pub(crate) mod tests {
"type": "Undo", "type": "Undo",
}); });
assert!(super::inbox(&r, ok_act).is_ok()); assert!(super::inbox(&r, ok_act).is_ok());
Ok(()) Ok(())
}); });
} }
@ -509,7 +499,6 @@ pub(crate) mod tests {
}); });
super::inbox(&r, act).unwrap(); super::inbox(&r, act).unwrap();
Ok(()) Ok(())
}); });
} }

View File

@ -1,6 +1,5 @@
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl}; use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl};
use std::iter::Iterator;
use std::sync::RwLock; use std::sync::RwLock;
use ap_url; use ap_url;
@ -61,11 +60,8 @@ impl Instance {
pub fn get_local_uncached(conn: &Connection) -> Result<Instance> { pub fn get_local_uncached(conn: &Connection) -> Result<Instance> {
instances::table instances::table
.filter(instances::local.eq(true)) .filter(instances::local.eq(true))
.limit(1) .first(conn)
.load::<Instance>(conn)? .map_err(Error::from)
.into_iter()
.nth(0)
.ok_or(Error::NotFound)
} }
pub fn cache_local(conn: &Connection) { pub fn cache_local(conn: &Connection) {
@ -127,8 +123,7 @@ impl Instance {
users::table users::table
.filter(users::instance_id.eq(self.id)) .filter(users::instance_id.eq(self.id))
.filter(users::role.eq(Role::Admin as i32)) .filter(users::role.eq(Role::Admin as i32))
.limit(1) .first(conn)
.get_result::<User>(conn)
.map_err(Error::from) .map_err(Error::from)
} }
@ -343,7 +338,6 @@ pub(crate) mod tests {
res.short_description_html.get(), res.short_description_html.get(),
&inserted.short_description_html &inserted.short_description_html
); );
Ok(()) Ok(())
}); });
} }
@ -404,7 +398,6 @@ pub(crate) mod tests {
assert!(last_domaine <= page[0].public_domain); assert!(last_domaine <= page[0].public_domain);
last_domaine = page[0].public_domain.clone(); last_domaine = page[0].public_domain.clone();
} }
Ok(()) Ok(())
}); });
} }
@ -467,7 +460,6 @@ pub(crate) mod tests {
.count(), .count(),
0 0
); );
Ok(()) Ok(())
}); });
} }
@ -504,7 +496,6 @@ pub(crate) mod tests {
SafeString::new("<p><a href=\"#link\">short</a></p>\n") SafeString::new("<p><a href=\"#link\">short</a></p>\n")
); );
assert_eq!(inst.default_license, "CC-BY-SAO".to_owned()); assert_eq!(inst.default_license, "CC-BY-SAO".to_owned());
Ok(()) Ok(())
}); });
} }

View File

@ -63,6 +63,7 @@ pub enum Error {
SerDe, SerDe,
Search(search::SearcherError), Search(search::SearcherError),
Signature, Signature,
TimelineQuery(timeline::query::QueryError),
Unauthorized, Unauthorized,
Url, Url,
Webfinger, Webfinger,
@ -138,6 +139,12 @@ impl From<search::SearcherError> for Error {
} }
} }
impl From<timeline::query::QueryError> for Error {
fn from(err: timeline::query::QueryError) -> Self {
Error::TimelineQuery(err)
}
}
impl From<std::io::Error> for Error { impl From<std::io::Error> for Error {
fn from(err: std::io::Error) -> Self { fn from(err: std::io::Error) -> Self {
Error::Io(err) Error::Io(err)
@ -174,11 +181,8 @@ macro_rules! find_by {
pub fn $fn(conn: &crate::Connection, $($col: $type),+) -> Result<Self> { pub fn $fn(conn: &crate::Connection, $($col: $type),+) -> Result<Self> {
$table::table $table::table
$(.filter($table::$col.eq($col)))+ $(.filter($table::$col.eq($col)))+
.limit(1) .first(conn)
.load::<Self>(conn)? .map_err(Error::from)
.into_iter()
.next()
.ok_or(Error::NotFound)
} }
}; };
} }
@ -224,11 +228,8 @@ macro_rules! get {
pub fn get(conn: &crate::Connection, id: i32) -> Result<Self> { pub fn get(conn: &crate::Connection, id: i32) -> Result<Self> {
$table::table $table::table
.filter($table::id.eq(id)) .filter($table::id.eq(id))
.limit(1) .first(conn)
.load::<Self>(conn)? .map_err(Error::from)
.into_iter()
.next()
.ok_or(Error::NotFound)
} }
}; };
} }
@ -281,11 +282,8 @@ macro_rules! last {
pub fn last(conn: &crate::Connection) -> Result<Self> { pub fn last(conn: &crate::Connection) -> Result<Self> {
$table::table $table::table
.order_by($table::id.desc()) .order_by($table::id.desc())
.limit(1) .first(conn)
.load::<Self>(conn)? .map_err(Error::from)
.into_iter()
.next()
.ok_or(Error::NotFound)
} }
}; };
} }
@ -302,8 +300,6 @@ pub fn ap_url(url: &str) -> String {
mod tests { mod tests {
use db_conn; use db_conn;
use diesel::r2d2::ConnectionManager; use diesel::r2d2::ConnectionManager;
#[cfg(feature = "sqlite")]
use diesel::{dsl::sql_query, RunQueryDsl};
use migrations::IMPORTED_MIGRATIONS; use migrations::IMPORTED_MIGRATIONS;
use plume_common::utils::random_hex; use plume_common::utils::random_hex;
use scheduled_thread_pool::ScheduledThreadPool; use scheduled_thread_pool::ScheduledThreadPool;
@ -331,7 +327,7 @@ mod tests {
lazy_static! { lazy_static! {
static ref DB_POOL: db_conn::DbPool = { static ref DB_POOL: db_conn::DbPool = {
let pool = db_conn::DbPool::builder() let pool = db_conn::DbPool::builder()
.connection_customizer(Box::new(db_conn::PragmaForeignKey)) .connection_customizer(Box::new(db_conn::tests::TestConnectionCustomizer))
.build(ConnectionManager::<Conn>::new(CONFIG.database_url.as_str())) .build(ConnectionManager::<Conn>::new(CONFIG.database_url.as_str()))
.unwrap(); .unwrap();
let dir = temp_dir().join(format!("plume-test-{}", random_hex())); let dir = temp_dir().join(format!("plume-test-{}", random_hex()));
@ -365,6 +361,7 @@ pub mod headers;
pub mod inbox; pub mod inbox;
pub mod instance; pub mod instance;
pub mod likes; pub mod likes;
pub mod lists;
pub mod medias; pub mod medias;
pub mod mentions; pub mod mentions;
pub mod migrations; pub mod migrations;
@ -379,5 +376,6 @@ pub mod safe_string;
pub mod schema; pub mod schema;
pub mod search; pub mod search;
pub mod tags; pub mod tags;
pub mod timeline;
pub mod users; pub mod users;
pub use plume_rocket::PlumeRocket; pub use plume_rocket::PlumeRocket;

View File

@ -9,6 +9,7 @@ use plume_common::activity_pub::{
}; };
use posts::Post; use posts::Post;
use schema::likes; use schema::likes;
use timeline::*;
use users::User; use users::User;
use {Connection, Error, PlumeRocket, Result}; use {Connection, Error, PlumeRocket, Result};
@ -99,6 +100,8 @@ impl AsObject<User, activity::Like, &PlumeRocket> for Post {
}, },
)?; )?;
res.notify(&c.conn)?; res.notify(&c.conn)?;
Timeline::add_to_all_timelines(c, &self, Kind::Like(&actor))?;
Ok(res) Ok(res)
} }
} }

555
plume-models/src/lists.rs Normal file
View File

@ -0,0 +1,555 @@
use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl};
use blogs::Blog;
use schema::{blogs, list_elems, lists, users};
use std::convert::{TryFrom, TryInto};
use users::User;
use {Connection, Error, Result};
/// Represent what a list is supposed to store. Represented in database as an integer
/// (0 = User, 1 = Blog, 2 = Word, 3 = Prefix — see the conversions below).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ListType {
    User,
    Blog,
    Word,
    Prefix,
}

impl TryFrom<i32> for ListType {
    type Error = ();

    /// Decode the database representation; `Err(())` for unknown values.
    fn try_from(i: i32) -> std::result::Result<Self, ()> {
        match i {
            0 => Ok(ListType::User),
            1 => Ok(ListType::Blog),
            2 => Ok(ListType::Word),
            3 => Ok(ListType::Prefix),
            _ => Err(()),
        }
    }
}

// Implement `From<ListType> for i32` instead of a hand-written `Into`:
// the standard blanket impl derives `ListType: Into<i32>` from it, so all
// existing `.into()` call sites keep working unchanged.
impl From<ListType> for i32 {
    fn from(t: ListType) -> i32 {
        match t {
            ListType::User => 0,
            ListType::Blog => 1,
            ListType::Word => 2,
            ListType::Prefix => 3,
        }
    }
}
/// A list as loaded from the `lists` table.
///
/// `user_id` is `None` for instance-wide lists and `Some(id)` for a user's
/// personal list. `type_` is the raw integer form of [`ListType`]; decode
/// it with `List::kind()`.
#[derive(Clone, Queryable, Identifiable)]
pub struct List {
    pub id: i32,
    pub name: String,
    pub user_id: Option<i32>,
    // Private so it can only be set through `List::new`, which takes a
    // typed `ListType`.
    type_: i32,
}
/// Insertable companion of `List`: the row without the generated `id`.
#[derive(Default, Insertable)]
#[table_name = "lists"]
struct NewList<'a> {
    pub name: &'a str,
    pub user_id: Option<i32>,
    type_: i32,
}
// Generates the repetitive per-kind element accessors on `List`.
//
// Internal `@…` rules map a list kind to:
//   @elem     — the `NewListElem` row to insert for one value
//   @in_type  — the element type accepted by the `add_*`/`set_*` methods
//   @out_type — the element type returned by the `list_*` methods
// Public rules generate methods (each first checks `self.kind()` and
// returns `Error::InvalidValue` on a kind mismatch):
//   add:  — bulk-insert values into `list_elems`
//   list: — load all elements, joining `list_elems` with `$table`
//   set:  — clear the list, then delegate to the matching `add` method
macro_rules! func {
    (@elem User $id:expr, $value:expr) => {
        NewListElem {
            list_id: $id,
            user_id: Some(*$value),
            blog_id: None,
            word: None,
        }
    };
    (@elem Blog $id:expr, $value:expr) => {
        NewListElem {
            list_id: $id,
            user_id: None,
            blog_id: Some(*$value),
            word: None,
        }
    };
    (@elem Word $id:expr, $value:expr) => {
        NewListElem {
            list_id: $id,
            user_id: None,
            blog_id: None,
            word: Some($value),
        }
    };
    // Prefixes are stored in the same `word` column as plain words; only
    // the matching logic (see `private::ListElem::prefix_in_list`) differs.
    (@elem Prefix $id:expr, $value:expr) => {
        NewListElem {
            list_id: $id,
            user_id: None,
            blog_id: None,
            word: Some($value),
        }
    };
    (@in_type User) => { i32 };
    (@in_type Blog) => { i32 };
    (@in_type Word) => { &str };
    (@in_type Prefix) => { &str };
    (@out_type User) => { User };
    (@out_type Blog) => { Blog };
    (@out_type Word) => { String };
    (@out_type Prefix) => { String };
    (add: $fn:ident, $kind:ident) => {
        pub fn $fn(&self, conn: &Connection, vals: &[func!(@in_type $kind)]) -> Result<()> {
            if self.kind() != ListType::$kind {
                return Err(Error::InvalidValue);
            }
            diesel::insert_into(list_elems::table)
                .values(
                    vals
                        .iter()
                        .map(|u| func!(@elem $kind self.id, u))
                        .collect::<Vec<_>>(),
                )
                .execute(conn)?;
            Ok(())
        }
    };
    (list: $fn:ident, $kind:ident, $table:ident) => {
        pub fn $fn(&self, conn: &Connection) -> Result<Vec<func!(@out_type $kind)>> {
            if self.kind() != ListType::$kind {
                return Err(Error::InvalidValue);
            }
            list_elems::table
                .filter(list_elems::list_id.eq(self.id))
                .inner_join($table::table)
                .select($table::all_columns)
                .load(conn)
                .map_err(Error::from)
        }
    };
    (set: $fn:ident, $kind:ident, $add:ident) => {
        pub fn $fn(&self, conn: &Connection, val: &[func!(@in_type $kind)]) -> Result<()> {
            if self.kind() != ListType::$kind {
                return Err(Error::InvalidValue);
            }
            self.clear(conn)?;
            self.$add(conn, val)
        }
    }
}
/// One element of a list, as loaded from `list_elems`.
///
/// Which column is populated depends on the owning list's `ListType`:
/// `user_id` for User lists, `blog_id` for Blog lists, and `word` for
/// both Word and Prefix lists (see the `func!` macro's `@elem` rules).
#[derive(Clone, Queryable, Identifiable)]
struct ListElem {
    pub id: i32,
    pub list_id: i32,
    pub user_id: Option<i32>,
    pub blog_id: Option<i32>,
    pub word: Option<String>,
}
/// Insertable companion of `ListElem`: the row without the generated `id`.
#[derive(Default, Insertable)]
#[table_name = "list_elems"]
struct NewListElem<'a> {
    pub list_id: i32,
    pub user_id: Option<i32>,
    pub blog_id: Option<i32>,
    pub word: Option<&'a str>,
}
impl List {
    last!(lists);
    get!(lists);

    /// Insert `val` into `lists` and return the freshly created row.
    ///
    /// NOTE(review): the row is re-read with `last()` because the insert
    /// does not return it; this assumes no concurrent insert happens in
    /// between the two statements.
    fn insert(conn: &Connection, val: NewList) -> Result<Self> {
        diesel::insert_into(lists::table)
            .values(val)
            .execute(conn)?;
        List::last(conn)
    }

    /// All lists owned by `user_id`, or all instance-level lists
    /// (`user_id IS NULL`) when `None` is given.
    pub fn list_for_user(conn: &Connection, user_id: Option<i32>) -> Result<Vec<Self>> {
        if let Some(user_id) = user_id {
            lists::table
                .filter(lists::user_id.eq(user_id))
                .load::<Self>(conn)
                .map_err(Error::from)
        } else {
            lists::table
                .filter(lists::user_id.is_null())
                .load::<Self>(conn)
                .map_err(Error::from)
        }
    }

    /// Find a list by name for a given owner (`None` = instance lists).
    pub fn find_for_user_by_name(
        conn: &Connection,
        user_id: Option<i32>,
        name: &str,
    ) -> Result<Self> {
        if let Some(user_id) = user_id {
            lists::table
                .filter(lists::user_id.eq(user_id))
                .filter(lists::name.eq(name))
                .first(conn)
                .map_err(Error::from)
        } else {
            lists::table
                .filter(lists::user_id.is_null())
                .filter(lists::name.eq(name))
                .first(conn)
                .map_err(Error::from)
        }
    }

    /// Create a new list; `user = None` makes it an instance-level list.
    pub fn new(conn: &Connection, name: &str, user: Option<&User>, kind: ListType) -> Result<Self> {
        Self::insert(
            conn,
            NewList {
                name,
                user_id: user.map(|u| u.id),
                type_: kind.into(),
            },
        )
    }

    /// Returns the kind of a list
    pub fn kind(&self) -> ListType {
        self.type_.try_into().expect("invalid list was constructed")
    }

    /// Return Ok(true) if the list contains the given user, Ok(false) otherwise,
    /// and Err(_) on error
    pub fn contains_user(&self, conn: &Connection, user: i32) -> Result<bool> {
        private::ListElem::user_in_list(conn, self, user)
    }

    /// Return Ok(true) if the list contains the given blog, Ok(false) otherwise,
    /// and Err(_) on error
    pub fn contains_blog(&self, conn: &Connection, blog: i32) -> Result<bool> {
        private::ListElem::blog_in_list(conn, self, blog)
    }

    /// Return Ok(true) if the list contains the given word, Ok(false) otherwise,
    /// and Err(_) on error
    pub fn contains_word(&self, conn: &Connection, word: &str) -> Result<bool> {
        private::ListElem::word_in_list(conn, self, word)
    }

    /// Return Ok(true) if the list matches the given prefix, Ok(false) otherwise,
    /// and Err(_) on error
    pub fn contains_prefix(&self, conn: &Connection, word: &str) -> Result<bool> {
        private::ListElem::prefix_in_list(conn, self, word)
    }

    /// Insert new users in a list
    func! {add: add_users, User}

    /// Insert new blogs in a list
    func! {add: add_blogs, Blog}

    /// Insert new words in a list
    func! {add: add_words, Word}

    /// Insert new prefixes in a list
    func! {add: add_prefixes, Prefix}

    /// Get all users in the list
    func! {list: list_users, User, users}

    /// Get all blogs in the list
    func! {list: list_blogs, Blog, blogs}

    /// Get all words in the list
    pub fn list_words(&self, conn: &Connection) -> Result<Vec<String>> {
        self.list_stringlike(conn, ListType::Word)
    }

    /// Get all prefixes in the list
    pub fn list_prefixes(&self, conn: &Connection) -> Result<Vec<String>> {
        self.list_stringlike(conn, ListType::Prefix)
    }

    /// Shared implementation of `list_words`/`list_prefixes`, which only
    /// differ by the expected `ListType`.
    #[inline(always)]
    fn list_stringlike(&self, conn: &Connection, t: ListType) -> Result<Vec<String>> {
        if self.kind() != t {
            return Err(Error::InvalidValue);
        }
        list_elems::table
            .filter(list_elems::list_id.eq(self.id))
            .filter(list_elems::word.is_not_null())
            .select(list_elems::word)
            .load::<Option<String>>(conn)
            .map_err(Error::from)
            // The `is_not_null` filter guarantees every element is `Some`;
            // `flatten()` (rather than `filter_map(|o| o)`) unwraps the
            // `Option` layer idiomatically.
            .map(|r| r.into_iter().flatten().collect::<Vec<String>>())
    }

    /// Remove every element from the list (the list itself is kept).
    pub fn clear(&self, conn: &Connection) -> Result<()> {
        diesel::delete(list_elems::table.filter(list_elems::list_id.eq(self.id)))
            .execute(conn)
            .map(|_| ())
            .map_err(Error::from)
    }

    func! {set: set_users, User, add_users}
    func! {set: set_blogs, Blog, add_blogs}
    func! {set: set_words, Word, add_words}
    func! {set: set_prefixes, Prefix, add_prefixes}
}
// Helpers kept out of the public API: raw `SELECT EXISTS(...)` membership
// queries over `list_elems`, wrapped by the `List::contains_*` methods.
mod private {
    pub use super::*;
    use diesel::{
        dsl,
        sql_types::{Nullable, Text},
        IntoSql, TextExpressionMethods,
    };

    impl ListElem {
        insert!(list_elems, NewListElem);

        // True if `user` is an element of `list`.
        pub fn user_in_list(conn: &Connection, list: &List, user: i32) -> Result<bool> {
            dsl::select(dsl::exists(
                list_elems::table
                    .filter(list_elems::list_id.eq(list.id))
                    .filter(list_elems::user_id.eq(Some(user))),
            ))
            .get_result(conn)
            .map_err(Error::from)
        }

        // True if `blog` is an element of `list`.
        pub fn blog_in_list(conn: &Connection, list: &List, blog: i32) -> Result<bool> {
            dsl::select(dsl::exists(
                list_elems::table
                    .filter(list_elems::list_id.eq(list.id))
                    .filter(list_elems::blog_id.eq(Some(blog))),
            ))
            .get_result(conn)
            .map_err(Error::from)
        }

        // True if `word` matches an element of `list` exactly.
        pub fn word_in_list(conn: &Connection, list: &List, word: &str) -> Result<bool> {
            dsl::select(dsl::exists(
                list_elems::table
                    .filter(list_elems::list_id.eq(list.id))
                    .filter(list_elems::word.eq(word)),
            ))
            .get_result(conn)
            .map_err(Error::from)
        }

        // True if some element of `list` is a prefix of `word`
        // (SQL: ? LIKE elem.word || '%'). `into_sql::<Nullable<Text>>`
        // lifts the Rust `&str` to a SQL value so it can appear on the
        // left-hand side of LIKE.
        pub fn prefix_in_list(conn: &Connection, list: &List, word: &str) -> Result<bool> {
            dsl::select(dsl::exists(
                list_elems::table
                    .filter(
                        word.into_sql::<Nullable<Text>>()
                            .like(list_elems::word.concat("%")),
                    )
                    .filter(list_elems::list_id.eq(list.id)),
            ))
            .get_result(conn)
            .map_err(Error::from)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use blogs::tests as blog_tests;
    use diesel::Connection;
    use tests::db;

    // Round-trip: every valid discriminant survives i32 -> ListType -> i32,
    // and out-of-range values are rejected.
    #[test]
    fn list_type() {
        for i in 0..4 {
            assert_eq!(i, Into::<i32>::into(ListType::try_from(i).unwrap()));
        }
        ListType::try_from(4).unwrap_err();
    }

    #[test]
    fn list_lists() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let (users, _) = blog_tests::fill_database(conn);
            let l1 = List::new(conn, "list1", None, ListType::User).unwrap();
            let l2 = List::new(conn, "list2", None, ListType::Blog).unwrap();
            let l1u = List::new(conn, "list1", Some(&users[0]), ListType::Word).unwrap();

            // Field-by-field equality helper (List does not derive PartialEq).
            let l_eq = |l1: &List, l2: &List| {
                assert_eq!(l1.id, l2.id);
                assert_eq!(l1.user_id, l2.user_id);
                assert_eq!(l1.name, l2.name);
                assert_eq!(l1.type_, l2.type_);
            };
            let l1bis = List::get(conn, l1.id).unwrap();
            l_eq(&l1, &l1bis);

            let l_inst = List::list_for_user(conn, None).unwrap();
            let l_user = List::list_for_user(conn, Some(users[0].id)).unwrap();
            assert_eq!(2, l_inst.len());
            assert_eq!(1, l_user.len());
            assert!(l_inst.iter().all(|l| l.id != l1u.id));
            l_eq(&l1u, &l_user[0]);
            if l_inst[0].id == l1.id {
                l_eq(&l1, &l_inst[0]);
                l_eq(&l2, &l_inst[1]);
            } else {
                l_eq(&l1, &l_inst[1]);
                l_eq(&l2, &l_inst[0]);
            }

            l_eq(
                &l1,
                &List::find_for_user_by_name(conn, l1.user_id, &l1.name).unwrap(),
            );
            // Was `&&l1u`: the double reference only worked through deref
            // coercion; a single borrow is what `l_eq` expects.
            l_eq(
                &l1u,
                &List::find_for_user_by_name(conn, l1u.user_id, &l1u.name).unwrap(),
            );
            Ok(())
        });
    }

    #[test]
    fn test_user_list() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let (users, blogs) = blog_tests::fill_database(conn);
            let l = List::new(conn, "list", None, ListType::User).unwrap();

            assert_eq!(l.kind(), ListType::User);
            assert!(l.list_users(conn).unwrap().is_empty());

            assert!(!l.contains_user(conn, users[0].id).unwrap());
            assert!(l.add_users(conn, &[users[0].id]).is_ok());
            assert!(l.contains_user(conn, users[0].id).unwrap());

            assert!(l.add_users(conn, &[users[1].id]).is_ok());
            assert!(l.contains_user(conn, users[0].id).unwrap());
            assert!(l.contains_user(conn, users[1].id).unwrap());
            assert_eq!(2, l.list_users(conn).unwrap().len());

            // set_* replaces the whole content of the list.
            assert!(l.set_users(conn, &[users[0].id]).is_ok());
            assert!(l.contains_user(conn, users[0].id).unwrap());
            assert!(!l.contains_user(conn, users[1].id).unwrap());
            assert_eq!(1, l.list_users(conn).unwrap().len());
            assert!(users[0] == l.list_users(conn).unwrap()[0]);

            l.clear(conn).unwrap();
            assert!(l.list_users(conn).unwrap().is_empty());

            // Adding the wrong element kind must be rejected.
            assert!(l.add_blogs(conn, &[blogs[0].id]).is_err());
            Ok(())
        });
    }

    #[test]
    fn test_blog_list() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let (users, blogs) = blog_tests::fill_database(conn);
            let l = List::new(conn, "list", None, ListType::Blog).unwrap();

            assert_eq!(l.kind(), ListType::Blog);
            assert!(l.list_blogs(conn).unwrap().is_empty());

            assert!(!l.contains_blog(conn, blogs[0].id).unwrap());
            assert!(l.add_blogs(conn, &[blogs[0].id]).is_ok());
            assert!(l.contains_blog(conn, blogs[0].id).unwrap());

            assert!(l.add_blogs(conn, &[blogs[1].id]).is_ok());
            assert!(l.contains_blog(conn, blogs[0].id).unwrap());
            assert!(l.contains_blog(conn, blogs[1].id).unwrap());
            assert_eq!(2, l.list_blogs(conn).unwrap().len());

            assert!(l.set_blogs(conn, &[blogs[0].id]).is_ok());
            assert!(l.contains_blog(conn, blogs[0].id).unwrap());
            assert!(!l.contains_blog(conn, blogs[1].id).unwrap());
            assert_eq!(1, l.list_blogs(conn).unwrap().len());
            assert_eq!(blogs[0].id, l.list_blogs(conn).unwrap()[0].id);

            l.clear(conn).unwrap();
            assert!(l.list_blogs(conn).unwrap().is_empty());

            assert!(l.add_users(conn, &[users[0].id]).is_err());
            Ok(())
        });
    }

    #[test]
    fn test_word_list() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let l = List::new(conn, "list", None, ListType::Word).unwrap();

            assert_eq!(l.kind(), ListType::Word);
            assert!(l.list_words(conn).unwrap().is_empty());

            assert!(!l.contains_word(conn, "plume").unwrap());
            assert!(l.add_words(conn, &["plume"]).is_ok());
            assert!(l.contains_word(conn, "plume").unwrap());
            // Word lists match exactly, unlike prefix lists.
            assert!(!l.contains_word(conn, "plumelin").unwrap());

            assert!(l.add_words(conn, &["amsterdam"]).is_ok());
            assert!(l.contains_word(conn, "plume").unwrap());
            assert!(l.contains_word(conn, "amsterdam").unwrap());
            assert_eq!(2, l.list_words(conn).unwrap().len());

            assert!(l.set_words(conn, &["plume"]).is_ok());
            assert!(l.contains_word(conn, "plume").unwrap());
            assert!(!l.contains_word(conn, "amsterdam").unwrap());
            assert_eq!(1, l.list_words(conn).unwrap().len());
            assert_eq!("plume", l.list_words(conn).unwrap()[0]);

            l.clear(conn).unwrap();
            assert!(l.list_words(conn).unwrap().is_empty());

            assert!(l.add_prefixes(conn, &["something"]).is_err());
            Ok(())
        });
    }

    #[test]
    fn test_prefix_list() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let l = List::new(conn, "list", None, ListType::Prefix).unwrap();

            assert_eq!(l.kind(), ListType::Prefix);
            assert!(l.list_prefixes(conn).unwrap().is_empty());

            assert!(!l.contains_prefix(conn, "plume").unwrap());
            assert!(l.add_prefixes(conn, &["plume"]).is_ok());
            assert!(l.contains_prefix(conn, "plume").unwrap());
            // Prefix lists also match longer words starting with the prefix.
            assert!(l.contains_prefix(conn, "plumelin").unwrap());

            assert!(l.add_prefixes(conn, &["amsterdam"]).is_ok());
            assert!(l.contains_prefix(conn, "plume").unwrap());
            assert!(l.contains_prefix(conn, "amsterdam").unwrap());
            assert_eq!(2, l.list_prefixes(conn).unwrap().len());

            assert!(l.set_prefixes(conn, &["plume"]).is_ok());
            assert!(l.contains_prefix(conn, "plume").unwrap());
            assert!(!l.contains_prefix(conn, "amsterdam").unwrap());
            assert_eq!(1, l.list_prefixes(conn).unwrap().len());
            assert_eq!("plume", l.list_prefixes(conn).unwrap()[0]);

            l.clear(conn).unwrap();
            assert!(l.list_prefixes(conn).unwrap().is_empty());

            assert!(l.add_words(conn, &["something"]).is_err());
            Ok(())
        });
    }
}

View File

@ -324,8 +324,6 @@ pub(crate) mod tests {
} }
} }
//set_owner
#[test] #[test]
fn delete() { fn delete() {
let conn = &db(); let conn = &db();
@ -354,13 +352,11 @@ pub(crate) mod tests {
assert!(!Path::new(&path).exists()); assert!(!Path::new(&path).exists());
clean(conn); clean(conn);
Ok(()) Ok(())
}); });
} }
#[test] #[test]
fn set_owner() { fn set_owner() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
@ -404,7 +400,6 @@ pub(crate) mod tests {
.any(|m| m.id == media.id)); .any(|m| m.id == media.id));
clean(conn); clean(conn);
Ok(()) Ok(())
}); });
} }

View File

@ -26,6 +26,7 @@ use safe_string::SafeString;
use schema::posts; use schema::posts;
use search::Searcher; use search::Searcher;
use tags::*; use tags::*;
use timeline::*;
use users::User; use users::User;
use {ap_url, Connection, Error, PlumeRocket, Result, CONFIG}; use {ap_url, Connection, Error, PlumeRocket, Result, CONFIG};
@ -182,15 +183,6 @@ impl Post {
query.get_results::<Post>(conn).map_err(Error::from) query.get_results::<Post>(conn).map_err(Error::from)
} }
pub fn get_recents(conn: &Connection, limit: i64) -> Result<Vec<Post>> {
posts::table
.order(posts::creation_date.desc())
.filter(posts::published.eq(true))
.limit(limit)
.load::<Post>(conn)
.map_err(Error::from)
}
pub fn get_recents_for_author( pub fn get_recents_for_author(
conn: &Connection, conn: &Connection,
author: &User, author: &User,
@ -246,60 +238,6 @@ impl Post {
.map_err(Error::from) .map_err(Error::from)
} }
/// Give a page of all the recent posts known to this instance (= federated timeline)
pub fn get_recents_page(conn: &Connection, (min, max): (i32, i32)) -> Result<Vec<Post>> {
posts::table
.order(posts::creation_date.desc())
.filter(posts::published.eq(true))
.offset(min.into())
.limit((max - min).into())
.load::<Post>(conn)
.map_err(Error::from)
}
/// Give a page of posts from a specific instance
pub fn get_instance_page(
conn: &Connection,
instance_id: i32,
(min, max): (i32, i32),
) -> Result<Vec<Post>> {
use schema::blogs;
let blog_ids = blogs::table
.filter(blogs::instance_id.eq(instance_id))
.select(blogs::id);
posts::table
.order(posts::creation_date.desc())
.filter(posts::published.eq(true))
.filter(posts::blog_id.eq_any(blog_ids))
.offset(min.into())
.limit((max - min).into())
.load::<Post>(conn)
.map_err(Error::from)
}
/// Give a page of customized user feed, based on a list of followed users
pub fn user_feed_page(
conn: &Connection,
followed: Vec<i32>,
(min, max): (i32, i32),
) -> Result<Vec<Post>> {
use schema::post_authors;
let post_ids = post_authors::table
.filter(post_authors::author_id.eq_any(followed))
.select(post_authors::post_id);
posts::table
.order(posts::creation_date.desc())
.filter(posts::published.eq(true))
.filter(posts::id.eq_any(post_ids))
.offset(min.into())
.limit((max - min).into())
.load::<Post>(conn)
.map_err(Error::from)
}
pub fn drafts_by_author(conn: &Connection, author: &User) -> Result<Vec<Post>> { pub fn drafts_by_author(conn: &Connection, author: &User) -> Result<Vec<Post>> {
use schema::post_authors; use schema::post_authors;
@ -335,11 +273,8 @@ impl Post {
use schema::blogs; use schema::blogs;
blogs::table blogs::table
.filter(blogs::id.eq(self.blog_id)) .filter(blogs::id.eq(self.blog_id))
.limit(1) .first(conn)
.load::<Blog>(conn)? .map_err(Error::from)
.into_iter()
.nth(0)
.ok_or(Error::NotFound)
} }
pub fn count_likes(&self, conn: &Connection) -> Result<i64> { pub fn count_likes(&self, conn: &Connection) -> Result<i64> {
@ -717,6 +652,9 @@ impl FromId<PlumeRocket> for Post {
.ok(); .ok();
} }
} }
Timeline::add_to_all_timelines(c, &post, Kind::Original)?;
Ok(post) Ok(post)
} }
} }
@ -928,7 +866,6 @@ mod tests {
} }
_ => panic!("Unexpected result"), _ => panic!("Unexpected result"),
}; };
Ok(()) Ok(())
}); });
} }

View File

@ -9,6 +9,7 @@ use plume_common::activity_pub::{
}; };
use posts::Post; use posts::Post;
use schema::reshares; use schema::reshares;
use timeline::*;
use users::User; use users::User;
use {Connection, Error, PlumeRocket, Result}; use {Connection, Error, PlumeRocket, Result};
@ -124,6 +125,8 @@ impl AsObject<User, Announce, &PlumeRocket> for Post {
}, },
)?; )?;
reshare.notify(conn)?; reshare.notify(conn)?;
Timeline::add_to_all_timelines(c, &self, Kind::Reshare(&actor))?;
Ok(reshare) Ok(reshare)
} }
} }

View File

@ -110,6 +110,26 @@ table! {
} }
} }
table! {
list_elems (id) {
id -> Int4,
list_id -> Int4,
user_id -> Nullable<Int4>,
blog_id -> Nullable<Int4>,
word -> Nullable<Varchar>,
}
}
table! {
lists (id) {
id -> Int4,
name -> Varchar,
user_id -> Nullable<Int4>,
#[sql_name = "type"]
type_ -> Int4,
}
}
table! { table! {
medias (id) { medias (id) {
id -> Int4, id -> Int4,
@ -195,6 +215,23 @@ table! {
} }
} }
table! {
timeline (id) {
id -> Int4,
post_id -> Int4,
timeline_id -> Int4,
}
}
table! {
timeline_definition (id) {
id -> Int4,
user_id -> Nullable<Int4>,
name -> Varchar,
query -> Varchar,
}
}
table! { table! {
users (id) { users (id) {
id -> Int4, id -> Int4,
@ -233,6 +270,10 @@ joinable!(comments -> posts (post_id));
joinable!(comments -> users (author_id)); joinable!(comments -> users (author_id));
joinable!(likes -> posts (post_id)); joinable!(likes -> posts (post_id));
joinable!(likes -> users (user_id)); joinable!(likes -> users (user_id));
joinable!(list_elems -> blogs (blog_id));
joinable!(list_elems -> lists (list_id));
joinable!(list_elems -> users (user_id));
joinable!(lists -> users (user_id));
joinable!(mentions -> comments (comment_id)); joinable!(mentions -> comments (comment_id));
joinable!(mentions -> posts (post_id)); joinable!(mentions -> posts (post_id));
joinable!(mentions -> users (mentioned_id)); joinable!(mentions -> users (mentioned_id));
@ -244,6 +285,9 @@ joinable!(posts -> medias (cover_id));
joinable!(reshares -> posts (post_id)); joinable!(reshares -> posts (post_id));
joinable!(reshares -> users (user_id)); joinable!(reshares -> users (user_id));
joinable!(tags -> posts (post_id)); joinable!(tags -> posts (post_id));
joinable!(timeline -> posts (post_id));
joinable!(timeline -> timeline_definition (timeline_id));
joinable!(timeline_definition -> users (user_id));
joinable!(users -> instances (instance_id)); joinable!(users -> instances (instance_id));
allow_tables_to_appear_in_same_query!( allow_tables_to_appear_in_same_query!(
@ -256,6 +300,8 @@ allow_tables_to_appear_in_same_query!(
follows, follows,
instances, instances,
likes, likes,
list_elems,
lists,
medias, medias,
mentions, mentions,
notifications, notifications,
@ -264,5 +310,7 @@ allow_tables_to_appear_in_same_query!(
posts, posts,
reshares, reshares,
tags, tags,
timeline,
timeline_definition,
users, users,
); );

View File

@ -19,7 +19,7 @@ pub(crate) mod tests {
use tests::db; use tests::db;
pub(crate) fn get_searcher() -> Searcher { pub(crate) fn get_searcher() -> Searcher {
let dir = temp_dir().join("plume-test"); let dir = temp_dir().join(&format!("plume-test-{}", random_hex()));
if dir.exists() { if dir.exists() {
Searcher::open(&dir) Searcher::open(&dir)
} else { } else {
@ -96,11 +96,10 @@ pub(crate) mod tests {
#[test] #[test]
fn open() { fn open() {
let dir = temp_dir().join(format!("plume-test-{}", random_hex()));
{ {
get_searcher() Searcher::create(&dir).unwrap();
}; //make sure $tmp/plume-test-tantivy exist }
let dir = temp_dir().join("plume-test");
Searcher::open(&dir).unwrap(); Searcher::open(&dir).unwrap();
} }
@ -175,7 +174,6 @@ pub(crate) mod tests {
assert!(searcher assert!(searcher
.search_document(conn, Query::from_str(&newtitle).unwrap(), (0, 1)) .search_document(conn, Query::from_str(&newtitle).unwrap(), (0, 1))
.is_empty()); .is_empty());
Ok(()) Ok(())
}); });
} }

View File

@ -0,0 +1,845 @@
use diesel::{self, BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl};
use lists::List;
use posts::Post;
use schema::{posts, timeline, timeline_definition};
use std::ops::Deref;
use {Connection, Error, PlumeRocket, Result};
pub(crate) mod query;
use self::query::{QueryError, TimelineQuery};
pub use self::query::Kind;
/// A timeline definition stored in the database: a named, saved query
/// that selects which posts appear in the feed.
#[derive(Clone, Debug, PartialEq, Queryable, Identifiable, AsChangeset)]
#[table_name = "timeline_definition"]
pub struct Timeline {
    pub id: i32,
    /// Owner of this timeline; `None` for instance-wide timelines.
    pub user_id: Option<i32>,
    /// Display name of the timeline.
    pub name: String,
    /// The raw query string, parsed with `TimelineQuery::parse` on use.
    pub query: String,
}
/// Insertable form of `Timeline` (no `id` yet); built by
/// `Timeline::new_for_user` / `Timeline::new_for_instance`.
#[derive(Default, Insertable)]
#[table_name = "timeline_definition"]
pub struct NewTimeline {
    user_id: Option<i32>,
    name: String,
    query: String,
}
/// Row of the `timeline` join table: records that a given post belongs
/// to a given timeline (written by `Timeline::add_post`).
#[derive(Default, Insertable)]
#[table_name = "timeline"]
struct TimelineEntry {
    pub post_id: i32,
    pub timeline_id: i32,
}
impl Timeline {
insert!(timeline_definition, NewTimeline);
get!(timeline_definition);
pub fn find_for_user_by_name(
conn: &Connection,
user_id: Option<i32>,
name: &str,
) -> Result<Self> {
if let Some(user_id) = user_id {
timeline_definition::table
.filter(timeline_definition::user_id.eq(user_id))
.filter(timeline_definition::name.eq(name))
.first(conn)
.map_err(Error::from)
} else {
timeline_definition::table
.filter(timeline_definition::user_id.is_null())
.filter(timeline_definition::name.eq(name))
.first(conn)
.map_err(Error::from)
}
}
pub fn list_for_user(conn: &Connection, user_id: Option<i32>) -> Result<Vec<Self>> {
if let Some(user_id) = user_id {
timeline_definition::table
.filter(timeline_definition::user_id.eq(user_id))
.load::<Self>(conn)
.map_err(Error::from)
} else {
timeline_definition::table
.filter(timeline_definition::user_id.is_null())
.load::<Self>(conn)
.map_err(Error::from)
}
}
/// Same as `list_for_user`, but also includes instance timelines if `user_id` is `Some`.
pub fn list_all_for_user(conn: &Connection, user_id: Option<i32>) -> Result<Vec<Self>> {
if let Some(user_id) = user_id {
timeline_definition::table
.filter(
timeline_definition::user_id
.eq(user_id)
.or(timeline_definition::user_id.is_null()),
)
.load::<Self>(conn)
.map_err(Error::from)
} else {
timeline_definition::table
.filter(timeline_definition::user_id.is_null())
.load::<Self>(conn)
.map_err(Error::from)
}
}
pub fn new_for_user(
conn: &Connection,
user_id: i32,
name: String,
query_string: String,
) -> Result<Timeline> {
{
let query = TimelineQuery::parse(&query_string)?; // verify the query is valid
if let Some(err) =
query
.list_used_lists()
.into_iter()
.find_map(|(name, kind)| {
let list = List::find_for_user_by_name(conn, Some(user_id), &name)
.map(|l| l.kind() == kind);
match list {
Ok(true) => None,
Ok(false) => Some(Error::TimelineQuery(QueryError::RuntimeError(
format!("list '{}' has the wrong type for this usage", name),
))),
Err(_) => Some(Error::TimelineQuery(QueryError::RuntimeError(
format!("list '{}' was not found", name),
))),
}
})
{
Err(err)?;
}
}
Self::insert(
conn,
NewTimeline {
user_id: Some(user_id),
name,
query: query_string,
},
)
}
pub fn new_for_instance(
conn: &Connection,
name: String,
query_string: String,
) -> Result<Timeline> {
{
let query = TimelineQuery::parse(&query_string)?; // verify the query is valid
if let Some(err) =
query
.list_used_lists()
.into_iter()
.find_map(|(name, kind)| {
let list = List::find_for_user_by_name(conn, None, &name)
.map(|l| l.kind() == kind);
match list {
Ok(true) => None,
Ok(false) => Some(Error::TimelineQuery(QueryError::RuntimeError(
format!("list '{}' has the wrong type for this usage", name),
))),
Err(_) => Some(Error::TimelineQuery(QueryError::RuntimeError(
format!("list '{}' was not found", name),
))),
}
})
{
Err(err)?;
}
}
Self::insert(
conn,
NewTimeline {
user_id: None,
name,
query: query_string,
},
)
}
pub fn update(&self, conn: &Connection) -> Result<Self> {
diesel::update(self).set(self).execute(conn)?;
let timeline = Self::get(conn, self.id)?;
Ok(timeline)
}
pub fn delete(&self, conn: &Connection) -> Result<()> {
diesel::delete(self)
.execute(conn)
.map(|_| ())
.map_err(Error::from)
}
pub fn get_latest(&self, conn: &Connection, count: i32) -> Result<Vec<Post>> {
self.get_page(conn, (0, count))
}
pub fn get_page(&self, conn: &Connection, (min, max): (i32, i32)) -> Result<Vec<Post>> {
timeline::table
.filter(timeline::timeline_id.eq(self.id))
.inner_join(posts::table)
.order(posts::creation_date.desc())
.offset(min.into())
.limit((max - min).into())
.select(posts::all_columns)
.load::<Post>(conn)
.map_err(Error::from)
}
pub fn count_posts(&self, conn: &Connection) -> Result<i64> {
timeline::table
.filter(timeline::timeline_id.eq(self.id))
.inner_join(posts::table)
.count()
.get_result(conn)
.map_err(Error::from)
}
pub fn add_to_all_timelines(rocket: &PlumeRocket, post: &Post, kind: Kind) -> Result<()> {
let timelines = timeline_definition::table
.load::<Self>(rocket.conn.deref())
.map_err(Error::from)?;
for t in timelines {
if t.matches(rocket, post, kind)? {
t.add_post(&rocket.conn, post)?;
}
}
Ok(())
}
pub fn add_post(&self, conn: &Connection, post: &Post) -> Result<()> {
diesel::insert_into(timeline::table)
.values(TimelineEntry {
post_id: post.id,
timeline_id: self.id,
})
.execute(conn)?;
Ok(())
}
pub fn matches(&self, rocket: &PlumeRocket, post: &Post, kind: Kind) -> Result<bool> {
let query = TimelineQuery::parse(&self.query)?;
query.matches(rocket, self, post, kind)
}
}
#[cfg(test)]
mod tests {
use super::*;
use blogs::tests as blogTests;
use diesel::Connection;
use follows::*;
use lists::ListType;
use post_authors::{NewPostAuthor, PostAuthor};
use posts::NewPost;
use safe_string::SafeString;
use tags::Tag;
use tests::{db, rockets};
use users::tests as userTests;
// CRUD smoke test for timelines: creation for users and for the instance,
// lookup by id and by name, and per-owner listing.
#[test]
fn test_timeline() {
    let conn = &db();
    conn.test_transaction::<_, (), _>(|| {
        let users = userTests::fill_database(conn);

        let mut tl1_u1 = Timeline::new_for_user(
            conn,
            users[0].id,
            "my timeline".to_owned(),
            "all".to_owned(),
        )
        .unwrap();
        // This list belongs to users[1] so that tl1_u2's query below validates.
        List::new(conn, "languages I speak", Some(&users[1]), ListType::Prefix).unwrap();
        let tl2_u1 = Timeline::new_for_user(
            conn,
            users[0].id,
            "another timeline".to_owned(),
            "followed".to_owned(),
        )
        .unwrap();
        let tl1_u2 = Timeline::new_for_user(
            conn,
            users[1].id,
            "english posts".to_owned(),
            "lang in \"languages I speak\"".to_owned(),
        )
        .unwrap();
        let tl1_instance = Timeline::new_for_instance(
            conn,
            "english posts".to_owned(),
            "license in [cc]".to_owned(),
        )
        .unwrap();

        assert_eq!(tl1_u1, Timeline::get(conn, tl1_u1.id).unwrap());
        assert_eq!(
            tl2_u1,
            Timeline::find_for_user_by_name(conn, Some(users[0].id), "another timeline")
                .unwrap()
        );
        assert_eq!(
            tl1_instance,
            Timeline::find_for_user_by_name(conn, None, "english posts").unwrap()
        );

        let tl_u1 = Timeline::list_for_user(conn, Some(users[0].id)).unwrap();
        assert_eq!(3, tl_u1.len()); // it is not 2 because there is a "Your feed" tl created for each user automatically
        assert!(tl_u1.iter().fold(false, |res, tl| { res || *tl == tl1_u1 }));
        assert!(tl_u1.iter().fold(false, |res, tl| { res || *tl == tl2_u1 }));

        let tl_instance = Timeline::list_for_user(conn, None).unwrap();
        assert_eq!(3, tl_instance.len()); // there are also the local and federated feed by default
        assert!(tl_instance
            .iter()
            .fold(false, |res, tl| { res || *tl == tl1_instance }));

        // NOTE(review): this rename is never persisted (no `tl1_u1.update(conn)`)
        // and nothing asserts on it afterwards — looks like leftover; confirm.
        tl1_u1.name = "My Super TL".to_owned();
        let new_tl1_u2 = tl1_u2.update(conn).unwrap();

        let tl_u2 = Timeline::list_for_user(conn, Some(users[1].id)).unwrap();
        assert_eq!(2, tl_u2.len()); // same here
        assert!(tl_u2
            .iter()
            .fold(false, |res, tl| { res || *tl == new_tl1_u2 }));

        Ok(())
    });
}
// Timeline creation must be rejected for: unparsable queries, queries
// referencing unknown lists, and queries referencing lists of the wrong type.
#[test]
fn test_timeline_creation_error() {
    let conn = &db();
    conn.test_transaction::<_, (), _>(|| {
        let users = userTests::fill_database(conn);

        // unknown keyword -> parse error
        assert!(Timeline::new_for_user(
            conn,
            users[0].id,
            "my timeline".to_owned(),
            "invalid keyword".to_owned(),
        )
        .is_err());
        assert!(Timeline::new_for_instance(
            conn,
            "my timeline".to_owned(),
            "invalid keyword".to_owned(),
        )
        .is_err());

        // references a list that does not exist
        assert!(Timeline::new_for_user(
            conn,
            users[0].id,
            "my timeline".to_owned(),
            "author in non_existant_list".to_owned(),
        )
        .is_err());
        assert!(Timeline::new_for_instance(
            conn,
            "my timeline".to_owned(),
            "lang in dont-exist".to_owned(),
        )
        .is_err());

        // lists exist, but their type does not match the clause that uses them
        List::new(conn, "friends", Some(&users[0]), ListType::User).unwrap();
        List::new(conn, "idk", None, ListType::Blog).unwrap();
        assert!(Timeline::new_for_user(
            conn,
            users[0].id,
            "my timeline".to_owned(),
            "blog in friends".to_owned(),
        )
        .is_err());
        assert!(Timeline::new_for_instance(
            conn,
            "my timeline".to_owned(),
            "not author in idk".to_owned(),
        )
        .is_err());

        Ok(())
    });
}
// A single-clause timeline ("license in [...]") accepts a matching post
// and rejects a non-matching one.
#[test]
fn test_simple_match() {
    let r = &rockets();
    let conn = &r.conn;
    conn.test_transaction::<_, (), _>(|| {
        let (users, blogs) = blogTests::fill_database(conn);

        let gnu_tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "GNU timeline".to_owned(),
            "license in [AGPL, LGPL, GPL]".to_owned(),
        )
        .unwrap();

        // GPL-licensed post: should match.
        let gnu_post = Post::insert(
            conn,
            NewPost {
                blog_id: blogs[0].id,
                slug: "slug".to_string(),
                title: "About Linux".to_string(),
                content: SafeString::new("you must say GNU/Linux, not Linux!!!"),
                published: true,
                license: "GPL".to_string(),
                ap_url: "".to_string(),
                creation_date: None,
                subtitle: "".to_string(),
                source: "you must say GNU/Linux, not Linux!!!".to_string(),
                cover_id: None,
            },
            &r.searcher,
        )
        .unwrap();
        assert!(gnu_tl.matches(r, &gnu_post, Kind::Original).unwrap());

        // Proprietary license: should not match.
        let non_free_post = Post::insert(
            conn,
            NewPost {
                blog_id: blogs[0].id,
                slug: "slug2".to_string(),
                title: "Private is bad".to_string(),
                content: SafeString::new("so is Microsoft"),
                published: true,
                license: "all right reserved".to_string(),
                ap_url: "".to_string(),
                creation_date: None,
                subtitle: "".to_string(),
                source: "so is Microsoft".to_string(),
                cover_id: None,
            },
            &r.searcher,
        )
        .unwrap();
        assert!(!gnu_tl.matches(r, &non_free_post, Kind::Original).unwrap());

        Ok(())
    });
}
// A multi-clause query with and/or/not and an "exclude likes" modifier.
#[test]
fn test_complex_match() {
    let r = &rockets();
    let conn = &r.conn;
    conn.test_transaction::<_, (), _>(|| {
        let (users, blogs) = blogTests::fill_database(conn);
        // users[0] follows users[1], so the "followed" clause can apply.
        Follow::insert(
            conn,
            NewFollow {
                follower_id: users[0].id,
                following_id: users[1].id,
                ap_url: String::new(),
            },
        )
        .unwrap();

        let fav_blogs_list =
            List::new(conn, "fav_blogs", Some(&users[0]), ListType::Blog).unwrap();
        fav_blogs_list.add_blogs(conn, &[blogs[0].id]).unwrap();

        let my_tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "My timeline".to_owned(),
            "blog in fav_blogs and not has_cover or local and followed exclude likes"
                .to_owned(),
        )
        .unwrap();

        let post = Post::insert(
            conn,
            NewPost {
                blog_id: blogs[0].id,
                slug: "about-linux".to_string(),
                title: "About Linux".to_string(),
                content: SafeString::new("you must say GNU/Linux, not Linux!!!"),
                published: true,
                license: "GPL".to_string(),
                source: "you must say GNU/Linux, not Linux!!!".to_string(),
                ap_url: "".to_string(),
                creation_date: None,
                subtitle: "".to_string(),
                cover_id: None,
            },
            &r.searcher,
        )
        .unwrap();
        assert!(my_tl.matches(r, &post, Kind::Original).unwrap()); // matches because of "blog in fav_blogs" (and there is no cover)

        let post = Post::insert(
            conn,
            NewPost {
                blog_id: blogs[1].id,
                slug: "about-linux-2".to_string(),
                title: "About Linux (2)".to_string(),
                content: SafeString::new(
                    "Actually, GNU+Linux, GNU×Linux, or GNU¿Linux are better.",
                ),
                published: true,
                license: "GPL".to_string(),
                source: "Actually, GNU+Linux, GNU×Linux, or GNU¿Linux are better."
                    .to_string(),
                ap_url: "".to_string(),
                creation_date: None,
                subtitle: "".to_string(),
                cover_id: None,
            },
            &r.searcher,
        )
        .unwrap();
        // A like by a followed user is rejected because of "exclude likes".
        assert!(!my_tl.matches(r, &post, Kind::Like(&users[1])).unwrap());

        Ok(())
    });
}
// `add_to_all_timelines` must route each post only to the timelines whose
// query matches it (here: complementary license filters).
#[test]
fn test_add_to_all_timelines() {
    let r = &rockets();
    let conn = &r.conn;
    conn.test_transaction::<_, (), _>(|| {
        let (users, blogs) = blogTests::fill_database(conn);

        let gnu_tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "GNU timeline".to_owned(),
            "license in [AGPL, LGPL, GPL]".to_owned(),
        )
        .unwrap();
        let non_gnu_tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "Stallman disapproved timeline".to_owned(),
            "not license in [AGPL, LGPL, GPL]".to_owned(),
        )
        .unwrap();

        let gnu_post = Post::insert(
            conn,
            NewPost {
                blog_id: blogs[0].id,
                slug: "slug".to_string(),
                title: "About Linux".to_string(),
                content: SafeString::new("you must say GNU/Linux, not Linux!!!"),
                published: true,
                license: "GPL".to_string(),
                ap_url: "".to_string(),
                creation_date: None,
                subtitle: "".to_string(),
                source: "you must say GNU/Linux, not Linux!!!".to_string(),
                cover_id: None,
            },
            &r.searcher,
        )
        .unwrap();

        let non_free_post = Post::insert(
            conn,
            NewPost {
                blog_id: blogs[0].id,
                slug: "slug2".to_string(),
                title: "Private is bad".to_string(),
                content: SafeString::new("so is Microsoft"),
                published: true,
                license: "all right reserved".to_string(),
                ap_url: "".to_string(),
                creation_date: None,
                subtitle: "".to_string(),
                source: "so is Microsoft".to_string(),
                cover_id: None,
            },
            &r.searcher,
        )
        .unwrap();

        Timeline::add_to_all_timelines(r, &gnu_post, Kind::Original).unwrap();
        Timeline::add_to_all_timelines(r, &non_free_post, Kind::Original).unwrap();

        // Each timeline got exactly the one post its query accepts.
        let res = gnu_tl.get_latest(conn, 2).unwrap();
        assert_eq!(res.len(), 1);
        assert_eq!(res[0].id, gnu_post.id);
        let res = non_gnu_tl.get_latest(conn, 2).unwrap();
        assert_eq!(res.len(), 1);
        assert_eq!(res[0].id, non_free_post.id);

        Ok(())
    });
}
// Inline ("direct") lists in queries: blog/author/tags/lang clauses with
// `[a, b, ...]` literals, plus the include/exclude modifiers for
// reshares and likes on author clauses.
#[test]
fn test_matches_lists_direct() {
    let r = &rockets();
    let conn = &r.conn;
    conn.test_transaction::<_, (), _>(|| {
        let (users, blogs) = blogTests::fill_database(conn);
        let gnu_post = Post::insert(
            conn,
            NewPost {
                blog_id: blogs[0].id,
                slug: "slug".to_string(),
                title: "About Linux".to_string(),
                content: SafeString::new("you must say GNU/Linux, not Linux!!!"),
                published: true,
                license: "GPL".to_string(),
                ap_url: "".to_string(),
                creation_date: None,
                subtitle: "".to_string(),
                source: "you must say GNU/Linux, not Linux!!!".to_string(),
                cover_id: None,
            },
            &r.searcher,
        )
        .unwrap();
        // Tag and author the post so the tags/author clauses below have data.
        gnu_post
            .update_tags(conn, vec![Tag::build_activity("free".to_owned()).unwrap()])
            .unwrap();
        PostAuthor::insert(
            conn,
            NewPostAuthor {
                post_id: gnu_post.id,
                author_id: blogs[0].list_authors(conn).unwrap()[0].id,
            },
        )
        .unwrap();

        // blog in [<the post's blog>] -> matches
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "blog timeline".to_owned(),
            format!("blog in [{}]", blogs[0].fqn),
        )
        .unwrap();
        assert!(tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        // blog in [<some other blog>] -> no match
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "blog timeline".to_owned(),
            "blog in [no_one@nowhere]".to_owned(),
        )
        .unwrap();
        assert!(!tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        // author in [<the post's author>] -> matches
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "author timeline".to_owned(),
            format!(
                "author in [{}]",
                blogs[0].list_authors(conn).unwrap()[0].fqn
            ),
        )
        .unwrap();
        assert!(tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        // author in [<other user>]: by default reshares by that user count,
        // likes do not.
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "author timeline".to_owned(),
            format!("author in [{}]", users[2].fqn),
        )
        .unwrap();
        assert!(!tl.matches(r, &gnu_post, Kind::Original).unwrap());
        assert!(tl.matches(r, &gnu_post, Kind::Reshare(&users[2])).unwrap());
        assert!(!tl.matches(r, &gnu_post, Kind::Like(&users[2])).unwrap());
        tl.delete(conn).unwrap();

        // Modifiers flip that default: include likes, exclude reshares.
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "author timeline".to_owned(),
            format!(
                "author in [{}] include likes exclude reshares",
                users[2].fqn
            ),
        )
        .unwrap();
        assert!(!tl.matches(r, &gnu_post, Kind::Original).unwrap());
        assert!(!tl.matches(r, &gnu_post, Kind::Reshare(&users[2])).unwrap());
        assert!(tl.matches(r, &gnu_post, Kind::Like(&users[2])).unwrap());
        tl.delete(conn).unwrap();

        // tags in [...]
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "tag timeline".to_owned(),
            "tags in [free]".to_owned(),
        )
        .unwrap();
        assert!(tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "tag timeline".to_owned(),
            "tags in [private]".to_owned(),
        )
        .unwrap();
        assert!(!tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        // lang in [...] (the post's content is English)
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "english timeline".to_owned(),
            "lang in [en]".to_owned(),
        )
        .unwrap();
        assert!(tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "franco-italian timeline".to_owned(),
            "lang in [fr, it]".to_owned(),
        )
        .unwrap();
        assert!(!tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        Ok(())
    });
}
/*
#[test]
fn test_matches_lists_saved() {
let r = &rockets();
let conn = &r.conn;
conn.test_transaction::<_, (), _>(|| {
let (users, blogs) = blogTests::fill_database(conn);
let gnu_post = Post::insert(
conn,
NewPost {
blog_id: blogs[0].id,
slug: "slug".to_string(),
title: "About Linux".to_string(),
content: SafeString::new("you must say GNU/Linux, not Linux!!!"),
published: true,
license: "GPL".to_string(),
ap_url: "".to_string(),
creation_date: None,
subtitle: "".to_string(),
source: "you must say GNU/Linux, not Linux!!!".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
gnu_post.update_tags(conn, vec![Tag::build_activity("free".to_owned()).unwrap()]).unwrap();
PostAuthor::insert(conn, NewPostAuthor {post_id: gnu_post.id, author_id: blogs[0].list_authors(conn).unwrap()[0].id}).unwrap();
unimplemented!();
Ok(())
});
}*/
// `contains` clauses against title, subtitle and content: one matching
// and one non-matching query for each field.
#[test]
fn test_matches_keyword() {
    let r = &rockets();
    let conn = &r.conn;
    conn.test_transaction::<_, (), _>(|| {
        let (users, blogs) = blogTests::fill_database(conn);
        let gnu_post = Post::insert(
            conn,
            NewPost {
                blog_id: blogs[0].id,
                slug: "slug".to_string(),
                title: "About Linux".to_string(),
                content: SafeString::new("you must say GNU/Linux, not Linux!!!"),
                published: true,
                license: "GPL".to_string(),
                ap_url: "".to_string(),
                creation_date: None,
                subtitle: "Stallman is our god".to_string(),
                source: "you must say GNU/Linux, not Linux!!!".to_string(),
                cover_id: None,
            },
            &r.searcher,
        )
        .unwrap();

        // title
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "Linux title".to_owned(),
            "title contains Linux".to_owned(),
        )
        .unwrap();
        assert!(tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "Microsoft title".to_owned(),
            "title contains Microsoft".to_owned(),
        )
        .unwrap();
        assert!(!tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        // subtitle
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "Linux subtitle".to_owned(),
            "subtitle contains Stallman".to_owned(),
        )
        .unwrap();
        assert!(tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "Microsoft subtitle".to_owned(),
            "subtitle contains Nadella".to_owned(),
        )
        .unwrap();
        assert!(!tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        // content
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "Linux content".to_owned(),
            "content contains Linux".to_owned(),
        )
        .unwrap();
        assert!(tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();
        let tl = Timeline::new_for_user(
            conn,
            users[0].id,
            "Microsoft content".to_owned(),
            // was "subtitle contains Windows" — a copy-paste slip; this pair
            // is meant to exercise the *content* clause (the assertion outcome
            // is the same either way).
            "content contains Windows".to_owned(),
        )
        .unwrap();
        assert!(!tl.matches(r, &gnu_post, Kind::Original).unwrap());
        tl.delete(conn).unwrap();

        Ok(())
    });
}
}

View File

@ -0,0 +1,877 @@
use blogs::Blog;
use lists::{self, ListType};
use plume_common::activity_pub::inbox::AsActor;
use posts::Post;
use tags::Tag;
use users::User;
use whatlang::{self, Lang};
use {PlumeRocket, Result};
use super::Timeline;
/// Errors raised while parsing or evaluating a timeline query.
#[derive(Debug, Clone, PartialEq)]
pub enum QueryError {
    /// (position, length, message) of the offending token in the query string.
    SyntaxError(usize, usize, String),
    /// The query ended while more tokens were expected.
    UnexpectedEndOfQuery,
    /// The query parsed, but could not be evaluated (e.g. a referenced list
    /// is missing or of the wrong type).
    RuntimeError(String),
}
// Lets the parser use `?` on `Option`s: a `None` becomes "unexpected end
// of query".
// NOTE(review): `std::option::NoneError` is a nightly-only type (`try_trait`
// feature), so this impl ties the crate to a nightly toolchain — confirm
// that is intended.
impl From<std::option::NoneError> for QueryError {
    fn from(_: std::option::NoneError) -> Self {
        QueryError::UnexpectedEndOfQuery
    }
}
/// Result alias used throughout query parsing and evaluation.
pub type QueryResult<T> = std::result::Result<T, QueryError>;
/// How a post reached a timeline: its original publication, or a reshare
/// or like by a given user. Queries can filter on this.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Kind<'a> {
    Original,
    Reshare(&'a User),
    Like(&'a User),
}
/// Lexer tokens. Each variant records its start position in the query;
/// `Word` additionally records its length and its text.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Token<'a> {
    LParent(usize),
    RParent(usize),
    LBracket(usize),
    RBracket(usize),
    Comma(usize),
    Word(usize, usize, &'a str),
}
impl<'a> Token<'a> {
    /// The raw text of this token as it appears in the query.
    fn get_text(&self) -> &'a str {
        match self {
            Token::Word(_, _, s) => s,
            Token::LParent(_) => "(",
            Token::RParent(_) => ")",
            Token::LBracket(_) => "[",
            Token::RBracket(_) => "]",
            Token::Comma(_) => ",",
        }
    }

    /// (start, length) of this token in the query; punctuation has length 1.
    fn get_pos(&self) -> (usize, usize) {
        match self {
            Token::Word(a, b, _) => (*a, *b),
            Token::LParent(a)
            | Token::RParent(a)
            | Token::LBracket(a)
            | Token::RBracket(a)
            | Token::Comma(a) => (*a, 1),
        }
    }

    /// Builds an "expected `token`, got `self`" syntax error located at
    /// this token's position.
    fn get_error<T>(&self, token: Token) -> QueryResult<T> {
        let (b, e) = self.get_pos();
        let message = format!(
            "Syntax Error: Expected {}, got {}",
            token.to_string(),
            self.to_string()
        );
        Err(QueryError::SyntaxError(b, e, message))
    }
}
/// Human-readable rendering of a token for error messages: the token text
/// in single quotes, except for the synthetic `Word(0, 0, _)` tokens that
/// parsers build to describe what was *expected*, which are rendered bare.
///
/// Implemented as `Display` (instead of a manual `ToString`): the blanket
/// `impl<T: Display> ToString for T` keeps every `.to_string()` call site
/// working, and the inner match duplicated `get_text` verbatim, so it now
/// just delegates to it.
impl<'a> std::fmt::Display for Token<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        if let Token::Word(0, 0, v) = self {
            return write!(f, "{}", v);
        }
        write!(f, "'{}'", self.get_text())
    }
}
/// Generates the per-character scanner used by `lex`.
///
/// `$state` holds the `Token::Word` currently being accumulated (if any),
/// and `$quote` tracks whether we are inside a double-quoted section, where
/// whitespace and punctuation lose their special meaning. Each invocation
/// returns the (possibly empty) list of tokens completed by character `$c`
/// at position `$i`; `[$char, $variant]` pairs declare the punctuation
/// characters and the token variants they map to.
macro_rules! gen_tokenizer {
    ( ($c:ident,$i:ident), $state:ident, $quote:ident; $([$char:tt, $variant:tt]),*) => {
        match $c {
            // Unquoted whitespace flushes the pending word, if any.
            space if !*$quote && space.is_whitespace() => match $state.take() {
                Some(v) => vec![v],
                None => vec![],
            },
            $(
                // Unquoted punctuation flushes the pending word and emits itself.
                $char if !*$quote => match $state.take() {
                    Some(v) => vec![v, Token::$variant($i)],
                    None => vec![Token::$variant($i)],
                },
            )*
            // Quotes are not emitted as tokens; they only toggle quoting.
            '"' => {
                *$quote = !*$quote;
                vec![]
            },
            // Any other character extends the pending word. The text is left
            // empty here; `lex` fills it in from the recorded position/length.
            _ => match $state.take() {
                Some(Token::Word(b, l, _)) => {
                    *$state = Some(Token::Word(b, l+1, &""));
                    vec![]
                },
                None => {
                    *$state = Some(Token::Word($i,1,&""));
                    vec![]
                },
                _ => unreachable!(),
            }
        }
    }
}
/// Splits a query string into tokens.
///
/// First pass emits tokens with placeholder word text; a second pass slices
/// the real text out of `stream` using each word's recorded (start, length).
///
/// NOTE(review): positions come from `.zip(0..)` over `chars()` (character
/// counts) but are then used as *byte* offsets to slice `stream` — correct
/// for ASCII queries, wrong (and possibly panicking) for multi-byte input.
/// Confirm queries are ASCII-only or switch to `char_indices`.
fn lex(stream: &str) -> Vec<Token> {
    stream
        .chars()
        .chain(" ".chars()) // force a last whitespace to empty scan's state
        .zip(0..)
        .scan((None, false), |(state, quote), (c, i)| {
            Some(gen_tokenizer!((c,i), state, quote;
                ['(', LParent], [')', RParent],
                ['[', LBracket], [']', RBracket],
                [',', Comma]))
        })
        .flatten()
        .map(|t| {
            // Fill in the word text now that the full token span is known.
            if let Token::Word(b, e, _) = t {
                Token::Word(b, e, &stream[b..b + e])
            } else {
                t
            }
        })
        .collect()
}
/// Private internals of TimelineQuery
///
/// The parsed query tree: boolean combinations (`Or`/`And`) of `Arg` leaves.
#[derive(Debug, Clone, PartialEq)]
enum TQ<'a> {
    Or(Vec<TQ<'a>>),
    And(Vec<TQ<'a>>),
    /// A leaf test; the `bool` is the `not` flag (true inverts the result).
    Arg(Arg<'a>, bool),
}
impl<'a> TQ<'a> {
    /// Evaluates this sub-query against `post` arriving as `kind`.
    ///
    /// `Or`/`And` fold over all children with `try_fold` so evaluation
    /// errors always propagate (no boolean short-circuiting); `Arg` XORs
    /// the leaf result with its `not` flag.
    fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        kind: Kind,
    ) -> Result<bool> {
        match self {
            TQ::Or(inner) => inner.iter().try_fold(false, |s, e| {
                e.matches(rocket, timeline, post, kind).map(|r| s || r)
            }),
            TQ::And(inner) => inner.iter().try_fold(true, |s, e| {
                e.matches(rocket, timeline, post, kind).map(|r| s && r)
            }),
            TQ::Arg(inner, invert) => Ok(inner.matches(rocket, timeline, post, kind)? ^ invert),
        }
    }

    /// All (list name, expected list type) pairs referenced by `x in name`
    /// clauses, so callers can verify the lists exist before saving a
    /// timeline. Inline `[...]` lists are not included.
    fn list_used_lists(&self) -> Vec<(String, ListType)> {
        match self {
            TQ::Or(inner) => inner.iter().flat_map(TQ::list_used_lists).collect(),
            TQ::And(inner) => inner.iter().flat_map(TQ::list_used_lists).collect(),
            TQ::Arg(Arg::In(typ, List::List(name)), _) => vec![(
                name.to_string(),
                // The clause's subject determines which list type is legal.
                match typ {
                    WithList::Blog => ListType::Blog,
                    WithList::Author { .. } => ListType::User,
                    WithList::License => ListType::Word,
                    WithList::Tags => ListType::Word,
                    WithList::Lang => ListType::Prefix,
                },
            )],
            TQ::Arg(_, _) => vec![],
        }
    }
}
/// A leaf of the query tree: a single test applied to a post.
#[derive(Debug, Clone, PartialEq)]
enum Arg<'a> {
    /// `<subject> in <list>` (named or inline list).
    In(WithList, List<'a>),
    /// `<field> contains <word>`.
    Contains(WithContains, &'a str),
    /// A bare boolean keyword (e.g. `followed`, `has_cover`, `local`).
    Boolean(Bool),
}
impl<'a> Arg<'a> {
    /// Dispatches the predicate to the matcher for its operator kind.
    pub fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        kind: Kind,
    ) -> Result<bool> {
        match self {
            Arg::In(t, l) => t.matches(rocket, timeline, post, l, kind),
            Arg::Contains(t, v) => t.matches(post, v),
            Arg::Boolean(t) => t.matches(rocket, timeline, post, kind),
        }
    }
}
/// Post fields that can be tested with the `in` operator.
#[derive(Debug, Clone, PartialEq)]
enum WithList {
    Blog,
    /// Author test; `boosts`/`likes` control whether reshares and likes
    /// by listed authors also count as matches.
    Author { boosts: bool, likes: bool },
    License,
    Tags,
    Lang,
}
impl WithList {
    /// Tests `post` against a list, which is either a stored, named list
    /// (`List::List`) or an inline array of words (`List::Array`).
    ///
    /// # Errors
    /// Fails if a named list cannot be found for the timeline's owner, if
    /// its stored type does not match the queried field, or on any
    /// underlying database error.
    pub fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        list: &List,
        kind: Kind,
    ) -> Result<bool> {
        match list {
            List::List(name) => {
                let list =
                    lists::List::find_for_user_by_name(&rocket.conn, timeline.user_id, &name)?;
                // The stored list's type must agree with the queried field.
                match (self, list.kind()) {
                    (WithList::Blog, ListType::Blog) => {
                        list.contains_blog(&rocket.conn, post.blog_id)
                    }
                    (WithList::Author { boosts, likes }, ListType::User) => match kind {
                        Kind::Original => Ok(list
                            .list_users(&rocket.conn)?
                            .iter()
                            .any(|a| post.is_author(&rocket.conn, a.id).unwrap_or(false))),
                        // Reshares/likes only match if the query opted in
                        // via `include reshares` / `include likes`.
                        Kind::Reshare(u) => {
                            if *boosts {
                                list.contains_user(&rocket.conn, u.id)
                            } else {
                                Ok(false)
                            }
                        }
                        Kind::Like(u) => {
                            if *likes {
                                list.contains_user(&rocket.conn, u.id)
                            } else {
                                Ok(false)
                            }
                        }
                    },
                    (WithList::License, ListType::Word) => {
                        list.contains_word(&rocket.conn, &post.license)
                    }
                    (WithList::Tags, ListType::Word) => {
                        let tags = Tag::for_post(&rocket.conn, post.id)?;
                        Ok(list
                            .list_words(&rocket.conn)?
                            .iter()
                            .any(|s| tags.iter().any(|t| s == &t.tag)))
                    }
                    (WithList::Lang, ListType::Prefix) => {
                        // Detect the post's language, defaulting to English
                        // when detection is not considered reliable.
                        let lang = whatlang::detect(post.content.get())
                            .and_then(|i| {
                                if i.is_reliable() {
                                    Some(i.lang())
                                } else {
                                    None
                                }
                            })
                            .unwrap_or(Lang::Eng)
                            .name();
                        // NOTE(review): unlike the List::Array branch below,
                        // the name is not lowercased here — presumably
                        // contains_prefix is case-insensitive; verify.
                        list.contains_prefix(&rocket.conn, lang)
                    }
                    (_, _) => Err(QueryError::RuntimeError(format!(
                        "The list '{}' is of the wrong type for this usage",
                        name
                    )))?,
                }
            }
            List::Array(list) => match self {
                // Items that fail to resolve (unknown blog/user FQNs) are
                // silently skipped rather than erroring the whole query.
                WithList::Blog => Ok(list
                    .iter()
                    .filter_map(|b| Blog::find_by_fqn(rocket, b).ok())
                    .any(|b| b.id == post.blog_id)),
                WithList::Author { boosts, likes } => match kind {
                    Kind::Original => Ok(list
                        .iter()
                        .filter_map(|a| User::find_by_fqn(rocket, a).ok())
                        .any(|a| post.is_author(&rocket.conn, a.id).unwrap_or(false))),
                    Kind::Reshare(u) => {
                        if *boosts {
                            Ok(list.iter().any(|user| &u.fqn == user))
                        } else {
                            Ok(false)
                        }
                    }
                    Kind::Like(u) => {
                        if *likes {
                            Ok(list.iter().any(|user| &u.fqn == user))
                        } else {
                            Ok(false)
                        }
                    }
                },
                WithList::License => Ok(list.iter().any(|s| s == &post.license)),
                WithList::Tags => {
                    let tags = Tag::for_post(&rocket.conn, post.id)?;
                    Ok(list.iter().any(|s| tags.iter().any(|t| s == &t.tag)))
                }
                WithList::Lang => {
                    // Same detection as the named-list branch, but matched
                    // case-insensitively against the inline prefixes.
                    let lang = whatlang::detect(post.content.get())
                        .and_then(|i| {
                            if i.is_reliable() {
                                Some(i.lang())
                            } else {
                                None
                            }
                        })
                        .unwrap_or(Lang::Eng)
                        .name()
                        .to_lowercase();
                    Ok(list.iter().any(|s| lang.starts_with(&s.to_lowercase())))
                }
            },
        }
    }
}
/// Post text fields that can be tested with the `contains` operator.
#[derive(Debug, Clone, PartialEq)]
enum WithContains {
    Title,
    Subtitle,
    Content,
}
impl WithContains {
    /// Returns whether `value` occurs as a substring of the selected
    /// text field of `post`.
    pub fn matches(&self, post: &Post, value: &str) -> Result<bool> {
        let found = match self {
            WithContains::Title => post.title.contains(value),
            WithContains::Subtitle => post.subtitle.contains(value),
            WithContains::Content => post.content.contains(value),
        };
        Ok(found)
    }
}
/// Bare keyword predicates that take no operand.
#[derive(Debug, Clone, PartialEq)]
enum Bool {
    /// Post authored by someone the timeline owner follows; the flags
    /// control whether their reshares and likes also match.
    Followed { boosts: bool, likes: bool },
    /// Post has a cover image.
    HasCover,
    /// Post comes from a blog on this instance.
    Local,
    /// Always matches.
    All,
}
impl Bool {
    /// Evaluates a keyword predicate against `post`.
    ///
    /// # Errors
    /// Forwards database errors from follow lookups and blog fetching.
    pub fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        kind: Kind,
    ) -> Result<bool> {
        match self {
            Bool::Followed { boosts, likes } => {
                // `followed` only makes sense for personal timelines; an
                // instance timeline has no owner to compare against.
                // (Was `is_none()` + `unwrap()`; a single `match` avoids
                // the redundant check-then-unwrap.)
                let user = match timeline.user_id {
                    Some(id) => id,
                    None => return Ok(false),
                };
                match kind {
                    // try_fold propagates database errors instead of
                    // swallowing them as a plain `any` would.
                    Kind::Original => post
                        .get_authors(&rocket.conn)?
                        .iter()
                        .try_fold(false, |s, a| {
                            a.is_followed_by(&rocket.conn, user).map(|r| s || r)
                        }),
                    Kind::Reshare(u) => {
                        if *boosts {
                            u.is_followed_by(&rocket.conn, user)
                        } else {
                            Ok(false)
                        }
                    }
                    Kind::Like(u) => {
                        if *likes {
                            u.is_followed_by(&rocket.conn, user)
                        } else {
                            Ok(false)
                        }
                    }
                }
            }
            Bool::HasCover => Ok(post.cover_id.is_some()),
            Bool::Local => Ok(post.get_blog(&rocket.conn)?.is_local()),
            Bool::All => Ok(true),
        }
    }
}
/// Right-hand side of an `in` test.
#[derive(Debug, Clone, PartialEq)]
enum List<'a> {
    /// Name of a stored list, resolved at evaluation time.
    List(&'a str),
    /// Inline `[a, b, c]` array of words from the query itself.
    Array(Vec<&'a str>),
}
fn parse_s<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
let mut res = Vec::new();
let (left, token) = parse_a(&stream)?;
res.push(token);
stream = left;
while !stream.is_empty() {
match stream[0] {
Token::Word(_, _, and) if and == "or" => {}
_ => break,
}
let (left, token) = parse_a(&stream[1..])?;
res.push(token);
stream = left;
}
if res.len() == 1 {
Ok((stream, res.remove(0)))
} else {
Ok((stream, TQ::Or(res)))
}
}
fn parse_a<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
let mut res = Vec::new();
let (left, token) = parse_b(&stream)?;
res.push(token);
stream = left;
while !stream.is_empty() {
match stream[0] {
Token::Word(_, _, and) if and == "and" => {}
_ => break,
}
let (left, token) = parse_b(&stream[1..])?;
res.push(token);
stream = left;
}
if res.len() == 1 {
Ok((stream, res.remove(0)))
} else {
Ok((stream, TQ::And(res)))
}
}
/// B -> "(" S ")" | C ; a parenthesised sub-query or a single clause.
fn parse_b<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
    match stream.get(0) {
        Some(Token::LParent(_)) => {
            let (left, token) = parse_s(&stream[1..])?;
            match left.get(0) {
                Some(Token::RParent(_)) => Ok((&left[1..], token)),
                Some(t) => t.get_error(Token::RParent(0)),
                // `None?` turns the missing closing paren into an
                // end-of-query error via `?` on Option — presumably
                // mapped to QueryError::UnexpectedEndOfQuery; confirm
                // against the QueryError Try/From impls.
                None => None?,
            }
        }
        _ => parse_c(stream),
    }
}
/// C -> ["not"] D ; an optional leading "not" inverts the clause.
fn parse_c<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
    let inverted = match stream.get(0) {
        Some(Token::Word(_, _, not)) if not == &"not" => true,
        _ => false,
    };
    let rest = if inverted { &stream[1..] } else { stream };
    let (arg_left, arg) = parse_d(rest)?;
    Ok((arg_left, TQ::Arg(arg, inverted)))
}
/// D -> the body of a clause: an `in` test, a `contains` test, or a bare
/// boolean keyword.
///
/// `author in …` and `followed` accept trailing `include`/`exclude`
/// modifiers for `reshares`/`reshare` and `likes`/`like`; reshares are
/// included and likes excluded by default.
fn parse_d<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Arg<'a>)> {
    match stream.get(0).map(Token::get_text)? {
        s @ "blog" | s @ "author" | s @ "license" | s @ "tags" | s @ "lang" => {
            match stream.get(1)? {
                Token::Word(_, _, r#in) if r#in == &"in" => {
                    let (mut left, list) = parse_l(&stream[2..])?;
                    let kind = match s {
                        "blog" => WithList::Blog,
                        "author" => {
                            let mut boosts = true;
                            let mut likes = false;
                            // Consume `include`/`exclude` modifier pairs.
                            while let Some(Token::Word(s, e, clude)) = left.get(0) {
                                if *clude != "include" && *clude != "exclude" {
                                    break;
                                }
                                match (*clude, left.get(1).map(Token::get_text)?) {
                                    ("include", "reshares") | ("include", "reshare") => {
                                        boosts = true
                                    }
                                    ("exclude", "reshares") | ("exclude", "reshare") => {
                                        boosts = false
                                    }
                                    ("include", "likes") | ("include", "like") => likes = true,
                                    ("exclude", "likes") | ("exclude", "like") => likes = false,
                                    (_, w) => {
                                        return Token::Word(*s, *e, w).get_error(Token::Word(
                                            0,
                                            0,
                                            "one of 'likes' or 'reshares'",
                                        ))
                                    }
                                }
                                left = &left[2..];
                            }
                            WithList::Author { boosts, likes }
                        }
                        "license" => WithList::License,
                        "tags" => WithList::Tags,
                        "lang" => WithList::Lang,
                        // `s` can only be one of the five words matched above.
                        _ => unreachable!(),
                    };
                    Ok((left, Arg::In(kind, list)))
                }
                t => t.get_error(Token::Word(0, 0, "'in'")),
            }
        }
        s @ "title" | s @ "subtitle" | s @ "content" => match (stream.get(1)?, stream.get(2)?) {
            (Token::Word(_, _, contains), Token::Word(_, _, w)) if contains == &"contains" => Ok((
                &stream[3..],
                Arg::Contains(
                    match s {
                        "title" => WithContains::Title,
                        "subtitle" => WithContains::Subtitle,
                        "content" => WithContains::Content,
                        _ => unreachable!(),
                    },
                    w,
                ),
            )),
            (Token::Word(_, _, contains), t) if contains == &"contains" => {
                t.get_error(Token::Word(0, 0, "any word"))
            }
            (t, _) => t.get_error(Token::Word(0, 0, "'contains'")),
        },
        s @ "followed" | s @ "has_cover" | s @ "local" | s @ "all" => match s {
            "followed" => {
                let mut boosts = true;
                let mut likes = false;
                while let Some(Token::Word(s, e, clude)) = stream.get(1) {
                    if *clude != "include" && *clude != "exclude" {
                        break;
                    }
                    match (*clude, stream.get(2).map(Token::get_text)?) {
                        ("include", "reshares") | ("include", "reshare") => boosts = true,
                        ("exclude", "reshares") | ("exclude", "reshare") => boosts = false,
                        ("include", "likes") | ("include", "like") => likes = true,
                        ("exclude", "likes") | ("exclude", "like") => likes = false,
                        (_, w) => {
                            // Fix: this message used to claim 'boosts', but
                            // the keyword actually accepted is 'reshares'
                            // (matching the identical check for `author in`).
                            return Token::Word(*s, *e, w).get_error(Token::Word(
                                0,
                                0,
                                "one of 'likes' or 'reshares'",
                            ))
                        }
                    }
                    stream = &stream[2..];
                }
                Ok((&stream[1..], Arg::Boolean(Bool::Followed { boosts, likes })))
            }
            "has_cover" => Ok((&stream[1..], Arg::Boolean(Bool::HasCover))),
            "local" => Ok((&stream[1..], Arg::Boolean(Bool::Local))),
            "all" => Ok((&stream[1..], Arg::Boolean(Bool::All))),
            _ => unreachable!(),
        },
        _ => stream.get(0)?.get_error(Token::Word(
            0,
            0,
            "one of 'blog', 'author', 'license', 'tags', 'lang', \
             'title', 'subtitle', 'content', 'followed', 'has_cover', 'local' or 'all'",
        )),
    }
}
/// L -> "[" M "]" | word ; either an inline array of words or the name
/// of a stored list.
fn parse_l<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], List<'a>)> {
    match stream.get(0)? {
        Token::LBracket(_) => {
            let (left, list) = parse_m(&stream[1..])?;
            match left.get(0)? {
                Token::RBracket(_) => Ok((&left[1..], List::Array(list))),
                t => t.get_error(Token::Word(0, 0, "one of ']' or ','")),
            }
        }
        Token::Word(_, _, list) => Ok((&stream[1..], List::List(list))),
        t => t.get_error(Token::Word(0, 0, "one of [list, of, words] or list_name")),
    }
}
/// M -> word ("," word)* ; the comma-separated items of an inline list.
///
/// Returns the remaining tokens and the collected words; the caller
/// (`parse_l`) handles the surrounding brackets.
fn parse_m<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Vec<&'a str>)> {
    let mut res: Vec<&str> = Vec::new();
    res.push(match stream.get(0)? {
        Token::Word(_, _, w) => w,
        t => return t.get_error(Token::Word(0, 0, "any word")),
    });
    stream = &stream[1..];
    // Fix: use `get(0)` instead of indexing `stream[0]`. If the query
    // ends right after a word (e.g. an unclosed `lang in [a`), indexing
    // an empty slice would panic; with `get` the loop simply ends and
    // parse_l reports a proper end-of-query error.
    while let Some(Token::Comma(_)) = stream.get(0) {
        res.push(match stream.get(1)? {
            Token::Word(_, _, w) => w,
            t => return t.get_error(Token::Word(0, 0, "any word")),
        });
        stream = &stream[2..];
    }
    Ok((stream, res))
}
/// A compiled timeline query, ready to be evaluated against posts.
/// Wraps the root of the parsed predicate tree; borrows from the
/// original query string.
#[derive(Debug, Clone)]
pub struct TimelineQuery<'a>(TQ<'a>);
impl<'a> TimelineQuery<'a> {
pub fn parse(query: &'a str) -> QueryResult<Self> {
parse_s(&lex(query))
.and_then(|(left, res)| {
if left.is_empty() {
Ok(res)
} else {
left[0].get_error(Token::Word(0, 0, "on of 'or' or 'and'"))
}
})
.map(TimelineQuery)
}
pub fn matches(
&self,
rocket: &PlumeRocket,
timeline: &Timeline,
post: &Post,
kind: Kind,
) -> Result<bool> {
self.0.matches(rocket, timeline, post, kind)
}
pub fn list_used_lists(&self) -> Vec<(String, ListType)> {
self.0.list_used_lists()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_lexer() {
        // Offsets are byte positions; a quoted section becomes a single
        // Word token spanning the quoted content (quotes excluded).
        assert_eq!(
            lex("()[ ],two words \"something quoted with , and [\""),
            vec![
                Token::LParent(0),
                Token::RParent(1),
                Token::LBracket(2),
                Token::RBracket(4),
                Token::Comma(5),
                Token::Word(6, 3, "two"),
                Token::Word(10, 5, "words"),
                Token::Word(17, 29, "something quoted with , and ["),
            ]
        );
    }

    #[test]
    fn test_parser() {
        // Precedence: "and" binds tighter than "or"; parentheses override.
        let q = TimelineQuery::parse(r#"lang in [fr, en] and (license in my_fav_lic or not followed) or title contains "Plume is amazing""#)
            .unwrap();
        assert_eq!(
            q.0,
            TQ::Or(vec![
                TQ::And(vec![
                    TQ::Arg(
                        Arg::In(WithList::Lang, List::Array(vec!["fr", "en"]),),
                        false
                    ),
                    TQ::Or(vec![
                        TQ::Arg(Arg::In(WithList::License, List::List("my_fav_lic"),), false),
                        TQ::Arg(
                            Arg::Boolean(Bool::Followed {
                                boosts: true,
                                likes: false
                            }),
                            true
                        ),
                    ]),
                ]),
                TQ::Arg(
                    Arg::Contains(WithContains::Title, "Plume is amazing",),
                    false
                ),
            ])
        );
        // Every `in`-capable field, with named lists.
        let lists = TimelineQuery::parse(
            r#"blog in a or author in b include likes or license in c or tags in d or lang in e "#,
        )
        .unwrap();
        assert_eq!(
            lists.0,
            TQ::Or(vec![
                TQ::Arg(Arg::In(WithList::Blog, List::List("a"),), false),
                TQ::Arg(
                    Arg::In(
                        WithList::Author {
                            boosts: true,
                            likes: true
                        },
                        List::List("b"),
                    ),
                    false
                ),
                TQ::Arg(Arg::In(WithList::License, List::List("c"),), false),
                TQ::Arg(Arg::In(WithList::Tags, List::List("d"),), false),
                TQ::Arg(Arg::In(WithList::Lang, List::List("e"),), false),
            ])
        );
        let contains = TimelineQuery::parse(
            r#"title contains a or subtitle contains b or content contains c"#,
        )
        .unwrap();
        assert_eq!(
            contains.0,
            TQ::Or(vec![
                TQ::Arg(Arg::Contains(WithContains::Title, "a"), false),
                TQ::Arg(Arg::Contains(WithContains::Subtitle, "b"), false),
                TQ::Arg(Arg::Contains(WithContains::Content, "c"), false),
            ])
        );
        // Defaults are boosts=true/likes=false; include/exclude flip them.
        let booleans = TimelineQuery::parse(
            r#"followed include like exclude reshares and has_cover and local and all"#,
        )
        .unwrap();
        assert_eq!(
            booleans.0,
            TQ::And(vec![
                TQ::Arg(
                    Arg::Boolean(Bool::Followed {
                        boosts: false,
                        likes: true
                    }),
                    false
                ),
                TQ::Arg(Arg::Boolean(Bool::HasCover), false),
                TQ::Arg(Arg::Boolean(Bool::Local), false),
                TQ::Arg(Arg::Boolean(Bool::All), false),
            ])
        );
    }

    #[test]
    fn test_rejection_parser() {
        // Each case checks the reported byte offset, token length and message.
        let missing_and_or = TimelineQuery::parse(r#"followed or has_cover local"#).unwrap_err();
        assert_eq!(
            missing_and_or,
            QueryError::SyntaxError(
                22,
                5,
                "Syntax Error: Expected on of 'or' or 'and', got 'local'".to_owned()
            )
        );
        let unbalanced_parent =
            TimelineQuery::parse(r#"followed and (has_cover or local"#).unwrap_err();
        assert_eq!(unbalanced_parent, QueryError::UnexpectedEndOfQuery);
        let missing_and_or_in_par =
            TimelineQuery::parse(r#"(title contains "abc def" followed)"#).unwrap_err();
        assert_eq!(
            missing_and_or_in_par,
            QueryError::SyntaxError(
                26,
                8,
                "Syntax Error: Expected ')', got 'followed'".to_owned()
            )
        );
        let expect_in = TimelineQuery::parse(r#"lang contains abc"#).unwrap_err();
        assert_eq!(
            expect_in,
            QueryError::SyntaxError(
                5,
                8,
                "Syntax Error: Expected 'in', got 'contains'".to_owned()
            )
        );
        let expect_contains = TimelineQuery::parse(r#"title in abc"#).unwrap_err();
        assert_eq!(
            expect_contains,
            QueryError::SyntaxError(
                6,
                2,
                "Syntax Error: Expected 'contains', got 'in'".to_owned()
            )
        );
        let expect_keyword = TimelineQuery::parse(r#"not_a_field contains something"#).unwrap_err();
        assert_eq!(expect_keyword, QueryError::SyntaxError(0, 11, "Syntax Error: Expected one of 'blog', \
'author', 'license', 'tags', 'lang', 'title', 'subtitle', 'content', 'followed', 'has_cover', \
'local' or 'all', got 'not_a_field'".to_owned()));
        let expect_bracket_or_comma = TimelineQuery::parse(r#"lang in [en ["#).unwrap_err();
        assert_eq!(
            expect_bracket_or_comma,
            QueryError::SyntaxError(
                12,
                1,
                "Syntax Error: Expected one of ']' or ',', \
                 got '['"
                    .to_owned()
            )
        );
        let expect_bracket = TimelineQuery::parse(r#"lang in )abc"#).unwrap_err();
        assert_eq!(
            expect_bracket,
            QueryError::SyntaxError(
                8,
                1,
                "Syntax Error: Expected one of [list, of, words] or list_name, \
                 got ')'"
                    .to_owned()
            )
        );
        let expect_word = TimelineQuery::parse(r#"title contains ,"#).unwrap_err();
        assert_eq!(
            expect_word,
            QueryError::SyntaxError(15, 1, "Syntax Error: Expected any word, got ','".to_owned())
        );
        let got_bracket = TimelineQuery::parse(r#"lang in []"#).unwrap_err();
        assert_eq!(
            got_bracket,
            QueryError::SyntaxError(9, 1, "Syntax Error: Expected any word, got ']'".to_owned())
        );
        let got_par = TimelineQuery::parse(r#"lang in [a, ("#).unwrap_err();
        assert_eq!(
            got_par,
            QueryError::SyntaxError(12, 1, "Syntax Error: Expected any word, got '('".to_owned())
        );
    }

    #[test]
    fn test_list_used_lists() {
        let q = TimelineQuery::parse(r#"lang in [fr, en] and blog in blogs or author in my_fav_authors or tags in hashtag and lang in spoken or license in copyleft"#)
            .unwrap();
        let used_lists = q.list_used_lists();
        // Only named lists are reported; the inline [fr, en] array is not.
        assert_eq!(
            used_lists,
            vec![
                ("blogs".to_owned(), ListType::Blog),
                ("my_fav_authors".to_owned(), ListType::User),
                ("hashtag".to_owned(), ListType::Word),
                ("spoken".to_owned(), ListType::Prefix),
                ("copyleft".to_owned(), ListType::Word),
            ]
        );
    }
}

View File

@ -7,7 +7,7 @@ use activitypub::{
}; };
use bcrypt; use bcrypt;
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use diesel::{self, BelongingToDsl, ExpressionMethods, QueryDsl, RunQueryDsl}; use diesel::{self, BelongingToDsl, ExpressionMethods, OptionalExtension, QueryDsl, RunQueryDsl};
use openssl::{ use openssl::{
hash::MessageDigest, hash::MessageDigest,
pkey::{PKey, Private}, pkey::{PKey, Private},
@ -48,6 +48,7 @@ use posts::Post;
use safe_string::SafeString; use safe_string::SafeString;
use schema::users; use schema::users;
use search::Searcher; use search::Searcher;
use timeline::Timeline;
use {ap_url, Connection, Error, PlumeRocket, Result}; use {ap_url, Connection, Error, PlumeRocket, Result};
pub type CustomPerson = CustomObject<ApSignature, Person>; pub type CustomPerson = CustomObject<ApSignature, Person>;
@ -198,10 +199,8 @@ impl User {
pub fn find_by_fqn(c: &PlumeRocket, fqn: &str) -> Result<User> { pub fn find_by_fqn(c: &PlumeRocket, fqn: &str) -> Result<User> {
let from_db = users::table let from_db = users::table
.filter(users::fqn.eq(fqn)) .filter(users::fqn.eq(fqn))
.limit(1) .first(&*c.conn)
.load::<User>(&*c.conn)? .optional()?;
.into_iter()
.next();
if let Some(from_db) = from_db { if let Some(from_db) = from_db {
Ok(from_db) Ok(from_db)
} else { } else {
@ -896,7 +895,7 @@ impl NewUser {
let (pub_key, priv_key) = gen_keypair(); let (pub_key, priv_key) = gen_keypair();
let instance = Instance::get_local()?; let instance = Instance::get_local()?;
User::insert( let res = User::insert(
conn, conn,
NewUser { NewUser {
username: username.clone(), username: username.clone(),
@ -917,7 +916,12 @@ impl NewUser {
fqn: username, fqn: username,
avatar_id: None, avatar_id: None,
}, },
) )?;
// create default timeline
Timeline::new_for_user(conn, res.id, "My feed".into(), "followed".into())?;
Ok(res)
} }
} }
@ -981,7 +985,6 @@ pub(crate) mod tests {
User::hash_pass("test_password").unwrap(), User::hash_pass("test_password").unwrap(),
) )
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
test_user.id, test_user.id,
User::find_by_name(conn, "test", Instance::get_local().unwrap().id) User::find_by_name(conn, "test", Instance::get_local().unwrap().id)
@ -1009,7 +1012,6 @@ pub(crate) mod tests {
.unwrap() .unwrap()
.id .id
); );
Ok(()) Ok(())
}); });
} }
@ -1023,7 +1025,6 @@ pub(crate) mod tests {
assert!(User::get(conn, inserted[0].id).is_ok()); assert!(User::get(conn, inserted[0].id).is_ok());
inserted[0].delete(conn, &get_searcher()).unwrap(); inserted[0].delete(conn, &get_searcher()).unwrap();
assert!(User::get(conn, inserted[0].id).is_err()); assert!(User::get(conn, inserted[0].id).is_err());
Ok(()) Ok(())
}); });
} }
@ -1046,7 +1047,6 @@ pub(crate) mod tests {
} }
inserted[0].set_role(conn, Role::Admin).unwrap(); inserted[0].set_role(conn, Role::Admin).unwrap();
assert_eq!(inserted[0].id, local_inst.main_admin(conn).unwrap().id); assert_eq!(inserted[0].id, local_inst.main_admin(conn).unwrap().id);
Ok(()) Ok(())
}); });
} }
@ -1069,7 +1069,6 @@ pub(crate) mod tests {
assert!(test_user.auth("test_password")); assert!(test_user.auth("test_password"));
assert!(!test_user.auth("other_password")); assert!(!test_user.auth("other_password"));
Ok(()) Ok(())
}); });
} }
@ -1099,7 +1098,6 @@ pub(crate) mod tests {
.len() as i64, .len() as i64,
User::count_local(conn).unwrap() User::count_local(conn).unwrap()
); );
Ok(()) Ok(())
}); });
} }
@ -1127,7 +1125,6 @@ pub(crate) mod tests {
assert_eq!(user.avatar_url(conn), users[0].avatar_url(conn)); assert_eq!(user.avatar_url(conn), users[0].avatar_url(conn));
assert_eq!(user.fqn, users[0].fqn); assert_eq!(user.fqn, users[0].fqn);
assert_eq!(user.summary_html, users[0].summary_html); assert_eq!(user.summary_html, users[0].summary_html);
Ok(()) Ok(())
}); });
} }

View File

@ -7,7 +7,7 @@ use plume_api::posts::*;
use plume_common::{activity_pub::broadcast, utils::md_to_html}; use plume_common::{activity_pub::broadcast, utils::md_to_html};
use plume_models::{ use plume_models::{
blogs::Blog, db_conn::DbConn, instance::Instance, medias::Media, mentions::*, post_authors::*, blogs::Blog, db_conn::DbConn, instance::Instance, medias::Media, mentions::*, post_authors::*,
posts::*, safe_string::SafeString, tags::*, users::User, Error, PlumeRocket, posts::*, safe_string::SafeString, tags::*, timeline::*, users::User, Error, PlumeRocket,
}; };
#[get("/posts/<id>")] #[get("/posts/<id>")]
@ -204,6 +204,8 @@ pub fn create(
worker.execute(move || broadcast(&author, act, dest)); worker.execute(move || broadcast(&author, act, dest));
} }
Timeline::add_to_all_timelines(&rockets, &post, Kind::Original)?;
Ok(Json(PostData { Ok(Json(PostData {
authors: post.get_authors(conn)?.into_iter().map(|a| a.fqn).collect(), authors: post.get_authors(conn)?.into_iter().map(|a| a.fqn).collect(),
creation_date: post.creation_date.format("%Y-%m-%d").to_string(), creation_date: post.creation_date.format("%Y-%m-%d").to_string(),

View File

@ -193,9 +193,6 @@ Then try to restart Plume
routes::comments::delete, routes::comments::delete,
routes::comments::activity_pub, routes::comments::activity_pub,
routes::instance::index, routes::instance::index,
routes::instance::local,
routes::instance::feed,
routes::instance::federated,
routes::instance::admin, routes::instance::admin,
routes::instance::admin_mod, routes::instance::admin_mod,
routes::instance::admin_instances, routes::instance::admin_instances,
@ -243,6 +240,12 @@ Then try to restart Plume
routes::plume_static_files, routes::plume_static_files,
routes::static_files, routes::static_files,
routes::tags::tag, routes::tags::tag,
routes::timelines::details,
routes::timelines::new,
routes::timelines::create,
routes::timelines::edit,
routes::timelines::update,
routes::timelines::delete,
routes::user::me, routes::user::me,
routes::user::details, routes::user::details,
routes::user::dashboard, routes::user::dashboard,

View File

@ -20,6 +20,7 @@ use plume_models::{
posts::Post, posts::Post,
safe_string::SafeString, safe_string::SafeString,
search::Searcher, search::Searcher,
timeline::Timeline,
users::{Role, User}, users::{Role, User},
Connection, Error, PlumeRocket, CONFIG, Connection, Error, PlumeRocket, CONFIG,
}; };
@ -30,64 +31,23 @@ use template_utils::{IntoContext, Ructe};
pub fn index(rockets: PlumeRocket) -> Result<Ructe, ErrorPage> { pub fn index(rockets: PlumeRocket) -> Result<Ructe, ErrorPage> {
let conn = &*rockets.conn; let conn = &*rockets.conn;
let inst = Instance::get_local()?; let inst = Instance::get_local()?;
let federated = Post::get_recents_page(conn, Page::default().limits())?; let timelines = Timeline::list_all_for_user(&conn, rockets.user.clone().map(|u| u.id))?
let local = Post::get_instance_page(conn, inst.id, Page::default().limits())?; .into_iter()
let user_feed = rockets.user.clone().and_then(|user| { .filter_map(|t| {
let followed = user.get_followed(conn).ok()?; if let Ok(latest) = t.get_latest(&conn, 12) {
let mut in_feed = followed.into_iter().map(|u| u.id).collect::<Vec<i32>>(); Some((t, latest))
in_feed.push(user.id); } else {
Post::user_feed_page(conn, in_feed, Page::default().limits()).ok() None
}); }
})
.collect();
Ok(render!(instance::index( Ok(render!(instance::index(
&rockets.to_context(), &rockets.to_context(),
inst, inst,
User::count_local(conn)?, User::count_local(conn)?,
Post::count_local(conn)?, Post::count_local(conn)?,
local, timelines
federated,
user_feed
)))
}
#[get("/local?<page>")]
pub fn local(page: Option<Page>, rockets: PlumeRocket) -> Result<Ructe, ErrorPage> {
let page = page.unwrap_or_default();
let instance = Instance::get_local()?;
let articles = Post::get_instance_page(&*rockets.conn, instance.id, page.limits())?;
Ok(render!(instance::local(
&rockets.to_context(),
instance,
articles,
page.0,
Page::total(Post::count_local(&*rockets.conn)? as i32)
)))
}
#[get("/feed?<page>")]
pub fn feed(user: User, page: Option<Page>, rockets: PlumeRocket) -> Result<Ructe, ErrorPage> {
let page = page.unwrap_or_default();
let followed = user.get_followed(&*rockets.conn)?;
let mut in_feed = followed.into_iter().map(|u| u.id).collect::<Vec<i32>>();
in_feed.push(user.id);
let articles = Post::user_feed_page(&*rockets.conn, in_feed, page.limits())?;
Ok(render!(instance::feed(
&rockets.to_context(),
articles,
page.0,
Page::total(Post::count_local(&*rockets.conn)? as i32)
)))
}
#[get("/federated?<page>")]
pub fn federated(page: Option<Page>, rockets: PlumeRocket) -> Result<Ructe, ErrorPage> {
let page = page.unwrap_or_default();
let articles = Post::get_recents_page(&*rockets.conn, page.limits())?;
Ok(render!(instance::federated(
&rockets.to_context(),
articles,
page.0,
Page::total(Post::count_local(&*rockets.conn)? as i32)
))) )))
} }

View File

@ -4,7 +4,7 @@ use rocket_i18n::I18n;
use plume_common::activity_pub::broadcast; use plume_common::activity_pub::broadcast;
use plume_common::utils; use plume_common::utils;
use plume_models::{ use plume_models::{
blogs::Blog, inbox::inbox, likes, posts::Post, users::User, Error, PlumeRocket, blogs::Blog, inbox::inbox, likes, posts::Post, timeline::*, users::User, Error, PlumeRocket,
}; };
use routes::errors::ErrorPage; use routes::errors::ErrorPage;
@ -23,6 +23,8 @@ pub fn create(
let like = likes::Like::insert(&*conn, likes::NewLike::new(&post, &user))?; let like = likes::Like::insert(&*conn, likes::NewLike::new(&post, &user))?;
like.notify(&*conn)?; like.notify(&*conn)?;
Timeline::add_to_all_timelines(&rockets, &post, Kind::Like(&user))?;
let dest = User::one_by_instance(&*conn)?; let dest = User::one_by_instance(&*conn)?;
let act = like.to_activity(&*conn)?; let act = like.to_activity(&*conn)?;
rockets.worker.execute(move || broadcast(&user, act, dest)); rockets.worker.execute(move || broadcast(&user, act, dest));

View File

@ -162,6 +162,7 @@ pub mod reshares;
pub mod search; pub mod search;
pub mod session; pub mod session;
pub mod tags; pub mod tags;
pub mod timelines;
pub mod user; pub mod user;
pub mod well_known; pub mod well_known;

View File

@ -23,6 +23,7 @@ use plume_models::{
posts::*, posts::*,
safe_string::SafeString, safe_string::SafeString,
tags::*, tags::*,
timeline::*,
users::User, users::User,
Error, PlumeRocket, Error, PlumeRocket,
}; };
@ -339,6 +340,8 @@ pub fn update(
.expect("post::update: act error"); .expect("post::update: act error");
let dest = User::one_by_instance(&*conn).expect("post::update: dest error"); let dest = User::one_by_instance(&*conn).expect("post::update: dest error");
rockets.worker.execute(move || broadcast(&user, act, dest)); rockets.worker.execute(move || broadcast(&user, act, dest));
Timeline::add_to_all_timelines(&rockets, &post, Kind::Original).ok();
} else { } else {
let act = post let act = post
.update_activity(&*conn) .update_activity(&*conn)
@ -529,8 +532,10 @@ pub fn create(
.create_activity(&*conn) .create_activity(&*conn)
.expect("posts::create: activity error"); .expect("posts::create: activity error");
let dest = User::one_by_instance(&*conn).expect("posts::create: dest error"); let dest = User::one_by_instance(&*conn).expect("posts::create: dest error");
let worker = rockets.worker; let worker = &rockets.worker;
worker.execute(move || broadcast(&user, act, dest)); worker.execute(move || broadcast(&user, act, dest));
Timeline::add_to_all_timelines(&rockets, &post, Kind::Original)?;
} }
Ok(Flash::success( Ok(Flash::success(

View File

@ -4,7 +4,8 @@ use rocket_i18n::I18n;
use plume_common::activity_pub::broadcast; use plume_common::activity_pub::broadcast;
use plume_common::utils; use plume_common::utils;
use plume_models::{ use plume_models::{
blogs::Blog, inbox::inbox, posts::Post, reshares::*, users::User, Error, PlumeRocket, blogs::Blog, inbox::inbox, posts::Post, reshares::*, timeline::*, users::User, Error,
PlumeRocket,
}; };
use routes::errors::ErrorPage; use routes::errors::ErrorPage;
@ -23,6 +24,8 @@ pub fn create(
let reshare = Reshare::insert(&*conn, NewReshare::new(&post, &user))?; let reshare = Reshare::insert(&*conn, NewReshare::new(&post, &user))?;
reshare.notify(&*conn)?; reshare.notify(&*conn)?;
Timeline::add_to_all_timelines(&rockets, &post, Kind::Reshare(&user))?;
let dest = User::one_by_instance(&*conn)?; let dest = User::one_by_instance(&*conn)?;
let act = reshare.to_activity(&*conn)?; let act = reshare.to_activity(&*conn)?;
rockets.worker.execute(move || broadcast(&user, act, dest)); rockets.worker.execute(move || broadcast(&user, act, dest));

51
src/routes/timelines.rs Normal file
View File

@ -0,0 +1,51 @@
#![allow(dead_code)]
use crate::{routes::errors::ErrorPage, template_utils::Ructe};
use plume_models::{timeline::*, PlumeRocket};
use rocket::response::Redirect;
use routes::Page;
use template_utils::IntoContext;
#[get("/timeline/<id>?<page>")]
pub fn details(id: i32, rockets: PlumeRocket, page: Option<Page>) -> Result<Ructe, ErrorPage> {
let page = page.unwrap_or_default();
let all_tl = Timeline::list_all_for_user(&rockets.conn, rockets.user.clone().map(|u| u.id))?;
let tl = Timeline::get(&rockets.conn, id)?;
let posts = tl.get_page(&rockets.conn, page.limits())?;
let total_posts = tl.count_posts(&rockets.conn)?;
Ok(render!(timelines::details(
&rockets.to_context(),
tl,
posts,
all_tl,
page.0,
Page::total(total_posts as i32)
)))
}
// TODO
#[get("/timeline/new")]
pub fn new() -> Result<Ructe, ErrorPage> {
unimplemented!()
}
#[post("/timeline/new")]
pub fn create() -> Result<Redirect, Ructe> {
unimplemented!()
}
#[get("/timeline/<_id>/edit")]
pub fn edit(_id: i32) -> Result<Ructe, ErrorPage> {
unimplemented!()
}
#[post("/timeline/<_id>/edit")]
pub fn update(_id: i32) -> Result<Redirect, Ructe> {
unimplemented!()
}
#[post("/timeline/<_id>/delete")]
pub fn delete(_id: i32) -> Result<Redirect, ErrorPage> {
unimplemented!()
}

View File

@ -108,6 +108,15 @@ pub fn translate_notification(ctx: BaseContext, notif: Notification) -> String {
} }
} }
pub fn i18n_timeline_name(cat: &Catalog, tl: &str) -> String {
match tl {
"Your feed" => i18n!(cat, "Your feed"),
"Local feed" => i18n!(cat, "Local feed"),
"Federated feed" => i18n!(cat, "Federated feed"),
n => n.to_string(),
}
}
pub enum Size { pub enum Size {
Small, Small,
Medium, Medium,
@ -143,11 +152,11 @@ pub fn avatar(
)) ))
} }
pub fn tabs(links: &[(&str, String, bool)]) -> Html<String> { pub fn tabs(links: &[(impl AsRef<str>, String, bool)]) -> Html<String> {
let mut res = String::from(r#"<div class="tabs">"#); let mut res = String::from(r#"<div class="tabs">"#);
for (url, title, selected) in links { for (url, title, selected) in links {
res.push_str(r#"<a dir="auto" href=""#); res.push_str(r#"<a dir="auto" href=""#);
res.push_str(url); res.push_str(url.as_ref());
if *selected { if *selected {
res.push_str(r#"" class="selected">"#); res.push_str(r#"" class="selected">"#);
} else { } else {
@ -183,7 +192,7 @@ pub fn paginate_param(
r#"<a href="?{}page={}">{}</a>"#, r#"<a href="?{}page={}">{}</a>"#,
param, param,
page - 1, page - 1,
catalog.gettext("Previous page") i18n!(catalog, "Previous page")
) )
.as_str(), .as_str(),
); );
@ -194,7 +203,7 @@ pub fn paginate_param(
r#"<a href="?{}page={}">{}</a>"#, r#"<a href="?{}page={}">{}</a>"#,
param, param,
page + 1, page + 1,
catalog.gettext("Next page") i18n!(catalog, "Next page")
) )
.as_str(), .as_str(),
); );

View File

@ -1,34 +0,0 @@
@use plume_models::posts::Post;
@use templates::{base, partials::post_card};
@use template_utils::*;
@use routes::*;
@(ctx: BaseContext, articles: Vec<Post>, page: i32, n_pages: i32)
@:base(ctx, i18n!(ctx.1, "All the articles of the Fediverse"), {}, {}, {
<div class="h-feed">
<h1 "p-name">@i18n!(ctx.1, "All the articles of the Fediverse")</h1>
@if ctx.2.is_some() {
@tabs(&[
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), false),
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), true),
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), false),
])
} else {
@tabs(&[
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), true),
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), false),
])
}
<div class="cards">
@for article in articles {
@:post_card(ctx, article)
}
</div>
@paginate(ctx.1, page, n_pages)
</div>
})

View File

@ -1,28 +0,0 @@
@use plume_models::posts::Post;
@use templates::{base, partials::post_card};
@use template_utils::*;
@use routes::*;
@(ctx: BaseContext, articles: Vec<Post>, page: i32, n_pages: i32)
@:base(ctx, i18n!(ctx.1, "Your feed"), {}, {}, {
<h1>@i18n!(ctx.1, "Your feed")</h1>
@tabs(&[
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), true),
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), false),
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), false),
])
@if !articles.is_empty() {
<div class="cards">
@for article in articles {
@:post_card(ctx, article)
}
</div>
} else {
<p class="center">@i18n!(ctx.1, "Nothing to see here yet. Try subscribing to more people.")</p>
}
@paginate(ctx.1, page, n_pages)
})

View File

@ -2,34 +2,40 @@
@use template_utils::*; @use template_utils::*;
@use plume_models::instance::Instance; @use plume_models::instance::Instance;
@use plume_models::posts::Post; @use plume_models::posts::Post;
@use plume_models::timeline::Timeline;
@use routes::*; @use routes::*;
@(ctx: BaseContext, instance: Instance, n_users: i64, n_articles: i64, local: Vec<Post>, federated: Vec<Post>, user_feed: Option<Vec<Post>>) @(ctx: BaseContext, instance: Instance, n_users: i64, n_articles: i64, all_tl: Vec<(Timeline, Vec<Post>)>)
@:base(ctx, instance.name.clone(), {}, {}, { @:base(ctx, instance.name.clone(), {}, {}, {
<h1>@i18n!(ctx.1, "Welcome to {}"; instance.name.as_str())</h1> <h1>@i18n!(ctx.1, "Welcome to {}"; instance.name.as_str())</h1>
@if ctx.2.is_some() { @tabs(&vec![(format!("{}", uri!(instance::index)), i18n!(ctx.1, "Latest articles"), true)]
@tabs(&[ .into_iter().chain(all_tl.clone()
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), true), .into_iter()
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), false), .map(|(tl, _)| {
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), false), let url = format!("{}", uri!(timelines::details: id = tl.id, page = _));
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), false), (url, i18n_timeline_name(ctx.1, &tl.name), false)
]) })
).collect::<Vec<_>>()
)
@:home_feed(ctx, user_feed.unwrap_or_default(), &uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed")) @for (tl, articles) in all_tl {
@:home_feed(ctx, federated, &uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed")) @if !articles.is_empty() {
@:home_feed(ctx, local, &uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed")) <div class="h-feed">
@:instance_description(ctx, instance, n_users, n_articles) <h2 dir="auto">
} else { <span class="p-name">@i18n_timeline_name(ctx.1, &tl.name)</span>
@tabs(&[ &mdash;
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), true), <a href="@uri!(timelines::details: id = tl.id, page = _)">@i18n!(ctx.1, "View all")</a>
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), false), </h2>
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), false), <div class="cards">
]) @for article in articles {
@:post_card(ctx, article)
@:home_feed(ctx, federated, &uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed")) }
@:home_feed(ctx, local, &uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed")) </div>
@:instance_description(ctx, instance, n_users, n_articles) </div>
}
} }
@:instance_description(ctx, instance, n_users, n_articles)
}) })

View File

@ -1,35 +0,0 @@
@use plume_models::posts::Post;
@use plume_models::instance::Instance;
@use templates::{base, partials::post_card};
@use template_utils::*;
@use routes::*;
@(ctx: BaseContext, instance: Instance, articles: Vec<Post>, page: i32, n_pages: i32)
@:base(ctx, i18n!(ctx.1, "Articles from {}"; instance.name.clone()), {}, {}, {
<div class="h-feed">
<h1 class="p-name">@i18n!(ctx.1, "Articles from {}"; instance.name)</h1>
@if ctx.2.is_some() {
@tabs(&[
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), false),
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), false),
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), true),
])
} else {
@tabs(&[
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), false),
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), true),
])
}
<div class="cards">
@for article in articles {
@:post_card(ctx, article)
}
</div>
@paginate(ctx.1, page, n_pages)
</div>
})

View File

@ -1,16 +0,0 @@
@use templates::partials::post_card;
@use plume_models::posts::Post;
@use template_utils::*;
@(ctx: BaseContext, articles: Vec<Post>, link: &str, title: String)
@if !articles.is_empty() {
<div class="h-feed">
<h2 dir="auto"><span class="p-name">@title</span> &mdash; <a href="@link">@i18n!(ctx.1, "View all")</a></h2>
<div class="cards">
@for article in articles {
@:post_card(ctx, article)
}
</div>
</div>
}

View File

@ -0,0 +1,38 @@
@use plume_models::posts::Post;
@use plume_models::timeline::Timeline;
@use template_utils::*;
@use templates::base;
@use templates::partials::post_card;
@use routes::*;
@(ctx: BaseContext, tl: Timeline, articles: Vec<Post>, all_tl: Vec<Timeline>, page: i32, n_pages: i32)
@:base(ctx, tl.name.clone(), {}, {}, {
<section class="flex wrap" dir="auto">
<h1 class="grow">@i18n_timeline_name(ctx.1, &tl.name)</h1>
@if ctx.clone().2.map(|u| (u.is_admin() && tl.user_id.is_none()) || Some(u.id) == tl.user_id).unwrap_or(false) {
<a href="@uri!(timelines::edit: _id = tl.id)" class="button inline-block">@i18n!(ctx.1, "Edit")</a>
}
</section>
@tabs(&vec![(format!("{}", uri!(instance::index)), i18n!(ctx.1, "Latest articles"), false)]
.into_iter().chain(all_tl
.into_iter()
.map(|t| {
let url = format!("{}", uri!(timelines::details: id = t.id, page = _));
(url, i18n_timeline_name(ctx.1, &t.name), t.id == tl.id)
})
).collect::<Vec<_>>()
)
@if !articles.is_empty() {
<div class="cards">
@for article in articles {
@:post_card(ctx, article)
}
</div>
} else {
<p class="center">@i18n!(ctx.1, "Nothing to see here yet.")</p>
}
@paginate(ctx.1, page, n_pages)
})