forked from Plume/Plume
Add support for generic timeline (#525)
* Begin adding support for timeline * fix some bugs with parser * fmt * add error reporting for parser * add tests for timeline query parser * add rejection tests for parse * begin adding support for lists also run migration before compiling, so schema.rs is up to date * add sqlite migration * end adding lists still miss tests and query integration * cargo fmt * try to add some tests * Add some constraint to db, and fix list test and refactor other tests to use begin_transaction * add more tests for lists * add support for lists in query executor * add keywords for including/excluding boosts and likes * cargo fmt * add function to list lists used by query will make it easier to warn users when creating timeline with unknown lists * add lang support * add timeline creation error message when using unexisting lists * Update .po files * WIP: interface for timelines * don't use diesel for migrations not sure how it passed the ci on the other branch * add some tests for timeline add an int representing the order of timelines (first one will be on top, second just under...) use first() instead of limit(1).get().into_iter().nth(0) remove migrations from build artifacts as they are now compiled in * cargo fmt * remove timeline order * fix tests * add tests for timeline creation failure * cargo fmt * add tests for timelines * add test for matching direct lists and keywords * add test for language filtering * Add a more complex test for Timeline::matches, and fix TQ::matches for TQ::Or * Make the main crate compile + FMT * Use the new timeline system - Replace the old "feed" system with timelines - Display all timelines someone can access on their home page (either their personal ones, or instance timelines) - Remove functions that were used to get user/local/federated feed - Add new posts to timelines - Create a default timeline called "My feed" for everyone, and "Local feed"/"Federated feed" with timelines @fdb-hiroshima I don't know if that's how you pictured it? 
If you imagined it differently I can of course make changes. I hope I didn't forget anything… * Cargo fmt * Try to fix the migration * Fix tests * Fix the test (for real this time ?) * Fix the tests ? + fmt * Use Kind::Like and Kind::Reshare when needed * Forgot to run cargo fmt once again * revert translations * fix reviewed stuff * reduce code duplication by macros * cargo fmt
parent
a0e3fe8c94
commit
006b44f580
@ -0,0 +1,6 @@
|
||||
-- This file should undo anything in `up.sql`

-- Drop referencing tables first (timeline -> timeline_definition,
-- list_elems -> lists) so no foreign key ever dangles.
DROP TABLE timeline;
DROP TABLE timeline_definition;
DROP TABLE list_elems;
DROP TABLE lists;
|
@ -0,0 +1,31 @@
|
||||
-- Your SQL goes here

-- A timeline definition: a named query owned by a user,
-- or by the whole instance when user_id is NULL.
CREATE TABLE timeline_definition(
    id SERIAL PRIMARY KEY,
    user_id integer REFERENCES users ON DELETE CASCADE,
    name VARCHAR NOT NULL,
    query VARCHAR NOT NULL,
    CONSTRAINT timeline_unique_user_name UNIQUE(user_id, name)
);

-- Contents of each timeline: one row per (post, timeline definition) pair.
CREATE TABLE timeline(
    id SERIAL PRIMARY KEY,
    post_id integer NOT NULL REFERENCES posts ON DELETE CASCADE,
    timeline_id integer NOT NULL REFERENCES timeline_definition ON DELETE CASCADE
);

-- Named lists of users/blogs/words/prefixes; `type` stores the
-- integer discriminant of the Rust `ListType` enum.
CREATE TABLE lists(
    id SERIAL PRIMARY KEY,
    name VARCHAR NOT NULL,
    user_id integer REFERENCES users ON DELETE CASCADE,
    type integer NOT NULL,
    CONSTRAINT list_unique_user_name UNIQUE(user_id, name)
);

-- One element of a list. Depending on the list's type, one of
-- user_id / blog_id / word is expected to be set (not enforced here).
CREATE TABLE list_elems(
    id SERIAL PRIMARY KEY,
    list_id integer NOT NULL REFERENCES lists ON DELETE CASCADE,
    user_id integer REFERENCES users ON DELETE CASCADE,
    blog_id integer REFERENCES blogs ON DELETE CASCADE,
    word VARCHAR
);
|
@ -0,0 +1,4 @@
|
||||
-- This file should undo anything in `up.sql`
DELETE FROM timeline_definition WHERE name = 'Your feed';
DELETE FROM timeline_definition WHERE name = 'Local feed' AND query = 'local';
-- Fixed typo: the up migration creates 'Federated feed', so the previous
-- 'Federared feed' spelling would have matched (and deleted) nothing.
DELETE FROM timeline_definition WHERE name = 'Federated feed' AND query = 'all';
|
@ -0,0 +1,17 @@
|
||||
-- Your SQL goes here

-- NOTE(review): the `--#!` lines below are not SQL — they appear to be
-- executed as Rust by Plume's custom migration runner after this migration
-- is applied. They create the instance-wide "Local feed"/"Federated feed"
-- timelines, then a personal "Your feed" for every local user, paging
-- through users 20 at a time until a page comes back empty.
--#!|conn: &Connection, path: &Path| {
--#!    super::timeline::Timeline::new_for_instance(conn, "Local feed".into(), "local".into()).expect("Local feed creation error");
--#!    super::timeline::Timeline::new_for_instance(conn, "Federated feed".into(), "all".into()).expect("Federated feed creation error");
--#!
--#!    for i in 0.. {
--#!        if let Some(users) = super::users::User::get_local_page(conn, (i * 20, (i + 1) * 20)).ok().filter(|l| !l.is_empty()) {
--#!            for u in users {
--#!                super::timeline::Timeline::new_for_user(conn, u.id, "Your feed".into(), format!("followed or author in [ {} ]", u.fqn)).expect("User feed creation error");
--#!            }
--#!        } else {
--#!            break;
--#!        }
--#!    }
--#!
--#!    Ok(())
--#!}
|
@ -0,0 +1,6 @@
|
||||
-- This file should undo anything in `up.sql`

-- Drop referencing tables first (timeline -> timeline_definition,
-- list_elems -> lists) so no foreign key ever dangles.
DROP TABLE timeline;
DROP TABLE timeline_definition;
DROP TABLE list_elems;
DROP TABLE lists;
|
@ -0,0 +1,31 @@
|
||||
-- Your SQL goes here

-- A timeline definition: a named query owned by a user,
-- or by the whole instance when user_id is NULL.
CREATE TABLE timeline_definition(
    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
    name VARCHAR NOT NULL,
    query VARCHAR NOT NULL,
    CONSTRAINT timeline_unique_user_name UNIQUE(user_id, name)
);

-- Contents of each timeline: one row per (post, timeline definition) pair.
CREATE TABLE timeline(
    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    post_id integer NOT NULL REFERENCES posts(id) ON DELETE CASCADE,
    timeline_id integer NOT NULL REFERENCES timeline_definition(id) ON DELETE CASCADE
);

-- Named lists of users/blogs/words/prefixes; `type` stores the
-- integer discriminant of the Rust `ListType` enum.
-- Fixed constraint name: it was `timeline_unique_user_name`, copy-pasted
-- from timeline_definition above; the PostgreSQL migration names it
-- `list_unique_user_name`, so the two schemas now agree.
CREATE TABLE lists(
    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    name VARCHAR NOT NULL,
    user_id integer REFERENCES users(id) ON DELETE CASCADE,
    type integer NOT NULL,
    CONSTRAINT list_unique_user_name UNIQUE(user_id, name)
);

-- One element of a list. Depending on the list's type, one of
-- user_id / blog_id / word is expected to be set (not enforced here).
CREATE TABLE list_elems(
    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    list_id integer NOT NULL REFERENCES lists(id) ON DELETE CASCADE,
    user_id integer REFERENCES users(id) ON DELETE CASCADE,
    blog_id integer REFERENCES blogs(id) ON DELETE CASCADE,
    word VARCHAR
);
|
@ -0,0 +1,4 @@
|
||||
-- This file should undo anything in `up.sql`
DELETE FROM timeline_definition WHERE name = 'Your feed';
DELETE FROM timeline_definition WHERE name = 'Local feed' AND query = 'local';
-- Fixed typo: the up migration creates 'Federated feed', so the previous
-- 'Federared feed' spelling would have matched (and deleted) nothing.
DELETE FROM timeline_definition WHERE name = 'Federated feed' AND query = 'all';
|
@ -0,0 +1,17 @@
|
||||
-- Your SQL goes here

-- NOTE(review): the `--#!` lines below are not SQL — they appear to be
-- executed as Rust by Plume's custom migration runner after this migration
-- is applied. They create the instance-wide "Local feed"/"Federated feed"
-- timelines, then a personal "Your feed" for every local user, paging
-- through users 20 at a time until a page comes back empty.
--#!|conn: &Connection, path: &Path| {
--#!    super::timeline::Timeline::new_for_instance(conn, "Local feed".into(), "local".into()).expect("Local feed creation error");
--#!    super::timeline::Timeline::new_for_instance(conn, "Federated feed".into(), "all".into()).expect("Federated feed creation error");
--#!
--#!    for i in 0.. {
--#!        if let Some(users) = super::users::User::get_local_page(conn, (i * 20, (i + 1) * 20)).ok().filter(|l| !l.is_empty()) {
--#!            for u in users {
--#!                super::timeline::Timeline::new_for_user(conn, u.id, "Your feed".into(), format!("followed or author in [ {} ]", u.fqn)).expect("User feed creation error");
--#!            }
--#!        } else {
--#!            break;
--#!        }
--#!    }
--#!
--#!    Ok(())
--#!}
|
@ -0,0 +1,555 @@
|
||||
use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl};
|
||||
|
||||
use blogs::Blog;
|
||||
use schema::{blogs, list_elems, lists, users};
|
||||
use std::convert::{TryFrom, TryInto};
|
||||
use users::User;
|
||||
use {Connection, Error, Result};
|
||||
|
||||
/// Represent what a list is supposed to store. Represented in database as an integer
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ListType {
    User,
    Blog,
    Word,
    Prefix,
}

impl TryFrom<i32> for ListType {
    type Error = ();

    /// Map a database integer back to a `ListType`.
    /// Only 0..=3 are valid; anything else is `Err(())`.
    fn try_from(i: i32) -> std::result::Result<Self, ()> {
        match i {
            0 => Ok(ListType::User),
            1 => Ok(ListType::Blog),
            2 => Ok(ListType::Word),
            3 => Ok(ListType::Prefix),
            _ => Err(()),
        }
    }
}

// Implementing `From<ListType> for i32` (instead of `Into<i32> for ListType`)
// is the idiomatic direction: it automatically provides the `Into` impl, so
// existing `kind.into()` call sites keep working unchanged.
impl From<ListType> for i32 {
    fn from(t: ListType) -> i32 {
        match t {
            ListType::User => 0,
            ListType::Blog => 1,
            ListType::Word => 2,
            ListType::Prefix => 3,
        }
    }
}
|
||||
|
||||
/// A named list (of users, blogs, words or prefixes) as stored in the
/// `lists` table. `user_id` is `None` for instance-level lists.
#[derive(Clone, Queryable, Identifiable)]
pub struct List {
    pub id: i32,
    pub name: String,
    pub user_id: Option<i32>,
    // Integer discriminant of `ListType`; kept private so it can only be
    // read through `List::kind()`, which validates it.
    type_: i32,
}

/// Insertable counterpart of `List` (no `id`: the database assigns it).
#[derive(Default, Insertable)]
#[table_name = "lists"]
struct NewList<'a> {
    pub name: &'a str,
    pub user_id: Option<i32>,
    type_: i32,
}
|
||||
|
||||
// Generates the typed `add_*`/`list_*`/`set_*` accessors for `List`, one
// family per `ListType` variant, so all four element kinds share a single
// implementation.
macro_rules! func {
    // @elem: build a `NewListElem` with only the column matching the kind set.
    (@elem User $id:expr, $value:expr) => {
        NewListElem {
            list_id: $id,
            user_id: Some(*$value),
            blog_id: None,
            word: None,
        }
    };
    (@elem Blog $id:expr, $value:expr) => {
        NewListElem {
            list_id: $id,
            user_id: None,
            blog_id: Some(*$value),
            word: None,
        }
    };
    (@elem Word $id:expr, $value:expr) => {
        NewListElem {
            list_id: $id,
            user_id: None,
            blog_id: None,
            word: Some($value),
        }
    };
    // Prefixes share the `word` column with plain words; only the list's
    // `type_` distinguishes the two kinds.
    (@elem Prefix $id:expr, $value:expr) => {
        NewListElem {
            list_id: $id,
            user_id: None,
            blog_id: None,
            word: Some($value),
        }
    };
    // @in_type: slice element type accepted by the add/set functions.
    (@in_type User) => { i32 };
    (@in_type Blog) => { i32 };
    (@in_type Word) => { &str };
    (@in_type Prefix) => { &str };
    // @out_type: row type returned by the list functions.
    (@out_type User) => { User };
    (@out_type Blog) => { Blog };
    (@out_type Word) => { String };
    (@out_type Prefix) => { String };

    // add: bulk-insert values, refusing a list of a different kind
    // with `Error::InvalidValue`.
    (add: $fn:ident, $kind:ident) => {
        pub fn $fn(&self, conn: &Connection, vals: &[func!(@in_type $kind)]) -> Result<()> {
            if self.kind() != ListType::$kind {
                return Err(Error::InvalidValue);
            }
            diesel::insert_into(list_elems::table)
                .values(
                    vals
                        .iter()
                        .map(|u| func!(@elem $kind self.id, u))
                        .collect::<Vec<_>>(),
                )
                .execute(conn)?;
            Ok(())
        }
    };

    // list: load every element of the list, joined against its source table
    // ($table is the diesel schema module, e.g. `users`).
    (list: $fn:ident, $kind:ident, $table:ident) => {
        pub fn $fn(&self, conn: &Connection) -> Result<Vec<func!(@out_type $kind)>> {
            if self.kind() != ListType::$kind {
                return Err(Error::InvalidValue);
            }
            list_elems::table
                .filter(list_elems::list_id.eq(self.id))
                .inner_join($table::table)
                .select($table::all_columns)
                .load(conn)
                .map_err(Error::from)
        }
    };

    // set: replace the whole content of the list (clear, then $add).
    (set: $fn:ident, $kind:ident, $add:ident) => {
        pub fn $fn(&self, conn: &Connection, val: &[func!(@in_type $kind)]) -> Result<()> {
            if self.kind() != ListType::$kind {
                return Err(Error::InvalidValue);
            }
            self.clear(conn)?;
            self.$add(conn, val)
        }
    }
}
|
||||
|
||||
/// A row of `list_elems`. Exactly one of `user_id` / `blog_id` / `word`
/// is expected to be set, depending on the owning list's kind
/// (not enforced by the schema).
#[derive(Clone, Queryable, Identifiable)]
struct ListElem {
    pub id: i32,
    pub list_id: i32,
    pub user_id: Option<i32>,
    pub blog_id: Option<i32>,
    pub word: Option<String>,
}

/// Insertable counterpart of `ListElem` (no `id`: the database assigns it).
#[derive(Default, Insertable)]
#[table_name = "list_elems"]
struct NewListElem<'a> {
    pub list_id: i32,
    pub user_id: Option<i32>,
    pub blog_id: Option<i32>,
    pub word: Option<&'a str>,
}
|
||||
|
||||
impl List {
|
||||
last!(lists);
|
||||
get!(lists);
|
||||
|
||||
fn insert(conn: &Connection, val: NewList) -> Result<Self> {
|
||||
diesel::insert_into(lists::table)
|
||||
.values(val)
|
||||
.execute(conn)?;
|
||||
List::last(conn)
|
||||
}
|
||||
|
||||
pub fn list_for_user(conn: &Connection, user_id: Option<i32>) -> Result<Vec<Self>> {
|
||||
if let Some(user_id) = user_id {
|
||||
lists::table
|
||||
.filter(lists::user_id.eq(user_id))
|
||||
.load::<Self>(conn)
|
||||
.map_err(Error::from)
|
||||
} else {
|
||||
lists::table
|
||||
.filter(lists::user_id.is_null())
|
||||
.load::<Self>(conn)
|
||||
.map_err(Error::from)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_for_user_by_name(
|
||||
conn: &Connection,
|
||||
user_id: Option<i32>,
|
||||
name: &str,
|
||||
) -> Result<Self> {
|
||||
if let Some(user_id) = user_id {
|
||||
lists::table
|
||||
.filter(lists::user_id.eq(user_id))
|
||||
.filter(lists::name.eq(name))
|
||||
.first(conn)
|
||||
.map_err(Error::from)
|
||||
} else {
|
||||
lists::table
|
||||
.filter(lists::user_id.is_null())
|
||||
.filter(lists::name.eq(name))
|
||||
.first(conn)
|
||||
.map_err(Error::from)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new(conn: &Connection, name: &str, user: Option<&User>, kind: ListType) -> Result<Self> {
|
||||
Self::insert(
|
||||
conn,
|
||||
NewList {
|
||||
name,
|
||||
user_id: user.map(|u| u.id),
|
||||
type_: kind.into(),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns the kind of a list
|
||||
pub fn kind(&self) -> ListType {
|
||||
self.type_.try_into().expect("invalid list was constructed")
|
||||
}
|
||||
|
||||
/// Return Ok(true) if the list contain the given user, Ok(false) otherwiser,
|
||||
/// and Err(_) on error
|
||||
pub fn contains_user(&self, conn: &Connection, user: i32) -> Result<bool> {
|
||||
private::ListElem::user_in_list(conn, self, user)
|
||||
}
|
||||
|
||||
/// Return Ok(true) if the list contain the given blog, Ok(false) otherwiser,
|
||||
/// and Err(_) on error
|
||||
pub fn contains_blog(&self, conn: &Connection, blog: i32) -> Result<bool> {
|
||||
private::ListElem::blog_in_list(conn, self, blog)
|
||||
}
|
||||
|
||||
/// Return Ok(true) if the list contain the given word, Ok(false) otherwiser,
|
||||
/// and Err(_) on error
|
||||
pub fn contains_word(&self, conn: &Connection, word: &str) -> Result<bool> {
|
||||
private::ListElem::word_in_list(conn, self, word)
|
||||
}
|
||||
|
||||
/// Return Ok(true) if the list match the given prefix, Ok(false) otherwiser,
|
||||
/// and Err(_) on error
|
||||
pub fn contains_prefix(&self, conn: &Connection, word: &str) -> Result<bool> {
|
||||
private::ListElem::prefix_in_list(conn, self, word)
|
||||
}
|
||||
|
||||
/// Insert new users in a list
|
||||
func! {add: add_users, User}
|
||||
|
||||
/// Insert new blogs in a list
|
||||
func! {add: add_blogs, Blog}
|
||||
|
||||
/// Insert new words in a list
|
||||
func! {add: add_words, Word}
|
||||
|
||||
/// Insert new prefixes in a list
|
||||
func! {add: add_prefixes, Prefix}
|
||||
|
||||
/// Get all users in the list
|
||||
func! {list: list_users, User, users}
|
||||
|
||||
/// Get all blogs in the list
|
||||
func! {list: list_blogs, Blog, blogs}
|
||||
|
||||
/// Get all words in the list
|
||||
pub fn list_words(&self, conn: &Connection) -> Result<Vec<String>> {
|
||||
self.list_stringlike(conn, ListType::Word)
|
||||
}
|
||||
|
||||
/// Get all prefixes in the list
|
||||
pub fn list_prefixes(&self, conn: &Connection) -> Result<Vec<String>> {
|
||||
self.list_stringlike(conn, ListType::Prefix)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn list_stringlike(&self, conn: &Connection, t: ListType) -> Result<Vec<String>> {
|
||||
if self.kind() != t {
|
||||
return Err(Error::InvalidValue);
|
||||
}
|
||||
list_elems::table
|
||||
.filter(list_elems::list_id.eq(self.id))
|
||||
.filter(list_elems::word.is_not_null())
|
||||
.select(list_elems::word)
|
||||
.load::<Option<String>>(conn)
|
||||
.map_err(Error::from)
|
||||
.map(|r| r.into_iter().filter_map(|o| o).collect::<Vec<String>>())
|
||||
}
|
||||
|
||||
pub fn clear(&self, conn: &Connection) -> Result<()> {
|
||||
diesel::delete(list_elems::table.filter(list_elems::list_id.eq(self.id)))
|
||||
.execute(conn)
|
||||
.map(|_| ())
|
||||
.map_err(Error::from)
|
||||
}
|
||||
|
||||
func! {set: set_users, User, add_users}
|
||||
func! {set: set_blogs, Blog, add_blogs}
|
||||
func! {set: set_words, Word, add_words}
|
||||
func! {set: set_prefixes, Prefix, add_prefixes}
|
||||
}
|
||||
|
||||
// Low-level queries on `list_elems`, kept out of the public API so callers
// must go through the type-checked methods on `List`.
mod private {
    pub use super::*;
    use diesel::{
        dsl,
        sql_types::{Nullable, Text},
        IntoSql, TextExpressionMethods,
    };

    impl ListElem {
        insert!(list_elems, NewListElem);

        /// EXISTS query: is `user` an element of `list`?
        pub fn user_in_list(conn: &Connection, list: &List, user: i32) -> Result<bool> {
            dsl::select(dsl::exists(
                list_elems::table
                    .filter(list_elems::list_id.eq(list.id))
                    .filter(list_elems::user_id.eq(Some(user))),
            ))
            .get_result(conn)
            .map_err(Error::from)
        }

        /// EXISTS query: is `blog` an element of `list`?
        pub fn blog_in_list(conn: &Connection, list: &List, blog: i32) -> Result<bool> {
            dsl::select(dsl::exists(
                list_elems::table
                    .filter(list_elems::list_id.eq(list.id))
                    .filter(list_elems::blog_id.eq(Some(blog))),
            ))
            .get_result(conn)
            .map_err(Error::from)
        }

        /// EXISTS query: does `list` contain exactly `word`?
        pub fn word_in_list(conn: &Connection, list: &List, word: &str) -> Result<bool> {
            dsl::select(dsl::exists(
                list_elems::table
                    .filter(list_elems::list_id.eq(list.id))
                    .filter(list_elems::word.eq(word)),
            ))
            .get_result(conn)
            .map_err(Error::from)
        }

        /// EXISTS query: does any stored prefix match the start of `word`?
        /// Generated SQL is `word LIKE list_elems.word || '%'`, i.e. the
        /// stored value is treated as the prefix.
        /// NOTE(review): LIKE case-sensitivity differs between SQLite and
        /// PostgreSQL — confirm which behavior is intended.
        pub fn prefix_in_list(conn: &Connection, list: &List, word: &str) -> Result<bool> {
            dsl::select(dsl::exists(
                list_elems::table
                    .filter(
                        word.into_sql::<Nullable<Text>>()
                            .like(list_elems::word.concat("%")),
                    )
                    .filter(list_elems::list_id.eq(list.id)),
            ))
            .get_result(conn)
            .map_err(Error::from)
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use blogs::tests as blog_tests;
    use diesel::Connection;
    use tests::db;

    /// The integer <-> ListType conversions must round-trip for 0..=3 and
    /// reject anything else.
    #[test]
    fn list_type() {
        for i in 0..4 {
            assert_eq!(i, Into::<i32>::into(ListType::try_from(i).unwrap()));
        }
        ListType::try_from(4).unwrap_err();
    }

    /// Creation, lookup by id/name, and per-user vs instance scoping.
    #[test]
    fn list_lists() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let (users, _) = blog_tests::fill_database(conn);

            let l1 = List::new(conn, "list1", None, ListType::User).unwrap();
            let l2 = List::new(conn, "list2", None, ListType::Blog).unwrap();
            let l1u = List::new(conn, "list1", Some(&users[0]), ListType::Word).unwrap();

            // Field-by-field equality helper (List doesn't derive PartialEq).
            let l_eq = |l1: &List, l2: &List| {
                assert_eq!(l1.id, l2.id);
                assert_eq!(l1.user_id, l2.user_id);
                assert_eq!(l1.name, l2.name);
                assert_eq!(l1.type_, l2.type_);
            };

            let l1bis = List::get(conn, l1.id).unwrap();
            l_eq(&l1, &l1bis);

            let l_inst = List::list_for_user(conn, None).unwrap();
            let l_user = List::list_for_user(conn, Some(users[0].id)).unwrap();
            assert_eq!(2, l_inst.len());
            assert_eq!(1, l_user.len());
            // The user-scoped list must not leak into the instance scope.
            assert!(l_inst.iter().all(|l| l.id != l1u.id));

            l_eq(&l1u, &l_user[0]);
            // list_for_user makes no ordering guarantee, so accept both orders.
            if l_inst[0].id == l1.id {
                l_eq(&l1, &l_inst[0]);
                l_eq(&l2, &l_inst[1]);
            } else {
                l_eq(&l1, &l_inst[1]);
                l_eq(&l2, &l_inst[0]);
            }

            l_eq(
                &l1,
                &List::find_for_user_by_name(conn, l1.user_id, &l1.name).unwrap(),
            );
            // Fixed: was `&&l1u` (a double reference that only compiled
            // thanks to deref coercion).
            l_eq(
                &l1u,
                &List::find_for_user_by_name(conn, l1u.user_id, &l1u.name).unwrap(),
            );
            Ok(())
        });
    }

    /// add/contains/set/clear on a user list, plus kind mismatch rejection.
    #[test]
    fn test_user_list() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let (users, blogs) = blog_tests::fill_database(conn);

            let l = List::new(conn, "list", None, ListType::User).unwrap();

            assert_eq!(l.kind(), ListType::User);
            assert!(l.list_users(conn).unwrap().is_empty());

            assert!(!l.contains_user(conn, users[0].id).unwrap());
            assert!(l.add_users(conn, &[users[0].id]).is_ok());
            assert!(l.contains_user(conn, users[0].id).unwrap());

            assert!(l.add_users(conn, &[users[1].id]).is_ok());
            assert!(l.contains_user(conn, users[0].id).unwrap());
            assert!(l.contains_user(conn, users[1].id).unwrap());
            assert_eq!(2, l.list_users(conn).unwrap().len());

            // set_* replaces the whole content.
            assert!(l.set_users(conn, &[users[0].id]).is_ok());
            assert!(l.contains_user(conn, users[0].id).unwrap());
            assert!(!l.contains_user(conn, users[1].id).unwrap());
            assert_eq!(1, l.list_users(conn).unwrap().len());
            assert!(users[0] == l.list_users(conn).unwrap()[0]);

            l.clear(conn).unwrap();
            assert!(l.list_users(conn).unwrap().is_empty());

            // Adding the wrong element kind must fail.
            assert!(l.add_blogs(conn, &[blogs[0].id]).is_err());
            Ok(())
        });
    }

    /// Same scenario as `test_user_list`, but for a blog list.
    #[test]
    fn test_blog_list() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let (users, blogs) = blog_tests::fill_database(conn);

            let l = List::new(conn, "list", None, ListType::Blog).unwrap();

            assert_eq!(l.kind(), ListType::Blog);
            assert!(l.list_blogs(conn).unwrap().is_empty());

            assert!(!l.contains_blog(conn, blogs[0].id).unwrap());
            assert!(l.add_blogs(conn, &[blogs[0].id]).is_ok());
            assert!(l.contains_blog(conn, blogs[0].id).unwrap());

            assert!(l.add_blogs(conn, &[blogs[1].id]).is_ok());
            assert!(l.contains_blog(conn, blogs[0].id).unwrap());
            assert!(l.contains_blog(conn, blogs[1].id).unwrap());
            assert_eq!(2, l.list_blogs(conn).unwrap().len());

            assert!(l.set_blogs(conn, &[blogs[0].id]).is_ok());
            assert!(l.contains_blog(conn, blogs[0].id).unwrap());
            assert!(!l.contains_blog(conn, blogs[1].id).unwrap());
            assert_eq!(1, l.list_blogs(conn).unwrap().len());
            assert_eq!(blogs[0].id, l.list_blogs(conn).unwrap()[0].id);

            l.clear(conn).unwrap();
            assert!(l.list_blogs(conn).unwrap().is_empty());

            assert!(l.add_users(conn, &[users[0].id]).is_err());
            Ok(())
        });
    }

    /// Word lists match exactly: "plume" must not match "plumelin".
    #[test]
    fn test_word_list() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let l = List::new(conn, "list", None, ListType::Word).unwrap();

            assert_eq!(l.kind(), ListType::Word);
            assert!(l.list_words(conn).unwrap().is_empty());

            assert!(!l.contains_word(conn, "plume").unwrap());
            assert!(l.add_words(conn, &["plume"]).is_ok());
            assert!(l.contains_word(conn, "plume").unwrap());
            assert!(!l.contains_word(conn, "plumelin").unwrap());

            assert!(l.add_words(conn, &["amsterdam"]).is_ok());
            assert!(l.contains_word(conn, "plume").unwrap());
            assert!(l.contains_word(conn, "amsterdam").unwrap());
            assert_eq!(2, l.list_words(conn).unwrap().len());

            assert!(l.set_words(conn, &["plume"]).is_ok());
            assert!(l.contains_word(conn, "plume").unwrap());
            assert!(!l.contains_word(conn, "amsterdam").unwrap());
            assert_eq!(1, l.list_words(conn).unwrap().len());
            assert_eq!("plume", l.list_words(conn).unwrap()[0]);

            l.clear(conn).unwrap();
            assert!(l.list_words(conn).unwrap().is_empty());

            assert!(l.add_prefixes(conn, &["something"]).is_err());
            Ok(())
        });
    }

    /// Prefix lists match the start of a word: "plume" matches "plumelin".
    #[test]
    fn test_prefix_list() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let l = List::new(conn, "list", None, ListType::Prefix).unwrap();

            assert_eq!(l.kind(), ListType::Prefix);
            assert!(l.list_prefixes(conn).unwrap().is_empty());

            assert!(!l.contains_prefix(conn, "plume").unwrap());
            assert!(l.add_prefixes(conn, &["plume"]).is_ok());
            assert!(l.contains_prefix(conn, "plume").unwrap());
            assert!(l.contains_prefix(conn, "plumelin").unwrap());

            assert!(l.add_prefixes(conn, &["amsterdam"]).is_ok());
            assert!(l.contains_prefix(conn, "plume").unwrap());
            assert!(l.contains_prefix(conn, "amsterdam").unwrap());
            assert_eq!(2, l.list_prefixes(conn).unwrap().len());

            assert!(l.set_prefixes(conn, &["plume"]).is_ok());
            assert!(l.contains_prefix(conn, "plume").unwrap());
            assert!(!l.contains_prefix(conn, "amsterdam").unwrap());
            assert_eq!(1, l.list_prefixes(conn).unwrap().len());
            assert_eq!("plume", l.list_prefixes(conn).unwrap()[0]);

            l.clear(conn).unwrap();
            assert!(l.list_prefixes(conn).unwrap().is_empty());

            assert!(l.add_words(conn, &["something"]).is_err());
            Ok(())
        });
    }
}
|
@ -0,0 +1,877 @@
|
||||
use blogs::Blog;
|
||||
use lists::{self, ListType};
|
||||
use plume_common::activity_pub::inbox::AsActor;
|
||||
use posts::Post;
|
||||
use tags::Tag;
|
||||
use users::User;
|
||||
use whatlang::{self, Lang};
|
||||
|
||||
use {PlumeRocket, Result};
|
||||
|
||||
use super::Timeline;
|
||||
|
||||
/// Errors that can occur while parsing or evaluating a timeline query.
#[derive(Debug, Clone, PartialEq)]
pub enum QueryError {
    /// (start, length, message): the offending span in the query string,
    /// as produced by `Token::get_pos`.
    SyntaxError(usize, usize, String),
    UnexpectedEndOfQuery,
    RuntimeError(String),
}

// Lets the parser use `?` on `Option`s: a missing token becomes
// `UnexpectedEndOfQuery`. NOTE(review): `std::option::NoneError` is a
// nightly-only (`try_trait`) type — this module requires a nightly compiler.
impl From<std::option::NoneError> for QueryError {
    fn from(_: std::option::NoneError) -> Self {
        QueryError::UnexpectedEndOfQuery
    }
}

pub type QueryResult<T> = std::result::Result<T, QueryError>;
|
||||
|
||||
/// How a post reached a timeline: published directly, or carried in by a
/// reshare/like of the given user.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Kind<'a> {
    Original,
    Reshare(&'a User),
    Like(&'a User),
}

/// Lexer tokens. Each variant carries its position in the query string;
/// `Word` also carries its length and (once resolved by `lex`) its text.
/// NOTE(review): positions are char counts, later used as byte indexes when
/// slicing — safe only for ASCII queries; confirm upstream validation.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Token<'a> {
    LParent(usize),
    RParent(usize),
    LBracket(usize),
    RBracket(usize),
    Comma(usize),
    Word(usize, usize, &'a str),
}
|
||||
|
||||
impl<'a> Token<'a> {
    /// Text of the token, mainly for error messages.
    fn get_text(&self) -> &'a str {
        match self {
            Token::Word(_, _, s) => s,
            Token::LParent(_) => "(",
            Token::RParent(_) => ")",
            Token::LBracket(_) => "[",
            Token::RBracket(_) => "]",
            Token::Comma(_) => ",",
        }
    }

    /// (start, length) of the token in the query; punctuation is always
    /// one character long.
    fn get_pos(&self) -> (usize, usize) {
        match self {
            Token::Word(a, b, _) => (*a, *b),
            Token::LParent(a)
            | Token::RParent(a)
            | Token::LBracket(a)
            | Token::RBracket(a)
            | Token::Comma(a) => (*a, 1),
        }
    }

    /// Build a `SyntaxError` reporting that `token` was expected but
    /// `self` was found, located at `self`'s position.
    fn get_error<T>(&self, token: Token) -> QueryResult<T> {
        let (b, e) = self.get_pos();
        let message = format!(
            "Syntax Error: Expected {}, got {}",
            token.to_string(),
            self.to_string()
        );
        Err(QueryError::SyntaxError(b, e, message))
    }
}
|
||||
|
||||
impl<'a> ToString for Token<'a> {
|
||||
fn to_string(&self) -> String {
|
||||
if let Token::Word(0, 0, v) = self {
|
||||
return v.to_string();
|
||||
}
|
||||
format!(
|
||||
"'{}'",
|
||||
match self {
|
||||
Token::Word(_, _, v) => v,
|
||||
Token::LParent(_) => "(",
|
||||
Token::RParent(_) => ")",
|
||||
Token::LBracket(_) => "[",
|
||||
Token::RBracket(_) => "]",
|
||||
Token::Comma(_) => ",",
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// One step of the lexer's state machine, expanded inside `lex`'s `scan`.
// `$state` holds an in-progress `Word` (positions only, text resolved
// later); `$quote` is toggled by `"` and, while set, disables whitespace
// and punctuation splitting so quoted words may contain them.
// Each arm returns the tokens emitted for the current character.
macro_rules! gen_tokenizer {
    ( ($c:ident,$i:ident), $state:ident, $quote:ident; $([$char:tt, $variant:tt]),*) => {
        match $c {
            // Unquoted whitespace terminates the current word, if any.
            space if !*$quote && space.is_whitespace() => match $state.take() {
                Some(v) => vec![v],
                None => vec![],
            },
            $(
                // Unquoted punctuation terminates the current word and
                // emits its own token.
                $char if !*$quote => match $state.take() {
                    Some(v) => vec![v, Token::$variant($i)],
                    None => vec![Token::$variant($i)],
                },
            )*
            // Quotes only flip the flag; they are not part of any token.
            '"' => {
                *$quote = !*$quote;
                vec![]
            },
            // Any other character extends the current word (or starts one).
            _ => match $state.take() {
                Some(Token::Word(b, l, _)) => {
                    *$state = Some(Token::Word(b, l+1, &""));
                    vec![]
                },
                None => {
                    *$state = Some(Token::Word($i,1,&""));
                    vec![]
                },
                _ => unreachable!(),
            }
        }
    }
}
|
||||
|
||||
/// Split a timeline query string into `Token`s.
fn lex(stream: &str) -> Vec<Token> {
    stream
        .chars()
        .chain(" ".chars()) // force a last whitespace to empty scan's state
        .zip(0..)
        .scan((None, false), |(state, quote), (c, i)| {
            Some(gen_tokenizer!((c,i), state, quote;
                    ['(', LParent], [')', RParent],
                    ['[', LBracket], [']', RBracket],
                    [',', Comma]))
        })
        .flatten()
        // The scan only records word positions; resolve them to the actual
        // text here. NOTE(review): `i` counts chars but is used as a byte
        // index below — this assumes ASCII-only queries; confirm upstream.
        .map(|t| {
            if let Token::Word(b, e, _) = t {
                Token::Word(b, e, &stream[b..b + e])
            } else {
                t
            }
        })
        .collect()
}
|
||||
|
||||
/// Private internals of TimelineQuery
|
||||
/// Private internals of TimelineQuery: the parsed boolean expression tree.
#[derive(Debug, Clone, PartialEq)]
enum TQ<'a> {
    Or(Vec<TQ<'a>>),
    And(Vec<TQ<'a>>),
    /// A leaf predicate; the bool inverts it (`not`).
    Arg(Arg<'a>, bool),
}

impl<'a> TQ<'a> {
    /// Evaluate the expression tree against `post` arriving in `timeline`
    /// as `kind`. Uses `try_fold` so every arm is evaluated (no
    /// short-circuit) and the first `Err` aborts the whole evaluation.
    fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        kind: Kind,
    ) -> Result<bool> {
        match self {
            TQ::Or(inner) => inner.iter().try_fold(false, |s, e| {
                e.matches(rocket, timeline, post, kind).map(|r| s || r)
            }),
            TQ::And(inner) => inner.iter().try_fold(true, |s, e| {
                e.matches(rocket, timeline, post, kind).map(|r| s && r)
            }),
            // XOR with the invert flag implements `not`.
            TQ::Arg(inner, invert) => Ok(inner.matches(rocket, timeline, post, kind)? ^ invert),
        }
    }

    /// Collect every named list referenced by the query, together with the
    /// `ListType` it must have — used to warn about unknown lists at
    /// timeline creation.
    fn list_used_lists(&self) -> Vec<(String, ListType)> {
        match self {
            TQ::Or(inner) => inner.iter().flat_map(TQ::list_used_lists).collect(),
            TQ::And(inner) => inner.iter().flat_map(TQ::list_used_lists).collect(),
            TQ::Arg(Arg::In(typ, List::List(name)), _) => vec![(
                name.to_string(),
                match typ {
                    WithList::Blog => ListType::Blog,
                    WithList::Author { .. } => ListType::User,
                    WithList::License => ListType::Word,
                    WithList::Tags => ListType::Word,
                    WithList::Lang => ListType::Prefix,
                },
            )],
            // Inline arrays and other predicates reference no stored list.
            TQ::Arg(_, _) => vec![],
        }
    }
}
|
||||
|
||||
/// A leaf predicate of the query tree.
#[derive(Debug, Clone, PartialEq)]
enum Arg<'a> {
    /// `x in <list or [array]>`
    In(WithList, List<'a>),
    /// `x contains <value>`
    Contains(WithContains, &'a str),
    /// A bare keyword predicate (e.g. local/all style flags).
    Boolean(Bool),
}

impl<'a> Arg<'a> {
    /// Dispatch the predicate to its concrete evaluator.
    pub fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        kind: Kind,
    ) -> Result<bool> {
        match self {
            Arg::In(t, l) => t.matches(rocket, timeline, post, l, kind),
            Arg::Contains(t, v) => t.matches(post, v),
            Arg::Boolean(t) => t.matches(rocket, timeline, post, kind),
        }
    }
}
|
||||
|
||||
/// The property tested by an `in` predicate. `Author` records whether
/// reshares ("boosts") and likes by listed users should also match.
#[derive(Debug, Clone, PartialEq)]
enum WithList {
    Blog,
    Author { boosts: bool, likes: bool },
    License,
    Tags,
    Lang,
}
|
||||
|
||||
impl WithList {
    /// Evaluate an `in` predicate for `post` against either a stored,
    /// named list (`List::List`) or an inline array literal (`List::Array`).
    /// A stored list of the wrong `ListType` for the tested property is a
    /// `QueryError::RuntimeError`.
    pub fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        list: &List,
        kind: Kind,
    ) -> Result<bool> {
        match list {
            List::List(name) => {
                // Named lists are resolved in the timeline owner's scope.
                let list =
                    lists::List::find_for_user_by_name(&rocket.conn, timeline.user_id, &name)?;
                match (self, list.kind()) {
                    (WithList::Blog, ListType::Blog) => {
                        list.contains_blog(&rocket.conn, post.blog_id)
                    }
                    (WithList::Author { boosts, likes }, ListType::User) => match kind {
                        // Original posts match when any listed user authored them.
                        Kind::Original => Ok(list
                            .list_users(&rocket.conn)?
                            .iter()
                            .any(|a| post.is_author(&rocket.conn, a.id).unwrap_or(false))),
                        // Boosted/liked posts only match when the query
                        // opted in to boosts/likes.
                        Kind::Reshare(u) => {
                            if *boosts {
                                list.contains_user(&rocket.conn, u.id)
                            } else {
                                Ok(false)
                            }
                        }
                        Kind::Like(u) => {
                            if *likes {
                                list.contains_user(&rocket.conn, u.id)
                            } else {
                                Ok(false)
                            }
                        }
                    },
                    (WithList::License, ListType::Word) => {
                        list.contains_word(&rocket.conn, &post.license)
                    }
                    (WithList::Tags, ListType::Word) => {
                        let tags = Tag::for_post(&rocket.conn, post.id)?;
                        Ok(list
                            .list_words(&rocket.conn)?
                            .iter()
                            .any(|s| tags.iter().any(|t| s == &t.tag)))
                    }
                    (WithList::Lang, ListType::Prefix) => {
                        // Detected language defaults to English when
                        // detection is unreliable.
                        let lang = whatlang::detect(post.content.get())
                            .and_then(|i| {
                                if i.is_reliable() {
                                    Some(i.lang())
                                } else {
                                    None
                                }
                            })
                            .unwrap_or(Lang::Eng)
                            .name();
                        // NOTE(review): unlike the Array branch below, the
                        // language name is NOT lowercased here and matching
                        // is delegated to SQL LIKE — confirm the intended
                        // case-sensitivity is consistent.
                        list.contains_prefix(&rocket.conn, lang)
                    }
                    (_, _) => Err(QueryError::RuntimeError(format!(
                        "The list '{}' is of the wrong type for this usage",
                        name
                    )))?,
                }
            }
            List::Array(list) => match self {
                // Inline arrays hold fully-qualified names, resolved one by
                // one; unresolvable entries are silently skipped.
                WithList::Blog => Ok(list
                    .iter()
                    .filter_map(|b| Blog::find_by_fqn(rocket, b).ok())
                    .any(|b| b.id == post.blog_id)),
                WithList::Author { boosts, likes } => match kind {
                    Kind::Original => Ok(list
                        .iter()
                        .filter_map(|a| User::find_by_fqn(rocket, a).ok())
                        .any(|a| post.is_author(&rocket.conn, a.id).unwrap_or(false))),
                    Kind::Reshare(u) => {
                        if *boosts {
                            Ok(list.iter().any(|user| &u.fqn == user))
                        } else {
                            Ok(false)
                        }
                    }
                    Kind::Like(u) => {
                        if *likes {
                            Ok(list.iter().any(|user| &u.fqn == user))
                        } else {
                            Ok(false)
                        }
                    }
                },
                WithList::License => Ok(list.iter().any(|s| s == &post.license)),
                WithList::Tags => {
                    let tags = Tag::for_post(&rocket.conn, post.id)?;
                    Ok(list.iter().any(|s| tags.iter().any(|t| s == &t.tag)))
                }
                WithList::Lang => {
                    let lang = whatlang::detect(post.content.get())
                        .and_then(|i| {
                            if i.is_reliable() {
                                Some(i.lang())
                            } else {
                                None
                            }
                        })
                        .unwrap_or(Lang::Eng)
                        .name()
                        .to_lowercase();
                    // Case-insensitive prefix match against the detected
                    // language name.
                    Ok(list.iter().any(|s| lang.starts_with(&s.to_lowercase())))
                }
            },
        }
    }
}
|
||||
|
||||
/// Which textual field of a post a `contains` clause searches.
#[derive(Debug, Clone, PartialEq)]
enum WithContains {
    Title,
    Subtitle,
    Content,
}
|
||||
|
||||
impl WithContains {
|
||||
pub fn matches(&self, post: &Post, value: &str) -> Result<bool> {
|
||||
match self {
|
||||
WithContains::Title => Ok(post.title.contains(value)),
|
||||
WithContains::Subtitle => Ok(post.subtitle.contains(value)),
|
||||
WithContains::Content => Ok(post.content.contains(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Keyword predicates that take no operand.
#[derive(Debug, Clone, PartialEq)]
enum Bool {
    /// The timeline owner follows an author of the post; the flags control
    /// whether reshares/likes by followed users also count.
    Followed { boosts: bool, likes: bool },
    /// The post has a cover image.
    HasCover,
    /// The post's blog is hosted on this instance.
    Local,
    /// Always true — matches every post.
    All,
}
|
||||
|
||||
impl Bool {
    /// Evaluates this keyword predicate against `post`.
    ///
    /// `Followed` is always false for instance-wide timelines
    /// (`timeline.user_id` is `None`), since there is no follower to check.
    pub fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        kind: Kind,
    ) -> Result<bool> {
        match self {
            Bool::Followed { boosts, likes } => {
                if timeline.user_id.is_none() {
                    return Ok(false);
                }
                // Safe: checked for None just above.
                let user = timeline.user_id.unwrap();
                match kind {
                    // Original post: does the owner follow any author?
                    // try_fold keeps the first DB error instead of
                    // swallowing it.
                    Kind::Original => post
                        .get_authors(&rocket.conn)?
                        .iter()
                        .try_fold(false, |s, a| {
                            a.is_followed_by(&rocket.conn, user).map(|r| s || r)
                        }),
                    // Reshares/likes only count when the flag allows them,
                    // and then only if the owner follows the boosting/liking
                    // user.
                    Kind::Reshare(u) => {
                        if *boosts {
                            u.is_followed_by(&rocket.conn, user)
                        } else {
                            Ok(false)
                        }
                    }
                    Kind::Like(u) => {
                        if *likes {
                            u.is_followed_by(&rocket.conn, user)
                        } else {
                            Ok(false)
                        }
                    }
                }
            }
            Bool::HasCover => Ok(post.cover_id.is_some()),
            Bool::Local => Ok(post.get_blog(&rocket.conn)?.is_local()),
            Bool::All => Ok(true),
        }
    }
}
|
||||
|
||||
/// The right-hand side of an `in` clause.
#[derive(Debug, Clone, PartialEq)]
enum List<'a> {
    /// A stored list referenced by name.
    List(&'a str),
    /// An inline literal list: `[a, b, c]`.
    Array(Vec<&'a str>),
}
|
||||
|
||||
fn parse_s<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
|
||||
let mut res = Vec::new();
|
||||
let (left, token) = parse_a(&stream)?;
|
||||
res.push(token);
|
||||
stream = left;
|
||||
while !stream.is_empty() {
|
||||
match stream[0] {
|
||||
Token::Word(_, _, and) if and == "or" => {}
|
||||
_ => break,
|
||||
}
|
||||
let (left, token) = parse_a(&stream[1..])?;
|
||||
res.push(token);
|
||||
stream = left;
|
||||
}
|
||||
|
||||
if res.len() == 1 {
|
||||
Ok((stream, res.remove(0)))
|
||||
} else {
|
||||
Ok((stream, TQ::Or(res)))
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_a<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
|
||||
let mut res = Vec::new();
|
||||
let (left, token) = parse_b(&stream)?;
|
||||
res.push(token);
|
||||
stream = left;
|
||||
while !stream.is_empty() {
|
||||
match stream[0] {
|
||||
Token::Word(_, _, and) if and == "and" => {}
|
||||
_ => break,
|
||||
}
|
||||
let (left, token) = parse_b(&stream[1..])?;
|
||||
res.push(token);
|
||||
stream = left;
|
||||
}
|
||||
|
||||
if res.len() == 1 {
|
||||
Ok((stream, res.remove(0)))
|
||||
} else {
|
||||
Ok((stream, TQ::And(res)))
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_b<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
|
||||
match stream.get(0) {
|
||||
Some(Token::LParent(_)) => {
|
||||
let (left, token) = parse_s(&stream[1..])?;
|
||||
match left.get(0) {
|
||||
Some(Token::RParent(_)) => Ok((&left[1..], token)),
|
||||
Some(t) => t.get_error(Token::RParent(0)),
|
||||
None => None?,
|
||||
}
|
||||
}
|
||||
_ => parse_c(stream),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_c<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
|
||||
match stream.get(0) {
|
||||
Some(Token::Word(_, _, not)) if not == &"not" => {
|
||||
let (left, token) = parse_d(&stream[1..])?;
|
||||
Ok((left, TQ::Arg(token, true)))
|
||||
}
|
||||
_ => {
|
||||
let (left, token) = parse_d(stream)?;
|
||||
Ok((left, TQ::Arg(token, false)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_d<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Arg<'a>)> {
|
||||
match stream.get(0).map(Token::get_text)? {
|
||||
s @ "blog" | s @ "author" | s @ "license" | s @ "tags" | s @ "lang" => {
|
||||
match stream.get(1)? {
|
||||
Token::Word(_, _, r#in) if r#in == &"in" => {
|
||||
let (mut left, list) = parse_l(&stream[2..])?;
|
||||
let kind = match s {
|
||||
"blog" => WithList::Blog,
|
||||
"author" => {
|
||||
let mut boosts = true;
|
||||
let mut likes = false;
|
||||
while let Some(Token::Word(s, e, clude)) = left.get(0) {
|
||||
if *clude != "include" && *clude != "exclude" {
|
||||
break;
|
||||
}
|
||||
match (*clude, left.get(1).map(Token::get_text)?) {
|
||||
("include", "reshares") | ("include", "reshare") => {
|
||||
boosts = true
|
||||
}
|
||||
("exclude", "reshares") | ("exclude", "reshare") => {
|
||||
boosts = false
|
||||
}
|
||||
("include", "likes") | ("include", "like") => likes = true,
|
||||
("exclude", "likes") | ("exclude", "like") => likes = false,
|
||||
(_, w) => {
|
||||
return Token::Word(*s, *e, w).get_error(Token::Word(
|
||||
0,
|
||||
0,
|
||||
"one of 'likes' or 'reshares'",
|
||||
))
|
||||
}
|
||||
}
|
||||
left = &left[2..];
|
||||
}
|
||||
WithList::Author { boosts, likes }
|
||||
}
|
||||
"license" => WithList::License,
|
||||
"tags" => WithList::Tags,
|
||||
"lang" => WithList::Lang,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
Ok((left, Arg::In(kind, list)))
|
||||
}
|
||||
t => t.get_error(Token::Word(0, 0, "'in'")),
|
||||
}
|
||||
}
|
||||
s @ "title" | s @ "subtitle" | s @ "content" => match (stream.get(1)?, stream.get(2)?) {
|
||||
(Token::Word(_, _, contains), Token::Word(_, _, w)) if contains == &"contains" => Ok((
|
||||
&stream[3..],
|
||||
Arg::Contains(
|
||||
match s {
|
||||
"title" => WithContains::Title,
|
||||
"subtitle" => WithContains::Subtitle,
|
||||
"content" => WithContains::Content,
|
||||
_ => unreachable!(),
|
||||
},
|
||||
w,
|
||||
),
|
||||
)),
|
||||
(Token::Word(_, _, contains), t) if contains == &"contains" => {
|
||||
t.get_error(Token::Word(0, 0, "any word"))
|
||||
}
|
||||
(t, _) => t.get_error(Token::Word(0, 0, "'contains'")),
|
||||
},
|
||||
s @ "followed" | s @ "has_cover" | s @ "local" | s @ "all" => match s {
|
||||
"followed" => {
|
||||
let mut boosts = true;
|
||||
let mut likes = false;
|
||||
while let Some(Token::Word(s, e, clude)) = stream.get(1) {
|
||||
if *clude != "include" && *clude != "exclude" {
|
||||
break;
|
||||
}
|
||||
match (*clude, stream.get(2).map(Token::get_text)?) {
|
||||
("include", "reshares") | ("include", "reshare") => boosts = true,
|
||||
("exclude", "reshares") | ("exclude", "reshare") => boosts = false,
|
||||
("include", "likes") | ("include", "like") => likes = true,
|
||||
("exclude", "likes") | ("exclude", "like") => likes = false,
|
||||
(_, w) => {
|
||||
return Token::Word(*s, *e, w).get_error(Token::Word(
|
||||
0,
|
||||
0,
|
||||
"one of 'likes' or 'boosts'",
|
||||
))
|
||||
}
|
||||
}
|
||||
stream = &stream[2..];
|
||||
}
|
||||
Ok((&stream[1..], Arg::Boolean(Bool::Followed { boosts, likes })))
|
||||
}
|
||||
"has_cover" => Ok((&stream[1..], Arg::Boolean(Bool::HasCover))),
|
||||
"local" => Ok((&stream[1..], Arg::Boolean(Bool::Local))),
|
||||
"all" => Ok((&stream[1..], Arg::Boolean(Bool::All))),
|
||||
_ => unreachable!(),
|
||||
},
|
||||
_ => stream.get(0)?.get_error(Token::Word(
|
||||
0,
|
||||
0,
|
||||
"one of 'blog', 'author', 'license', 'tags', 'lang', \
|
||||
'title', 'subtitle', 'content', 'followed', 'has_cover', 'local' or 'all'",
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_l<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], List<'a>)> {
|
||||
match stream.get(0)? {
|
||||
Token::LBracket(_) => {
|
||||
let (left, list) = parse_m(&stream[1..])?;
|
||||
match left.get(0)? {
|
||||
Token::RBracket(_) => Ok((&left[1..], List::Array(list))),
|
||||
t => t.get_error(Token::Word(0, 0, "one of ']' or ','")),
|
||||
}
|
||||
}
|
||||
Token::Word(_, _, list) => Ok((&stream[1..], List::List(list))),
|
||||
t => t.get_error(Token::Word(0, 0, "one of [list, of, words] or list_name")),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_m<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Vec<&'a str>)> {
|
||||
let mut res: Vec<&str> = Vec::new();
|
||||
res.push(match stream.get(0)? {
|
||||
Token::Word(_, _, w) => w,
|
||||
t => return t.get_error(Token::Word(0, 0, "any word")),
|
||||
});
|
||||
stream = &stream[1..];
|
||||
while let Token::Comma(_) = stream[0] {
|
||||
res.push(match stream.get(1)? {
|
||||
Token::Word(_, _, w) => w,
|
||||
t => return t.get_error(Token::Word(0, 0, "any word")),
|
||||
});
|
||||
stream = &stream[2..];
|
||||
}
|
||||
|
||||
Ok((stream, res))
|
||||
}
|
||||
|
||||
/// A parsed timeline query, wrapping the root of the condition tree.
///
/// Built with `TimelineQuery::parse`; evaluated with `matches`.
#[derive(Debug, Clone)]
pub struct TimelineQuery<'a>(TQ<'a>);
|
||||
|
||||
impl<'a> TimelineQuery<'a> {
    /// Lexes and parses `query` into a `TimelineQuery`.
    ///
    /// The whole token stream must be consumed: a trailing token (i.e. two
    /// leaves not joined by `or`/`and`) is a syntax error.
    // NOTE(review): "on of" below is a typo for "one of", but
    // test_rejection_parser asserts this exact string — fix both together.
    pub fn parse(query: &'a str) -> QueryResult<Self> {
        parse_s(&lex(query))
            .and_then(|(left, res)| {
                if left.is_empty() {
                    Ok(res)
                } else {
                    left[0].get_error(Token::Word(0, 0, "on of 'or' or 'and'"))
                }
            })
            .map(TimelineQuery)
    }

    /// Evaluates the query against `post` for the given `timeline`.
    pub fn matches(
        &self,
        rocket: &PlumeRocket,
        timeline: &Timeline,
        post: &Post,
        kind: Kind,
    ) -> Result<bool> {
        self.0.matches(rocket, timeline, post, kind)
    }

    /// Returns every named list this query references, with the `ListType`
    /// implied by the keyword it follows — used to warn about unknown lists
    /// at timeline-creation time.
    pub fn list_used_lists(&self) -> Vec<(String, ListType)> {
        self.0.list_used_lists()
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Lexer: one token per symbol, with byte offsets (and lengths for
    // words); quoting protects special characters like ',' and '['.
    #[test]
    fn test_lexer() {
        assert_eq!(
            lex("()[ ],two words \"something quoted with , and [\""),
            vec![
                Token::LParent(0),
                Token::RParent(1),
                Token::LBracket(2),
                Token::RBracket(4),
                Token::Comma(5),
                Token::Word(6, 3, "two"),
                Token::Word(10, 5, "words"),
                Token::Word(17, 29, "something quoted with , and ["),
            ]
        );
    }

    // Happy-path parses: precedence/grouping, every `in` keyword, every
    // `contains` field, and every boolean keyword with include/exclude flags.
    #[test]
    fn test_parser() {
        let q = TimelineQuery::parse(r#"lang in [fr, en] and (license in my_fav_lic or not followed) or title contains "Plume is amazing""#)
            .unwrap();
        assert_eq!(
            q.0,
            TQ::Or(vec![
                TQ::And(vec![
                    TQ::Arg(
                        Arg::In(WithList::Lang, List::Array(vec!["fr", "en"]),),
                        false
                    ),
                    TQ::Or(vec![
                        TQ::Arg(Arg::In(WithList::License, List::List("my_fav_lic"),), false),
                        TQ::Arg(
                            Arg::Boolean(Bool::Followed {
                                boosts: true,
                                likes: false
                            }),
                            true
                        ),
                    ]),
                ]),
                TQ::Arg(
                    Arg::Contains(WithContains::Title, "Plume is amazing",),
                    false
                ),
            ])
        );

        let lists = TimelineQuery::parse(
            r#"blog in a or author in b include likes or license in c or tags in d or lang in e "#,
        )
        .unwrap();
        assert_eq!(
            lists.0,
            TQ::Or(vec![
                TQ::Arg(Arg::In(WithList::Blog, List::List("a"),), false),
                TQ::Arg(
                    Arg::In(
                        WithList::Author {
                            boosts: true,
                            likes: true
                        },
                        List::List("b"),
                    ),
                    false
                ),
                TQ::Arg(Arg::In(WithList::License, List::List("c"),), false),
                TQ::Arg(Arg::In(WithList::Tags, List::List("d"),), false),
                TQ::Arg(Arg::In(WithList::Lang, List::List("e"),), false),
            ])
        );

        let contains = TimelineQuery::parse(
            r#"title contains a or subtitle contains b or content contains c"#,
        )
        .unwrap();
        assert_eq!(
            contains.0,
            TQ::Or(vec![
                TQ::Arg(Arg::Contains(WithContains::Title, "a"), false),
                TQ::Arg(Arg::Contains(WithContains::Subtitle, "b"), false),
                TQ::Arg(Arg::Contains(WithContains::Content, "c"), false),
            ])
        );

        let booleans = TimelineQuery::parse(
            r#"followed include like exclude reshares and has_cover and local and all"#,
        )
        .unwrap();
        assert_eq!(
            booleans.0,
            TQ::And(vec![
                TQ::Arg(
                    Arg::Boolean(Bool::Followed {
                        boosts: false,
                        likes: true
                    }),
                    false
                ),
                TQ::Arg(Arg::Boolean(Bool::HasCover), false),
                TQ::Arg(Arg::Boolean(Bool::Local), false),
                TQ::Arg(Arg::Boolean(Bool::All), false),
            ])
        );
    }

    // Error paths: each malformed query must yield a SyntaxError pointing at
    // the offending token (offset, length, message) or UnexpectedEndOfQuery.
    #[test]
    fn test_rejection_parser() {
        let missing_and_or = TimelineQuery::parse(r#"followed or has_cover local"#).unwrap_err();
        assert_eq!(
            missing_and_or,
            QueryError::SyntaxError(
                22,
                5,
                "Syntax Error: Expected on of 'or' or 'and', got 'local'".to_owned()
            )
        );

        let unbalanced_parent =
            TimelineQuery::parse(r#"followed and (has_cover or local"#).unwrap_err();
        assert_eq!(unbalanced_parent, QueryError::UnexpectedEndOfQuery);

        let missing_and_or_in_par =
            TimelineQuery::parse(r#"(title contains "abc def" followed)"#).unwrap_err();
        assert_eq!(
            missing_and_or_in_par,
            QueryError::SyntaxError(
                26,
                8,
                "Syntax Error: Expected ')', got 'followed'".to_owned()
            )
        );

        let expect_in = TimelineQuery::parse(r#"lang contains abc"#).unwrap_err();
        assert_eq!(
            expect_in,
            QueryError::SyntaxError(
                5,
                8,
                "Syntax Error: Expected 'in', got 'contains'".to_owned()
            )
        );

        let expect_contains = TimelineQuery::parse(r#"title in abc"#).unwrap_err();
        assert_eq!(
            expect_contains,
            QueryError::SyntaxError(
                6,
                2,
                "Syntax Error: Expected 'contains', got 'in'".to_owned()
            )
        );

        let expect_keyword = TimelineQuery::parse(r#"not_a_field contains something"#).unwrap_err();
        assert_eq!(expect_keyword, QueryError::SyntaxError(0, 11, "Syntax Error: Expected one of 'blog', \
            'author', 'license', 'tags', 'lang', 'title', 'subtitle', 'content', 'followed', 'has_cover', \
            'local' or 'all', got 'not_a_field'".to_owned()));

        let expect_bracket_or_comma = TimelineQuery::parse(r#"lang in [en ["#).unwrap_err();
        assert_eq!(
            expect_bracket_or_comma,
            QueryError::SyntaxError(
                12,
                1,
                "Syntax Error: Expected one of ']' or ',', \
                 got '['"
                    .to_owned()
            )
        );

        let expect_bracket = TimelineQuery::parse(r#"lang in )abc"#).unwrap_err();
        assert_eq!(
            expect_bracket,
            QueryError::SyntaxError(
                8,
                1,
                "Syntax Error: Expected one of [list, of, words] or list_name, \
                 got ')'"
                    .to_owned()
            )
        );

        let expect_word = TimelineQuery::parse(r#"title contains ,"#).unwrap_err();
        assert_eq!(
            expect_word,
            QueryError::SyntaxError(15, 1, "Syntax Error: Expected any word, got ','".to_owned())
        );

        let got_bracket = TimelineQuery::parse(r#"lang in []"#).unwrap_err();
        assert_eq!(
            got_bracket,
            QueryError::SyntaxError(9, 1, "Syntax Error: Expected any word, got ']'".to_owned())
        );

        let got_par = TimelineQuery::parse(r#"lang in [a, ("#).unwrap_err();
        assert_eq!(
            got_par,
            QueryError::SyntaxError(12, 1, "Syntax Error: Expected any word, got '('".to_owned())
        );
    }

    // list_used_lists must report every named list (inline arrays excluded)
    // with the ListType implied by its keyword, in query order.
    #[test]
    fn test_list_used_lists() {
        let q = TimelineQuery::parse(r#"lang in [fr, en] and blog in blogs or author in my_fav_authors or tags in hashtag and lang in spoken or license in copyleft"#)
            .unwrap();
        let used_lists = q.list_used_lists();
        assert_eq!(
            used_lists,
            vec![
                ("blogs".to_owned(), ListType::Blog),
                ("my_fav_authors".to_owned(), ListType::User),
                ("hashtag".to_owned(), ListType::Word),
                ("spoken".to_owned(), ListType::Prefix),
                ("copyleft".to_owned(), ListType::Word),
            ]
        );
    }
}
|
@ -0,0 +1,51 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use crate::{routes::errors::ErrorPage, template_utils::Ructe};
|
||||
use plume_models::{timeline::*, PlumeRocket};
|
||||
use rocket::response::Redirect;
|
||||
use routes::Page;
|
||||
use template_utils::IntoContext;
|
||||
|
||||
#[get("/timeline/<id>?<page>")]
|
||||
pub fn details(id: i32, rockets: PlumeRocket, page: Option<Page>) -> Result<Ructe, ErrorPage> {
|
||||
let page = page.unwrap_or_default();
|
||||
let all_tl = Timeline::list_all_for_user(&rockets.conn, rockets.user.clone().map(|u| u.id))?;
|
||||
let tl = Timeline::get(&rockets.conn, id)?;
|
||||
let posts = tl.get_page(&rockets.conn, page.limits())?;
|
||||
let total_posts = tl.count_posts(&rockets.conn)?;
|
||||
Ok(render!(timelines::details(
|
||||
&rockets.to_context(),
|
||||
tl,
|
||||
posts,
|
||||
all_tl,
|
||||
page.0,
|
||||
Page::total(total_posts as i32)
|
||||
)))
|
||||
}
|
||||
|
||||
// TODO
|
||||
|
||||
// Stub — presumably the timeline-creation form. TODO: implement.
#[get("/timeline/new")]
pub fn new() -> Result<Ructe, ErrorPage> {
    unimplemented!()
}
|
||||
|
||||
// Stub — presumably handles the creation form submission. TODO: implement.
#[post("/timeline/new")]
pub fn create() -> Result<Redirect, Ructe> {
    unimplemented!()
}
|
||||
|
||||
// Stub — presumably the edit form for timeline `_id`. TODO: implement.
#[get("/timeline/<_id>/edit")]
pub fn edit(_id: i32) -> Result<Ructe, ErrorPage> {
    unimplemented!()
}
|
||||
|
||||
// Stub — presumably applies edits to timeline `_id`. TODO: implement.
#[post("/timeline/<_id>/edit")]
pub fn update(_id: i32) -> Result<Redirect, Ructe> {
    unimplemented!()
}
|
||||
|
||||
// Stub — presumably deletes timeline `_id`. TODO: implement.
#[post("/timeline/<_id>/delete")]
pub fn delete(_id: i32) -> Result<Redirect, ErrorPage> {
    unimplemented!()
}
|
@ -1,34 +0,0 @@
|
||||
@use plume_models::posts::Post;
|
||||
@use templates::{base, partials::post_card};
|
||||
@use template_utils::*;
|
||||
@use routes::*;
|
||||
|
||||
@(ctx: BaseContext, articles: Vec<Post>, page: i32, n_pages: i32)
|
||||
|
||||
@:base(ctx, i18n!(ctx.1, "All the articles of the Fediverse"), {}, {}, {
|
||||
<div class="h-feed">
|
||||
<h1 "p-name">@i18n!(ctx.1, "All the articles of the Fediverse")</h1>
|
||||
|
||||
@if ctx.2.is_some() {
|
||||
@tabs(&[
|
||||
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
|
||||
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), false),
|
||||
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), true),
|
||||
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), false),
|
||||
])
|
||||
} else {
|
||||
@tabs(&[
|
||||
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
|
||||
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), true),
|
||||
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), false),
|
||||
])
|
||||
}
|
||||
|
||||
<div class="cards">
|
||||
@for article in articles {
|
||||
@:post_card(ctx, article)
|
||||
}
|
||||
</div>
|
||||
@paginate(ctx.1, page, n_pages)
|
||||
</div>
|
||||
})
|
@ -1,28 +0,0 @@
|
||||
@use plume_models::posts::Post;
|
||||
@use templates::{base, partials::post_card};
|
||||
@use template_utils::*;
|
||||
@use routes::*;
|
||||
|
||||
@(ctx: BaseContext, articles: Vec<Post>, page: i32, n_pages: i32)
|
||||
|
||||
@:base(ctx, i18n!(ctx.1, "Your feed"), {}, {}, {
|
||||
<h1>@i18n!(ctx.1, "Your feed")</h1>
|
||||
|
||||
@tabs(&[
|
||||
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
|
||||
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), true),
|
||||
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), false),
|
||||
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), false),
|
||||
])
|
||||
|
||||
@if !articles.is_empty() {
|
||||
<div class="cards">
|
||||
@for article in articles {
|
||||
@:post_card(ctx, article)
|
||||
}
|
||||
</div>
|
||||
} else {
|
||||
<p class="center">@i18n!(ctx.1, "Nothing to see here yet. Try subscribing to more people.")</p>
|
||||
}
|
||||
@paginate(ctx.1, page, n_pages)
|
||||
})
|
@ -1,35 +0,0 @@
|
||||
@use plume_models::posts::Post;
|
||||
@use plume_models::instance::Instance;
|
||||
@use templates::{base, partials::post_card};
|
||||
@use template_utils::*;
|
||||
@use routes::*;
|
||||
|
||||
@(ctx: BaseContext, instance: Instance, articles: Vec<Post>, page: i32, n_pages: i32)
|
||||
|
||||
@:base(ctx, i18n!(ctx.1, "Articles from {}"; instance.name.clone()), {}, {}, {
|
||||
<div class="h-feed">
|
||||
<h1 class="p-name">@i18n!(ctx.1, "Articles from {}"; instance.name)</h1>
|
||||
|
||||
@if ctx.2.is_some() {
|
||||
@tabs(&[
|
||||
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
|
||||
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), false),
|
||||
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), false),
|
||||
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), true),
|
||||
])
|
||||
} else {
|
||||
@tabs(&[
|
||||
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
|
||||
(&uri!(instance::federated: _).to_string(), i18n!(ctx.1, "Federated feed"), false),
|
||||
(&uri!(instance::local: _).to_string(), i18n!(ctx.1, "Local feed"), true),
|
||||
])
|
||||
}
|
||||
|
||||
<div class="cards">
|
||||
@for article in articles {
|
||||
@:post_card(ctx, article)
|
||||
}
|
||||
</div>
|
||||
@paginate(ctx.1, page, n_pages)
|
||||
</div>
|
||||
})
|
@ -1,16 +0,0 @@
|
||||
@use templates::partials::post_card;
|
||||
@use plume_models::posts::Post;
|
||||
@use template_utils::*;
|
||||
|
||||
@(ctx: BaseContext, articles: Vec<Post>, link: &str, title: String)
|
||||
|
||||
@if !articles.is_empty() {
|
||||
<div class="h-feed">
|
||||
<h2 dir="auto"><span class="p-name">@title</span> — <a href="@link">@i18n!(ctx.1, "View all")</a></h2>
|
||||
<div class="cards">
|
||||
@for article in articles {
|
||||
@:post_card(ctx, article)
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
}
|
@ -0,0 +1,38 @@
|
||||
@use plume_models::posts::Post;
|
||||
@use plume_models::timeline::Timeline;
|
||||
@use template_utils::*;
|
||||
@use templates::base;
|
||||
@use templates::partials::post_card;
|
||||
@use routes::*;
|
||||
|
||||
@(ctx: BaseContext, tl: Timeline, articles: Vec<Post>, all_tl: Vec<Timeline>, page: i32, n_pages: i32)
|
||||
|
||||
@:base(ctx, tl.name.clone(), {}, {}, {
|
||||
<section class="flex wrap" dir="auto">
|
||||
<h1 class="grow">@i18n_timeline_name(ctx.1, &tl.name)</h1>
|
||||
@if ctx.clone().2.map(|u| (u.is_admin() && tl.user_id.is_none()) || Some(u.id) == tl.user_id).unwrap_or(false) {
|
||||
<a href="@uri!(timelines::edit: _id = tl.id)" class="button inline-block">@i18n!(ctx.1, "Edit")</a>
|
||||
}
|
||||
</section>
|
||||
|
||||
@tabs(&vec![(format!("{}", uri!(instance::index)), i18n!(ctx.1, "Latest articles"), false)]
|
||||
.into_iter().chain(all_tl
|
||||
.into_iter()
|
||||
.map(|t| {
|
||||
let url = format!("{}", uri!(timelines::details: id = t.id, page = _));
|
||||
(url, i18n_timeline_name(ctx.1, &t.name), t.id == tl.id)
|
||||
})
|
||||
).collect::<Vec<_>>()
|
||||
)
|
||||
|
||||
@if !articles.is_empty() {
|
||||
<div class="cards">
|
||||
@for article in articles {
|
||||
@:post_card(ctx, article)
|
||||
}
|
||||
</div>
|
||||
} else {
|
||||
<p class="center">@i18n!(ctx.1, "Nothing to see here yet.")</p>
|
||||
}
|
||||
@paginate(ctx.1, page, n_pages)
|
||||
})
|
Loading…
Reference in New Issue