Compare commits

...

64 Commits

Author SHA1 Message Date
Kitaiti Makoto 2e9fd05d6d Update posts when requested 3 years ago
Kitaiti Makoto 74faac61a4 Make plume_models::search::searcher public 3 years ago
Kitaiti Makoto 1c464c829e Publish PostUpdate to channel instead of tell ActorSearcher directly 3 years ago
Kitaiti Makoto 27618def0c Make SearcherActor subscribe to channel 3 years ago
Kitaiti Makoto 100d73c859 Change SearcherActor's message from SearcherActorMsg to PostEvent 3 years ago
Kitaiti Makoto 290df12cb8 Define ACTORS and POST_CHAN as global 3 years ago
Kitaiti Makoto 46fd0f613a Install once_cell 3 years ago
Kitaiti Makoto 1b481e0eab Send messages to searcher actor from Post model instead of controllers 3 years ago
Kitaiti Makoto 1fea72e80f Replace PlumeRockets::actors() with ACTORS.clone() 3 years ago
Kitaiti Makoto dc7eacb916 Use global actor system at setup process 3 years ago
Kitaiti Makoto 4dfb5cc821 Define global actor system 3 years ago
Kitaiti Makoto 6de30edbc3 Remove Searcher from Post::insert() 3 years ago
Kitaiti Makoto f4eb612359 Use searcher actor for post addition 3 years ago
Kitaiti Makoto 298784520d Make member of search::AddDocument public 3 years ago
Kitaiti Makoto dc5a1b5016 Fix searcher actor path 3 years ago
Kitaiti Makoto 41de2541d9 Don't unwrap searcher 3 years ago
Ana Gelez 8aa4ae4302
Make tests compile again 4 years ago
Mina Galić 56091c0d49
add UpdateDocument to list of SearcherActor messages
how did this even work?!
I've only been using UpdateDocument so far, and it wasn't listed!
4 years ago
Mina Galić d7e380f83e
remove Searcher from Post::update()
thereby decoupling the two systems (for updates, anyway)
That means, consumers of Post need to update_document themselves!
Post (in update) now only talks to the database)

Tests for this will now be failing, as they haven't been touched yet
4 years ago
Mina Galić 06e20299e0
use searcher_actor for posts update route (instead of post.update()) 4 years ago
Mina Galić 94cf4dab98
add ActorSystem member to PlumeRocket 4 years ago
Mina Galić 1dfad6469b
initialize Actor system in main, as well as SearcherActor 4 years ago
Ana Gelez ee6a562404
Wrap Searcher in an Arc only in main 4 years ago
Mina Galić 54496c1527
Implement an actor around Searcher
this compiles, but isn't used yet.
4 years ago
Mina Galić 4b0b03b848
plume-models: fix test compilation 4 years ago
Ana Gelez f6d169567c
Introduce Searcher::new
This function does what was previously done in main:
create a search index, and try to recover for errors
if possible.

This commit also fixes plume-cli to use the new Searcher
API (that depends on a DbPool, not on a single Connection).
4 years ago
Ana Gelez eaad38ad2c
Avoid unwrapping, return an Error instead 4 years ago
Mina Galić 962fbcc503
plume-model: refactor Searcher to have its own DbPool
this way, we don't need to pass along a conn into the function.
This should make splitting PlumeRocket up into its components a little
easier. This PR is another attempt at fixing #799.
4 years ago
Mina Galić f945e18bf1
Revert "Add an SearcherActor, wrapping Searcher & DbPool"
This reverts commit 0757f84397fc4d4ecba8a904e9d25f1b243b4292.
We'll attempt to use a strategy in #807
4 years ago
Mina Galić b92a877420
finish off the strategy document 4 years ago
Mina Galić 046b5d0214
Add past attempts at solutions. 4 years ago
Mina Galić 6935565efd
add design document describing current issues 4 years ago
Mina Galić 5e17636b9d
Add an SearcherActor, wrapping Searcher & DbPool
and implement AddDocument message!
This code compiles, but hasn't been put to use yet.
4 years ago
Mina Galić d344e06efd
add riker as dependency, we will use it to replace searcher 4 years ago
trinity-1686a 9ec2d93f50 implement login via LDAP
Reviewed-on: Plume/Plume#826
Reviewed-by: Mina Galić <me+git@igalic.co>
4 years ago
Trinity Pointard 4da9b24cb1 cargo fmt 4 years ago
Trinity Pointard f40634aa97 address comments 4 years ago
Trinity Pointard ccba8163c9 fix compilation issue with cli 4 years ago
Trinity Pointard 8975b0f9e9 cargo fmt 4 years ago
Trinity Pointard d626f3366d add support for ldap 4 years ago
Trinity Pointard b24f195e10 refactor login
first step toward ldap
should have no functionnal change
4 years ago
Mina Galić 003dcf861a Merge pull request 'CSS Fix: My title is displayed at the wrong position on Webkit browsers' (#825) from quentin/Plume:pr/flex into main
Reviewed-on: Plume/Plume#825
Reviewed-by: Mina Galić <me+git@igalic.co>
4 years ago
Quentin Dufour 5d8efa77a3 Fix title 4 years ago
kiwii fb6d49cc6e Merge pull request 'GPU acceleration for the mobile menu' (#818) from gpu-acceleration into main
Reviewed-on: Plume/Plume#818
Reviewed-by: kiwii <kiwii@noreply@joinplu.me>
4 years ago
Mina Galić 48776939f4 Merge branch 'main' into gpu-acceleration 4 years ago
Marek Ľach 104b6c1d0c
GPU acceleration for the mobile menu
Reimplements #810, but for all browsers.
4 years ago
Mina Galić 3881927550 Merge pull request 'mobile-margins' (#817) from mobile-margins into main
Reviewed-on: Plume/Plume#817
Reviewed-by: Mina Galić <me+git@igalic.co>
4 years ago
Ana Gelez 0bec13edc0 Improve responsiveness 4 years ago
Ana Gelez 191d6da486 Move header-related style to _header.scss
It seems to fix the overflowing menu items on small screens too
4 years ago
kiwii 9aaff1a484 Merge pull request 'Use result of local storage insert operation to suppress Clippy' (#810) from KitaitiMakoto/Plume:suppress-clippy into main
Reviewed-on: Plume/Plume#810
Reviewed-by: kiwii <kiwii@noreply@joinplu.me>
4 years ago
Kitaiti Makoto 36fd55a7e4 Use result of local storage insert operation to suppress Clippy 4 years ago
kiwii fc474bf8d1 Merge pull request '[BUG FIX]Make it possible to switch to rich text editor' (#808) from KitaitiMakoto/Plume:richtexteditor into main
Reviewed-on: Plume/Plume#808
Reviewed-by: kiwii <kiwii@noreply@joinplu.me>
4 years ago
Kitaiti Makoto a050deb557 [BUG FIX]Make it possible to switch to rich text editor 4 years ago
kiwii 6de9a1f1c8 Merge pull request 'Recreate search index if its format is outdated' (#802) from KitaitiMakoto/Plume:invalid-index into main
Reviewed-on: Plume/Plume#802
Reviewed-by: Mina Galić <me+git@igalic.co>
4 years ago
Kitaiti Makoto 5e30bede40 Don't care about needless return value of closures 4 years ago
Kitaiti Makoto 5cca66b346 Add action user can take to error message 4 years ago
Kitaiti Makoto 484659fde2 Run cargo fmt 4 years ago
Kitaiti Makoto 5fc827c1c9 Re-initialize search index when recreating is failed 4 years ago
Kitaiti Makoto 50753b3a65 Recreate search index if its format is outdated 4 years ago
Kitaiti Makoto c5d03d300b Cause IndexInvalidDataError when search index is invalid 4 years ago
kiwii 010eac6c4a Merge pull request 'upgrade rocket* to the latest stable' (#800) from igalic/Plume:update/rocket into main
Reviewed-on: Plume/Plume#800
Reviewed-by: kiwii <kiwii@noreply@joinplu.me>
4 years ago
Mina Galić a107d35492
upgrade rocket* to the latest stable
before we embark on upgrading to async, and all the refactoring that
this will bring on us (see #797 & #799), we should keep our `main`
branch as stable and current as possible.
Let's start by upgrading rocket and its dependencies.
4 years ago
Kitaiti Makoto f9beb2383b Update lindera-tantivy to v0.1.3 4 years ago
Chosto 7e78cffcaa
Bump Docker base images to buster flavor (#797) 4 years ago

@ -45,3 +45,12 @@ ROCKET_ADDRESS=127.0.0.1
#PLUME_LOGO_192=icons/trwnh/paragraphs/plumeParagraphs192.png
#PLUME_LOGO_256=icons/trwnh/paragraphs/plumeParagraphs256.png
#PLUME_LOGO_512=icons/trwnh/paragraphs/plumeParagraphs512.png
## LDAP CONFIG ##
# the object that will be bound is "${USER_NAME_ATTR}=${username},${BASE_DN}"
#LDAP_ADDR=ldap://127.0.0.1:1389
#LDAP_BASE_DN="ou=users,dc=your-org,dc=eu"
#LDAP_USER_NAME_ATTR=cn
#LDAP_USER_MAIL_ATTR=mail
#LDAP_TLS=false

2097
Cargo.lock generated

File diff suppressed because it is too large Load Diff

@ -20,8 +20,9 @@ heck = "0.3.0"
lettre = "0.9.2"
lettre_email = "0.9.2"
num_cpus = "1.10"
rocket = "0.4.2"
rocket_contrib = { version = "0.4.2", features = ["json"] }
riker = "0.4"
rocket = "0.4.5"
rocket_contrib = { version = "0.4.5", features = ["json"] }
rocket_i18n = { git = "https://github.com/Plume-org/rocket_i18n", rev = "e922afa7c366038b3433278c03b1456b346074f2" }
rpassword = "4.0"
scheduled-thread-pool = "0.2.2"

@ -0,0 +1,101 @@
# Refactoring
This document describes the Design Goals and Refactoring Strategies of our ongoing efforts to improve the architecture, and with it the stability and performance, of Plume.
## Current Issues
Let's look at the current architecture's problems, and the attempts we've made at resolving them.
### PlumeRocket
This data structure was [introduced by yours truly](https://github.com/Plume-org/Plume/pull/462) with the intent to please Clippy, reduce the number of arguments passed around (the main thing Clippy complained about).
We also tried to reduce the number of bytes being passed around by using references.
At the time, this seemed like a good idea.
Right now, it's the main source of our problems.
This `struct` bundles `DbConn`, which makes it difficult to migrate Plume to `async` Rocket:
Passing around a giant `struct` as `ref` in the `async` world means that different owners are waiting for it to be `.await`ed so they can access it.
This makes working with such a `struct` very unwieldy, if not impossible sometimes.
### DbConn, Searcher and Post
`DbConn` and `Searcher` are deeply bundled via `Post`.
`Searcher` is called every time a `Post` is added/updated/deleted.
It needs access to `DbConn` to fill the data that `Post` does not have.
While removing one or the other from `PlumeRocket` is possible, complications still arise with `AsObject`:
Posts's `AsObject` APIs are called every time a Post is added/updated/deleted.
It builds on `PlumeRocket` as main `Context`, and so we'd have to touch every API if we split either `DbConn` or `Searcher` out of `PlumeRocket`
## Solution Attempts and their Problems
In the past couple of weeks, we've made the following attempts to at least partially dissolve `PlumeRocket`:
- [plume-model: refactor Searcher to have its own DbPool](https://git.joinplu.me/Plume/Plume/pulls/809)
- [WIP: Experiment: extract Searcher into an Actor](https://git.joinplu.me/Plume/Plume/pulls/807)
- [extract DbConn from PlumeRocket](https://git.joinplu.me/Plume/Plume/pulls/805)
As soon as we attempted to delete one of the members from `PlumeRocket`, compilation would fail all over the place, meaning we'd have to touch almost every single function's *signature* that uses `PlumeRocket`.
This then means we'd have to touch every single function that in turn uses those functions!
That is a lot of broken code, before we've even started refactoring.
## Strategy
Despite our ambitions to use an [Actor System (Riker)](https://riker.rs/), what has held us back is not the magnitude of those ambitions, but the size of the steps we've taken.
So, given past failures we devise a strategy.
We want to replace each of the systems in `PlumeRocket` with an `Actor`, accessed by a single reference to the `ActorSystem`.
This way we don't have to touch every single function's parameters that `PlumeRocket` flows thru, while the code is still in motion.
### Actors
in [#807](https://git.joinplu.me/Plume/Plume/pulls/807), we've already made our first attempt at extracting `Searcher` into a Riker `Actor`.
Here, just like in [#809](https://git.joinplu.me/Plume/Plume/pulls/809), we've already given `Searcher` its own `DbPool`.
Why?
### DbPool instead of DbConn
In our previous attempts at this code, we've realized that `DbPool`, being wrapped in an `Arc` is very cheap to `.clone()`.
This means that every `Actor` that needs a `DbConn`, could get its own `DbPool`.
We also realized that `DbPool` acts like an `Actor` in its own right:
- `DbPool` has a `.get()` message
- when that message is sent, it responds with a `DbConn`
- if the pool is exhausted, it does not!
Thus, we conclude:
We want to `.clone()` a `DbPool` for every Actor that we extract from `PlumeRocket` that needs it.
### Workers
In [#799](https://git.joinplu.me/Plume/Plume/issues/799#issuecomment-4402), we've identified the following `workers`:
> Here is the list of things the worker is used for:
>
> - sending activities to other instances (just needs the activity and a list of recipients)
> - rotating user keypair when they delete a post (after 10 minutes), which requires the DB
> - fetching posts for a blog/users, either because it is new or because it has not been updated in 24 hours (depends on the DB too, and on the searcher)
For the first type of worker, we'll have to make *repeated* network requests.
There's a [Riker issue](https://github.com/riker-rs/riker/issues/130) asking how to best do that (with an answer.)
The two workers that need access to the database, should get their own `DbPool`.
Being part of the same `ActorSystem`, the last type of worker will be able to access `Searcher` thru messages.
### Request Path vs DbConn vs async
For the Rocket Request path, we want to wrap `DbPool` in an `Actor` of its own:
Then, in the `RequestGuard` we'd [ask](https://riker.rs/patterns/#ask) for a `DbConn`.
This brings us on-par with Rocket's current [`#[database]`](https://github.com/SergioBenitez/Rocket/pull/1375) approach, that puts database connection access into `spawn_blocking()`.
However, unlike `#[database]`, Riker's `ask()` responds with a Future, which would mean that in `async` functions we could simply `.await` it!
## The long road
Once we've refactored the main systems in `PlumeRocket` into their own Actors, and once we're down to only the `ActorSystem` being the main component of `PlumeRocket`, we can finally shed the husk.
That means that we *do* go and touch all the functions that take `PlumeRocket` and only give them access to the things they need.
This is also a good chance to look at functions that are too long, or are doing too much, and mark them for refactoring.

@ -1,4 +1,4 @@
FROM rust:1-stretch as builder
FROM rust:1-buster as builder
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
@ -28,7 +28,7 @@ RUN cargo install --path ./ --force --no-default-features --features postgres
RUN cargo install --path plume-cli --force --no-default-features --features postgres
RUN cargo clean
FROM debian:stretch-slim
FROM debian:buster-slim
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \

@ -1,4 +1,4 @@
FROM rust:1-stretch
FROM rust:1-buster
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \

@ -40,7 +40,7 @@ main header.article {
display: flex;
flex-direction: column;
justify-content: end;
justify-content: flex-end;
h1, .article-info {
text-align: center;
@ -490,3 +490,30 @@ input:checked ~ .cw-container > .cw-text {
display: inline;
}
}
// Small screens
@media screen and (max-width: 600px) {
#plume-editor header {
flex-direction: column-reverse;
button {
flex: 0 0 0;
}
}
.popup {
top: 10vh;
bottom: 10vh;
left: 1vw;
right: 1vw;
}
main article {
margin: 2.5em .5em;
max-width: none;
}
main .article-meta > *, main .article-meta .comments, main .article-meta > .banner > * {
margin: 0 5%;
}
}

@ -490,6 +490,10 @@ figure {
/// Small screens
@media screen and (max-width: 600px) {
body > main > *, .h-feed > * {
margin: 1em;
}
main .article-meta {
> *, .comments {
margin: 0 5%;
@ -535,7 +539,7 @@ figure {
margin: 0;
& > * {
max-width: 100%;
max-width: 100% !important;
}
}

@ -205,6 +205,7 @@ body > header {
position: absolute;
left: 50%;
transform: translate(-50%, 0);
transform: translateZ(0);
opacity: 0;
font-size: 0.9em;
white-space: nowrap;
@ -221,3 +222,93 @@ body > header {
}
}
}
// Small screens
@media screen and (max-width: 600px) {
@keyframes menuOpening {
from {
transform: scaleX(0);
transform-origin: left;
opacity: 0;
}
to {
transform: scaleX(1);
transform-origin: left;
opacity: 1;
}
}
body > header {
flex-direction: column;
nav#menu {
display: inline-flex;
z-index: 21;
}
#content {
display: none;
appearance: none;
text-align: center;
z-index: 20;
}
}
body > header:focus-within #content, #content.show {
position: fixed;
display: flex;
flex-direction: column;
justify-content: flex-start;
top: 0;
left: 0;
width: 100%;
height: 100%;
box-sizing: border-box;
animation: 0.2s menuOpening;
&::before {
content: "";
position: absolute;
transform: skewX(-10deg);
top: 0;
left: -20%;
width: 100%;
height: 100%;
z-index: -10;
background: $primary;
}
> nav {
flex-direction: column;
align-items: flex-start;
a {
display: flex;
flex-direction: row;
align-items: center;
margin: 0;
padding: 1rem 1.5rem;
color: $background;
font-size: 1.4em;
font-weight: 300;
&.title { font-size: 1.8em; }
> *:first-child { width: 3rem; }
> img:first-child { height: 3rem; }
> *:last-child { margin-left: 1rem; }
> nav hr {
display: block;
margin: 0;
width: 100%;
border: solid $background 0.1rem;
}
.mobile-label { display: initial; }
}
}
}
}

@ -1,8 +1,7 @@
use dotenv;
use clap::App;
use diesel::Connection;
use plume_models::{instance::Instance, Connection as Conn, CONFIG};
use plume_models::{db_conn::init_pool, instance::Instance};
use std::io::{self, prelude::*};
mod instance;
@ -26,22 +25,27 @@ fn main() {
Err(ref e) if e.not_found() => eprintln!("no .env was found"),
e => e.map(|_| ()).unwrap(),
}
let conn = Conn::establish(CONFIG.database_url.as_str());
let _ = conn.as_ref().map(|conn| Instance::cache_local(conn));
let db_pool = init_pool()
.expect("Couldn't create a database pool, please check DATABASE_URL in your .env");
let _ = db_pool
.get()
.as_ref()
.map(|conn| Instance::cache_local(conn));
match matches.subcommand() {
("instance", Some(args)) => {
instance::run(args, &conn.expect("Couldn't connect to the database."))
}
("migration", Some(args)) => {
migration::run(args, &conn.expect("Couldn't connect to the database."))
}
("search", Some(args)) => {
search::run(args, &conn.expect("Couldn't connect to the database."))
}
("users", Some(args)) => {
users::run(args, &conn.expect("Couldn't connect to the database."))
}
("instance", Some(args)) => instance::run(
args,
&db_pool.get().expect("Couldn't connect to the database."),
),
("migration", Some(args)) => migration::run(
args,
&db_pool.get().expect("Couldn't connect to the database."),
),
("search", Some(args)) => search::run(args, db_pool),
("users", Some(args)) => users::run(
args,
&db_pool.get().expect("Couldn't connect to the database."),
),
_ => app.print_help().expect("Couldn't print help"),
};
}

@ -1,6 +1,6 @@
use clap::{App, Arg, ArgMatches, SubCommand};
use plume_models::{search::Searcher, Connection, CONFIG};
use plume_models::{db_conn::DbPool, search::Searcher, CONFIG};
use std::fs::{read_dir, remove_file};
use std::io::ErrorKind;
use std::path::Path;
@ -52,7 +52,7 @@ pub fn command<'a, 'b>() -> App<'a, 'b> {
)
}
pub fn run<'a>(args: &ArgMatches<'a>, conn: &Connection) {
pub fn run<'a>(args: &ArgMatches<'a>, conn: DbPool) {
let conn = conn;
match args.subcommand() {
("init", Some(x)) => init(x, conn),
@ -63,7 +63,7 @@ pub fn run<'a>(args: &ArgMatches<'a>, conn: &Connection) {
}
}
fn init<'a>(args: &ArgMatches<'a>, conn: &Connection) {
fn init<'a>(args: &ArgMatches<'a>, db_pool: DbPool) {
let path = args
.value_of("path")
.map(|p| Path::new(p).join("search_index"))
@ -82,8 +82,8 @@ fn init<'a>(args: &ArgMatches<'a>, conn: &Connection) {
}
};
if can_do || force {
let searcher = Searcher::create(&path, &CONFIG.search_tokenizers).unwrap();
refill(args, conn, Some(searcher));
let searcher = Searcher::create(&path, db_pool.clone(), &CONFIG.search_tokenizers).unwrap();
refill(args, db_pool, Some(searcher));
} else {
eprintln!(
"Can't create new index, {} exist and is not empty",
@ -92,16 +92,16 @@ fn init<'a>(args: &ArgMatches<'a>, conn: &Connection) {
}
}
fn refill<'a>(args: &ArgMatches<'a>, conn: &Connection, searcher: Option<Searcher>) {
fn refill<'a>(args: &ArgMatches<'a>, db_pool: DbPool, searcher: Option<Searcher>) {
let path = args.value_of("path");
let path = match path {
Some(path) => Path::new(path).join("search_index"),
None => Path::new(&CONFIG.search_index).to_path_buf(),
};
let searcher =
searcher.unwrap_or_else(|| Searcher::open(&path, &CONFIG.search_tokenizers).unwrap());
let searcher = searcher
.unwrap_or_else(|| Searcher::open(&path, db_pool, &CONFIG.search_tokenizers).unwrap());
searcher.fill(conn).expect("Couldn't import post");
searcher.fill().expect("Couldn't import post");
println!("Commiting result");
searcher.commit();
}

@ -132,7 +132,7 @@ fn new<'a>(args: &ArgMatches<'a>, conn: &Connection) {
role,
&bio,
email,
User::hash_pass(&password).expect("Couldn't hash password"),
Some(User::hash_pass(&password).expect("Couldn't hash password")),
)
.expect("Couldn't save new user");
}

@ -14,7 +14,7 @@ heck = "0.3.0"
hex = "0.3"
hyper = "0.12.33"
openssl = "0.10.22"
rocket = "0.4.0"
rocket = "0.4.5"
reqwest = "0.9"
serde = "1.0"
serde_derive = "1.0"

@ -269,7 +269,13 @@ pub fn init() -> Result<(), EditorError> {
let editor_button = document().create_element("a")?;
js! { @{&editor_button}.href = "#"; }
editor_button.add_event_listener(|_: ClickEvent| {
window().local_storage().remove("basic-editor");
if window()
.local_storage()
.insert("basic-editor", "false")
.is_err()
{
console!(log, "Failed to write into local storage");
}
window().history().go(0).ok(); // refresh
});
editor_button.append_child(

@ -13,9 +13,10 @@ guid-create = "0.1"
heck = "0.3.0"
itertools = "0.8.0"
lazy_static = "1.0"
ldap3 = "0.7.1"
migrations_internals= "1.4.0"
openssl = "0.10.22"
rocket = "0.4.0"
rocket = "0.4.5"
rocket_i18n = { git = "https://github.com/Plume-org/rocket_i18n", rev = "e922afa7c366038b3433278c03b1456b346074f2" }
reqwest = "0.9"
scheduled-thread-pool = "0.2.2"
@ -30,7 +31,9 @@ whatlang = "0.7.1"
shrinkwraprs = "0.2.1"
diesel-derive-newtype = "0.1.2"
glob = "0.3.0"
lindera-tantivy = { version = "0.1.2", optional = true }
lindera-tantivy = { version = "0.1.3", optional = true }
riker = "0.4"
once_cell = "1.5.2"
[dependencies.chrono]
features = ["serde"]

@ -20,6 +20,7 @@ pub struct Config {
pub logo: LogoConfig,
pub default_theme: String,
pub media_directory: String,
pub ldap: Option<LdapConfig>,
}
#[derive(Debug, Clone)]
@ -240,6 +241,40 @@ impl SearchTokenizerConfig {
}
}
/// Connection settings for the optional LDAP authentication backend.
///
/// Built from the `LDAP_*` environment variables by `get_ldap_config`.
pub struct LdapConfig {
    /// Address of the LDAP server, e.g. `ldap://127.0.0.1:1389`.
    pub addr: String,
    /// Base DN under which users live, e.g. `ou=users,dc=your-org,dc=eu`.
    /// The object that gets bound is `"{user_name_attr}={username},{base_dn}"`.
    pub base_dn: String,
    /// Whether to use TLS for the LDAP connection.
    pub tls: bool,
    /// Attribute holding the user name (defaults to `cn`).
    pub user_name_attr: String,
    /// Attribute holding the user's mail address (defaults to `mail`).
    pub mail_attr: String,
}

/// Reads the LDAP configuration from the environment.
///
/// Returns `None` when LDAP is not configured at all (neither `LDAP_ADDR`
/// nor `LDAP_BASE_DN` is set). `LDAP_TLS` defaults to `false`,
/// `LDAP_USER_NAME_ATTR` to `"cn"`, and `LDAP_USER_MAIL_ATTR` to `"mail"`.
///
/// # Panics
///
/// Panics when only one of `LDAP_ADDR`/`LDAP_BASE_DN` is set, or when
/// `LDAP_TLS` is not one of `1`/`0`/`true`/`false`/`TRUE`/`FALSE`.
fn get_ldap_config() -> Option<LdapConfig> {
    let addr = var("LDAP_ADDR").ok();
    let base_dn = var("LDAP_BASE_DN").ok();
    // Match on the pair instead of `is_some()` + `unwrap()`: the compiler
    // guarantees every combination is handled and no unwrap can slip through.
    match (addr, base_dn) {
        (Some(addr), Some(base_dn)) => {
            let tls = var("LDAP_TLS").unwrap_or_else(|_| "false".to_owned());
            let tls = match tls.as_ref() {
                "1" | "true" | "TRUE" => true,
                "0" | "false" | "FALSE" => false,
                _ => panic!("Invalid LDAP configuration : tls"),
            };
            let user_name_attr = var("LDAP_USER_NAME_ATTR").unwrap_or_else(|_| "cn".to_owned());
            let mail_attr = var("LDAP_USER_MAIL_ATTR").unwrap_or_else(|_| "mail".to_owned());
            Some(LdapConfig {
                addr,
                base_dn,
                tls,
                user_name_attr,
                mail_attr,
            })
        }
        // A half-configured LDAP setup is almost certainly an operator
        // mistake: fail loudly instead of silently disabling LDAP.
        (Some(_), None) | (None, Some(_)) => {
            panic!("Invalid LDAP configuration : both LDAP_ADDR and LDAP_BASE_DN must be set")
        }
        (None, None) => None,
    }
}
lazy_static! {
pub static ref CONFIG: Config = Config {
base_url: var("BASE_URL").unwrap_or_else(|_| format!(
@ -267,5 +302,6 @@ lazy_static! {
default_theme: var("DEFAULT_THEME").unwrap_or_else(|_| "default-light".to_owned()),
media_directory: var("MEDIA_UPLOAD_DIRECTORY")
.unwrap_or_else(|_| "static/media".to_owned()),
ldap: get_ldap_config(),
};
}

@ -1,4 +1,4 @@
use crate::Connection;
use crate::{instance::Instance, Connection, CONFIG};
use diesel::r2d2::{
ConnectionManager, CustomizeConnection, Error as ConnError, Pool, PooledConnection,
};
@ -13,6 +13,20 @@ use std::ops::Deref;
pub type DbPool = Pool<ConnectionManager<Connection>>;
/// Initializes a database pool.
///
/// The connection string comes from `CONFIG.database_url` and the pool
/// limits from `CONFIG.db_min_idle` / `CONFIG.db_max_size`. A connection is
/// checked out once, eagerly, and handed to `Instance::cache_local`.
///
/// Returns `None` when the pool can't be built or when no connection can be
/// checked out for that initial call.
pub fn init_pool() -> Option<DbPool> {
    let manager = ConnectionManager::<Connection>::new(CONFIG.database_url.as_str());
    let mut builder = DbPool::builder()
        .connection_customizer(Box::new(PragmaForeignKey))
        .min_idle(CONFIG.db_min_idle);
    if let Some(max_size) = CONFIG.db_max_size {
        builder = builder.max_size(max_size);
    }
    let pool = builder.build(manager).ok()?;
    // Don't unwrap here: a failed checkout should surface as `None`,
    // matching this function's `Option` signature, instead of panicking.
    Instance::cache_local(&pool.get().ok()?);
    Some(pool)
}
// From rocket documentation
// Connection request guard type: a wrapper around an r2d2 pooled connection.

@ -98,7 +98,6 @@ pub(crate) mod tests {
source: String::new(),
cover_id: None,
},
&rockets.searcher,
)
.unwrap();

@ -17,7 +17,11 @@ extern crate serde_json;
#[macro_use]
extern crate tantivy;
extern crate riker;
use plume_common::activity_pub::inbox::InboxError;
use riker::actors::{ActorSystem, ChannelRef};
use std::sync::Arc;
#[cfg(not(any(feature = "sqlite", feature = "postgres")))]
compile_error!("Either feature \"sqlite\" or \"postgres\" must be enabled for this crate.");
@ -30,6 +34,10 @@ pub type Connection = diesel::SqliteConnection;
#[cfg(all(not(feature = "sqlite"), feature = "postgres"))]
pub type Connection = diesel::PgConnection;
use once_cell::sync::OnceCell;
pub static ACTORS: OnceCell<Arc<ActorSystem>> = OnceCell::new();
pub static POST_CHAN: OnceCell<ChannelRef<crate::search::searcher::PostEvent>> = OnceCell::new();// TODO: define wrapper enum of Post
/// All the possible errors that can be encoutered in this crate
#[derive(Debug)]
pub enum Error {
@ -40,6 +48,7 @@ pub enum Error {
Io(std::io::Error),
MissingApProperty,
NotFound,
DbPool,
Request,
SerDe,
Search(search::SearcherError),
@ -303,6 +312,10 @@ mod tests {
db_conn::DbConn((*DB_POOL).get().unwrap())
}
/// Hands out a clone of the shared test database pool.
///
/// Cloning a `DbPool` is cheap (it is reference-counted internally), so
/// every test actor/helper can take its own copy. The explicit `<'a>`
/// lifetime the original carried was unused and has been dropped
/// (clippy: `extra_unused_lifetimes`).
pub fn pool() -> db_conn::DbPool {
    (*DB_POOL).clone()
}
lazy_static! {
static ref DB_POOL: db_conn::DbPool = {
let pool = db_conn::DbPool::builder()
@ -323,6 +336,9 @@ mod tests {
searcher: Arc::new(search::tests::get_searcher(&CONFIG.search_tokenizers)),
worker: Arc::new(ScheduledThreadPool::new(2)),
user: None,
actors: Arc::new(
riker::actors::ActorSystem::new().expect("Couldn't create test actor system"),
),
}
}
}

@ -3,6 +3,7 @@ pub use self::module::PlumeRocket;
#[cfg(not(test))]
mod module {
use crate::{db_conn::DbConn, search, users};
use riker::actors::ActorSystem;
use rocket::{
request::{self, FlashMessage, FromRequest, Request},
Outcome, State,
@ -18,6 +19,7 @@ mod module {
pub searcher: Arc<search::Searcher>,
pub worker: Arc<ScheduledThreadPool>,
pub flash_msg: Option<(String, String)>,
pub actors: Arc<ActorSystem>,
}
impl<'a, 'r> FromRequest<'a, 'r> for PlumeRocket {
@ -30,6 +32,7 @@ mod module {
let worker = request.guard::<'_, State<'_, Arc<ScheduledThreadPool>>>()?;
let searcher = request.guard::<'_, State<'_, Arc<search::Searcher>>>()?;
let flash_msg = request.guard::<FlashMessage<'_, '_>>().succeeded();
let actors = request.guard::<'_, State<'_, Arc<ActorSystem>>>()?;
Outcome::Success(PlumeRocket {
conn,
intl,
@ -37,6 +40,7 @@ mod module {
flash_msg: flash_msg.map(|f| (f.name().into(), f.msg().into())),
worker: worker.clone(),
searcher: searcher.clone(),
actors: actors.clone(),
})
}
}
@ -45,6 +49,7 @@ mod module {
#[cfg(test)]
mod module {
use crate::{db_conn::DbConn, search, users};
use riker::actors::ActorSystem;
use rocket::{
request::{self, FromRequest, Request},
Outcome, State,
@ -58,6 +63,7 @@ mod module {
pub user: Option<users::User>,
pub searcher: Arc<search::Searcher>,
pub worker: Arc<ScheduledThreadPool>,
pub actors: Arc<ActorSystem>,
}
impl<'a, 'r> FromRequest<'a, 'r> for PlumeRocket {
@ -68,11 +74,13 @@ mod module {
let user = request.guard::<users::User>().succeeded();
let worker = request.guard::<'_, State<'_, Arc<ScheduledThreadPool>>>()?;
let searcher = request.guard::<'_, State<'_, Arc<search::Searcher>>>()?;
let actors = request.guard::<'_, State<'_, Arc<ActorSystem>>>()?;
Outcome::Success(PlumeRocket {
conn,
user,
worker: worker.clone(),
searcher: searcher.clone(),
actors: actors.clone(),
})
}
}

@ -1,7 +1,17 @@
use crate::{
ap_url, blogs::Blog, instance::Instance, medias::Media, mentions::Mention, post_authors::*,
safe_string::SafeString, schema::posts, search::Searcher, tags::*, timeline::*, users::User,
Connection, Error, PlumeRocket, Result, CONFIG,
ap_url,
blogs::Blog,
instance::Instance,
medias::Media,
mentions::Mention,
post_authors::*,
safe_string::SafeString,
schema::posts,
search::{AddDocument, Searcher, UpdateDocument, PostEvent::*},
tags::*,
timeline::*,
users::User,
Connection, Error, PlumeRocket, Result, ACTORS, CONFIG,
};
use activitypub::{
activity::{Create, Delete, Update},
@ -19,12 +29,14 @@ use plume_common::{
},
utils::md_to_html,
};
use riker::actors::*;
use serde_json;
use std::collections::HashSet;
pub type LicensedArticle = CustomObject<Licensed, Article>;
#[derive(Queryable, Identifiable, Clone, AsChangeset)]
#[derive(Debug, Queryable, Identifiable, Clone, AsChangeset)]
#[changeset_options(treat_none_as_null = "true")]
pub struct Post {
pub id: i32,
@ -63,7 +75,7 @@ impl Post {
find_by!(posts, find_by_ap_url, ap_url as &str);
last!(posts);
pub fn insert(conn: &Connection, new: NewPost, searcher: &Searcher) -> Result<Self> {
pub fn insert(conn: &Connection, new: NewPost) -> Result<Self> {
diesel::insert_into(posts::table)
.values(new)
.execute(conn)?;
@ -78,14 +90,22 @@ impl Post {
let _: Post = post.save_changes(conn)?;
}
searcher.add_document(conn, &post)?;
if post.published {
let searcher_actor = ACTORS.get().unwrap().clone().select("/plume/user/searcher-actor").unwrap();
searcher_actor.try_tell(AddDocument(post.clone()), None);
}
Ok(post)
}
pub fn update(&self, conn: &Connection, searcher: &Searcher) -> Result<Self> {
pub fn update(&self, conn: &Connection) -> Result<Self> {
diesel::update(self).set(self).execute(conn)?;
let post = Self::get(conn, self.id)?;
searcher.update_document(conn, &post)?;
if post.published {
crate::POST_CHAN.get().unwrap().tell(Publish { msg: PostUpdated(post.clone()), topic: "post.updated".into() }, None);
}
Ok(post)
}
@ -560,7 +580,6 @@ impl FromId<PlumeRocket> for Post {
fn from_activity(c: &PlumeRocket, article: LicensedArticle) -> Result<Self> {
let conn = &*c.conn;
let searcher = &c.searcher;
let license = article.custom_props.license_string().unwrap_or_default();
let article = article.object;
@ -605,7 +624,6 @@ impl FromId<PlumeRocket> for Post {
source: article.ap_object_props.source_object::<Source>()?.content,
cover_id: cover,
},
searcher,
)?;
for author in authors {
@ -647,6 +665,9 @@ impl FromId<PlumeRocket> for Post {
Timeline::add_to_all_timelines(c, &post, Kind::Original)?;
let searcher_actor = ACTORS.get().unwrap().clone().select("/plume/user/searcher-actor").unwrap();
searcher_actor.try_tell(AddDocument(post.clone()), None);
Ok(post)
}
}
@ -728,7 +749,7 @@ impl AsObject<User, Update, &PlumeRocket> for PostUpdate {
fn activity(self, c: &PlumeRocket, actor: User, _id: &str) -> Result<()> {
let conn = &*c.conn;
let searcher = &c.searcher;
let searcher_actor = ACTORS.get().unwrap().clone().select("/plume/user/searcher-actor").unwrap();
let mut post = Post::from_id(c, &self.ap_url, None).map_err(|(_, e)| e)?;
if !post.is_author(conn, actor.id)? {
@ -791,7 +812,8 @@ impl AsObject<User, Update, &PlumeRocket> for PostUpdate {
post.update_hashtags(conn, hashtags)?;
}
post.update(conn, searcher)?;
post.update(conn)?;
searcher_actor.try_tell(UpdateDocument(post.clone()), None);
Ok(())
}
}
@ -833,7 +855,6 @@ mod tests {
source: "Hello".into(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
PostAuthor::insert(

@ -1,5 +1,5 @@
mod query;
mod searcher;
pub mod searcher;
mod tokenizer;
pub use self::query::PlumeQuery as Query;
pub use self::searcher::*;
@ -7,7 +7,7 @@ pub use self::tokenizer::TokenizerKind;
#[cfg(test)]
pub(crate) mod tests {
use super::{Query, Searcher, TokenizerKind};
use super::{Query, Searcher};
use diesel::Connection;
use plume_common::utils::random_hex;
use std::env::temp_dir;
@ -20,15 +20,16 @@ pub(crate) mod tests {
posts::{NewPost, Post},
safe_string::SafeString,
tests::db,
tests::pool,
CONFIG,
};
/// Builds a `Searcher` for tests in a unique temp directory.
///
/// Opens the index if the directory already exists, otherwise creates
/// it; either way a fresh DB pool from `pool()` backs the searcher.
/// (The merged diff had left both the old 2-arg and new 3-arg calls in
/// place; this keeps only the post-change version.)
pub(crate) fn get_searcher(tokenizers: &SearchTokenizerConfig) -> Searcher {
    let dir = temp_dir().join(&format!("plume-test-{}", random_hex()));
    if dir.exists() {
        Searcher::open(&dir, pool(), tokenizers)
    } else {
        Searcher::create(&dir, pool(), tokenizers)
    }
    .unwrap()
}
@ -103,20 +104,20 @@ pub(crate) mod tests {
/// An index created by `Searcher::create` can be re-opened afterwards.
/// (Resolves the merged diff: only the 3-arg `pool()` calls remain.)
fn open() {
    let dir = temp_dir().join(format!("plume-test-{}", random_hex()));
    {
        Searcher::create(&dir, pool(), &CONFIG.search_tokenizers).unwrap();
    }
    Searcher::open(&dir, pool(), &CONFIG.search_tokenizers).unwrap();
}
#[test]
/// `Searcher::open` fails on a missing index, and succeeds after
/// `Searcher::create` has run.
/// (Resolves the merged diff: only the 3-arg `pool()` calls remain.)
fn create() {
    let dir = temp_dir().join(format!("plume-test-{}", random_hex()));
    // Opening an index that was never created must fail.
    assert!(Searcher::open(&dir, pool(), &CONFIG.search_tokenizers).is_err());
    {
        Searcher::create(&dir, pool(), &CONFIG.search_tokenizers).unwrap();
    }
    Searcher::open(&dir, pool(), &CONFIG.search_tokenizers).unwrap(); //verify it's well created
}
#[test]
@ -144,9 +145,9 @@ pub(crate) mod tests {
source: "".to_owned(),
cover_id: None,
},
&searcher,
)
.unwrap();
searcher.add_document(&post).unwrap();
PostAuthor::insert(
conn,
NewPostAuthor {
@ -158,26 +159,26 @@ pub(crate) mod tests {
searcher.commit();
assert_eq!(
searcher.search_document(conn, Query::from_str(&title).unwrap(), (0, 1))[0].id,
searcher.search_document(Query::from_str(&title).unwrap(), (0, 1))[0].id,
post.id
);
let newtitle = random_hex()[..8].to_owned();
post.title = newtitle.clone();
post.update(conn, &searcher).unwrap();
searcher.update_document(&post).unwrap();
searcher.commit();
assert_eq!(
searcher.search_document(conn, Query::from_str(&newtitle).unwrap(), (0, 1))[0].id,
searcher.search_document(Query::from_str(&newtitle).unwrap(), (0, 1))[0].id,
post.id
);
assert!(searcher
.search_document(conn, Query::from_str(&title).unwrap(), (0, 1))
.search_document(Query::from_str(&title).unwrap(), (0, 1))
.is_empty());
post.delete(conn, &searcher).unwrap();
searcher.commit();
assert!(searcher
.search_document(conn, Query::from_str(&newtitle).unwrap(), (0, 1))
.search_document(Query::from_str(&newtitle).unwrap(), (0, 1))
.is_empty());
Ok(())
});
@ -213,9 +214,9 @@ pub(crate) mod tests {
source: "".to_owned(),
cover_id: None,
},
&searcher,
)
.unwrap();
searcher.add_document(&post).unwrap();
searcher.commit();

@ -1,14 +1,15 @@
use crate::{
config::SearchTokenizerConfig, instance::Instance, posts::Post, schema::posts,
search::query::PlumeQuery, tags::Tag, Connection, Result,
config::SearchTokenizerConfig, db_conn::DbPool, instance::Instance, posts::Post, schema::posts,
search::query::PlumeQuery, tags::Tag, Error, Result, CONFIG,
};
use chrono::Datelike;
use chrono::{Datelike, Utc};
use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl};
use itertools::Itertools;
use std::{cmp, fs::create_dir_all, path::Path, sync::Mutex};
use riker::actors::*;
use std::{cmp, fs::create_dir_all, io, path::Path, sync::Arc, sync::Mutex};
use tantivy::{
collector::TopDocs, directory::MmapDirectory, schema::*, Index, IndexReader, IndexWriter,
ReloadPolicy, Term,
ReloadPolicy, TantivyError, Term,
};
use whatlang::{detect as detect_lang, Lang};
@ -18,15 +19,174 @@ pub enum SearcherError {
WriteLockAcquisitionError,
IndexOpeningError,
IndexEditionError,
InvalidIndexDataError,
}
/// Full-text search over posts, backed by a Tantivy index.
pub struct Searcher {
    // The on-disk Tantivy index holding one document per published post.
    index: Index,
    // Read handle; reload policy is configured where the searcher is built.
    reader: IndexReader,
    // Single writer, lazily taken/dropped; `None` after `drop_writer`-style
    // teardown. Guarded by a Mutex because Tantivy allows only one writer.
    writer: Mutex<Option<IndexWriter>>,
    // Database pool so index operations can load posts/authors themselves
    // instead of taking a `&Connection` parameter.
    dbpool: DbPool,
}
/// Request to add a post to the search index.
#[derive(Clone, Debug)]
pub struct AddDocument(pub Post);

/// Request to re-index a post after it changed.
#[derive(Clone, Debug)]
pub struct UpdateDocument(pub Post);

/// Request to remove a post from the search index.
/// Field made `pub` for consistency with `AddDocument`/`UpdateDocument`
/// (it was the only one of the three with a private payload).
#[derive(Clone, Debug)]
pub struct DeleteDocument(pub Post);

/// Events published on the post channel; `SearcherActor` subscribes to
/// these to keep the search index in sync with the database.
#[derive(Clone, Debug)]
pub enum PostEvent {
    PostPublished(Post),
    PostUpdated(Post),
    PostDeleted(Post),
}

impl From<PostEvent> for Post {
    /// Extracts the post carried by any event variant.
    fn from(event: PostEvent) -> Self {
        use PostEvent::*;
        // All three variants carry the same payload; one or-pattern arm
        // replaces the three identical arms.
        match event {
            PostPublished(post) | PostUpdated(post) | PostDeleted(post) => post,
        }
    }
}
/// Actor that applies [`PostEvent`]s to the shared search index.
pub struct SearcherActor(Arc<Searcher>);

impl Actor for SearcherActor {
    type Msg = PostEvent;

    // forwards Msg to the respective Receive<T> implementation
    fn recv(&mut self, ctx: &Context<Self::Msg>, msg: Self::Msg, sender: Sender) {
        self.receive(ctx, msg, sender);
    }
}

impl SearcherActor {
    /// Handles one post event by mutating the search index.
    ///
    /// Index errors are logged instead of `unwrap()`ed: a transient
    /// indexing failure must not panic the actor and lose later events.
    fn receive(&mut self, _ctx: &Context<PostEvent>, msg: PostEvent, _sender: Sender) {
        use PostEvent::*;
        let res = match msg {
            PostPublished(post) => self.0.add_document(&post),
            PostUpdated(post) => self.0.update_document(&post),
            PostDeleted(post) => {
                // Deletion is infallible (it only enqueues a delete term).
                self.0.delete_document(&post);
                Ok(())
            }
        };
        if let Err(err) = res {
            eprintln!("SearcherActor: failed to update search index: {:?}", err);
        }
    }
}

impl ActorFactoryArgs<Arc<Searcher>> for SearcherActor {
    fn create_args(searcher: Arc<Searcher>) -> Self {
        SearcherActor(searcher)
    }
}
impl Searcher {
/// Initializes a new `Searcher`, ready to be used by
/// Plume.
///
/// The main task of this function is to try everything
/// to get a valid `Searcher`:
///
/// - first it tries to open the search index normally (using the options from `CONFIG`)
/// - if it fails, it makes a back-up of the index files, deletes the original ones,
///   and recreate the whole index. It removes the backup only if the re-creation
///   succeeds.
///
/// # Panics
///
/// This function panics if it needs to create a backup and it can't, or if it fails
/// to recreate the search index.
///
/// After that, it can also panic if there are still errors remaining.
///
/// The panic messages are normally explicit enough for a human to
/// understand how to fix the issue when they see it.
pub fn new(db_pool: DbPool) -> Self {
    // We try to open the index a first time
    let searcher = match Self::open(
        &CONFIG.search_index,
        db_pool.clone(),
        &CONFIG.search_tokenizers,
    ) {
        // The index may be corrupted, inexistent or use an older format.
        // In this case, we can easily recover by deleting and re-creating it.
        Err(Error::Search(SearcherError::InvalidIndexDataError)) => {
            // First attempt: create in place. This only fails when the old
            // index files are still present, in which case we move them
            // aside (timestamped backup) and try again on a clean slate.
            if Self::create(
                &CONFIG.search_index,
                db_pool.clone(),
                &CONFIG.search_tokenizers,
            )
            .is_err()
            {
                let current_path = Path::new(&CONFIG.search_index);
                let backup_path =
                    format!("{}.{}", &current_path.display(), Utc::now().timestamp());
                let backup_path = Path::new(&backup_path);
                // If we can't even back up the old index, abort loudly.
                std::fs::rename(current_path, backup_path)
                    .expect("Error while backing up search index directory for re-creation");
                if Self::create(
                    &CONFIG.search_index,
                    db_pool.clone(),
                    &CONFIG.search_tokenizers,
                )
                .is_ok()
                {
                    // Re-creation succeeded: the backup is no longer needed,
                    // but failing to remove it is not fatal — just report it.
                    if std::fs::remove_dir_all(backup_path).is_err() {
                        eprintln!(
                            "error on removing backup directory: {}. it remains",
                            backup_path.display()
                        );
                    }
                } else {
                    panic!("Error while re-creating search index in new index format. Remove search index and run `plm search init` manually.");
                }
            }
            // The index was re-created above; open it for real this time.
            Self::open(&CONFIG.search_index, db_pool, &CONFIG.search_tokenizers)
        }
        // If it opened successfully or if it was another kind of
        // error (that we don't know how to handle), don't do anything more
        other => other,
    };
    // At this point, if there are still errors, we just panic
    #[allow(clippy::match_wild_err_arm)]
    match searcher {
        Err(Error::Search(e)) => match e {
            SearcherError::WriteLockAcquisitionError => panic!(
                r#"
Your search index is locked. Plume can't start. To fix this issue
make sure no other Plume instance is started, and run:
plm search unlock
Then try to restart Plume.
"#
            ),
            SearcherError::IndexOpeningError => panic!(
                r#"
Plume was unable to open the search index. If you created the index
before, make sure to run Plume in the same directory it was created in, or
to set SEARCH_INDEX accordingly. If you did not yet create the search
index, run this command:
plm search init
Then try to restart Plume
"#
            ),
            // Any other search error is unexpected here; fail with its Debug
            // representation via the (intentional) unwrap-on-Err idiom.
            e => Err(e).unwrap(),
        },
        Err(_) => panic!("Unexpected error while opening search index"),
        Ok(s) => s,
    }
}
pub fn schema() -> Schema {
let tag_indexing = TextOptions::default().set_indexing_options(
TextFieldIndexing::default()
@ -66,7 +226,11 @@ impl Searcher {
schema_builder.build()
}
pub fn create(path: &dyn AsRef<Path>, tokenizers: &SearchTokenizerConfig) -> Result<Self> {
pub fn create(
path: &dyn AsRef<Path>,
dbpool: DbPool,
tokenizers: &SearchTokenizerConfig,
) -> Result<Self> {
let schema = Self::schema();
create_dir_all(path).map_err(|_| SearcherError::IndexCreationError)?;
@ -94,10 +258,15 @@ impl Searcher {
.try_into()
.map_err(|_| SearcherError::IndexCreationError)?,
index,
dbpool,
})
}
pub fn open(path: &dyn AsRef<Path>, tokenizers: &SearchTokenizerConfig) -> Result<Self> {
pub fn open(
path: &dyn AsRef<Path>,
dbpool: DbPool,
tokenizers: &SearchTokenizerConfig,
) -> Result<Self> {
let mut index =
Index::open(MmapDirectory::open(path).map_err(|_| SearcherError::IndexOpeningError)?)
.map_err(|_| SearcherError::IndexOpeningError)?;
@ -135,12 +304,25 @@ impl Searcher {
.reader_builder()
.reload_policy(ReloadPolicy::Manual)
.try_into()
.map_err(|_| SearcherError::IndexCreationError)?,
.map_err(|e| {
if let TantivyError::IOError(err) = e {
let err: io::Error = err.into();
if err.kind() == io::ErrorKind::InvalidData {
// Search index was created in older Tantivy format.
SearcherError::InvalidIndexDataError
} else {
SearcherError::IndexCreationError
}
} else {
SearcherError::IndexCreationError
}
})?,
index,
dbpool,
})
}
pub fn add_document(&self, conn: &Connection, post: &Post) -> Result<()> {
pub fn add_document(&self, post: &Post) -> Result<()> {
if !post.published {
return Ok(());
}
@ -162,15 +344,19 @@ impl Searcher {
let lang = schema.get_field("lang").unwrap();
let license = schema.get_field("license").unwrap();
let conn = match self.dbpool.get() {
Ok(c) => c,
Err(_) => return Err(Error::DbPool),
};
let mut writer = self.writer.lock().unwrap();
let writer = writer.as_mut().unwrap();
writer.add_document(doc!(
post_id => i64::from(post.id),
author => post.get_authors(conn)?.into_iter().map(|u| u.fqn).join(" "),
author => post.get_authors(&conn)?.into_iter().map(|u| u.fqn).join(" "),
creation_date => i64::from(post.creation_date.num_days_from_ce()),
instance => Instance::get(conn, post.get_blog(conn)?.instance_id)?.public_domain,
tag => Tag::for_post(conn, post.id)?.into_iter().map(|t| t.tag).join(" "),
blog_name => post.get_blog(conn)?.title,
instance => Instance::get(&conn, post.get_blog(&conn)?.instance_id)?.public_domain,
tag => Tag::for_post(&conn, post.id)?.into_iter().map(|t| t.tag).join(" "),
blog_name => post.get_blog(&conn)?.title,
content => post.content.get().clone(),
subtitle => post.subtitle.clone(),
title => post.title.clone(),
@ -190,17 +376,12 @@ impl Searcher {
writer.delete_term(doc_id);
}
pub fn update_document(&self, conn: &Connection, post: &Post) -> Result<()> {
pub fn update_document(&self, post: &Post) -> Result<()> {
self.delete_document(post);
self.add_document(conn, post)
self.add_document(post)
}
pub fn search_document(
&self,
conn: &Connection,
query: PlumeQuery,
(min, max): (i32, i32),
) -> Vec<Post> {
pub fn search_document(&self, query: PlumeQuery, (min, max): (i32, i32)) -> Vec<Post> {
let schema = self.index.schema();
let post_id = schema.get_field("post_id").unwrap();
@ -209,24 +390,33 @@ impl Searcher {
let searcher = self.reader.searcher();
let res = searcher.search(&query.into_query(), &collector).unwrap();
let conn = match self.dbpool.get() {
Ok(c) => c,
Err(_) => return Vec::new(),
};
res.get(min as usize..)
.unwrap_or(&[])
.iter()
.filter_map(|(_, doc_add)| {
let doc = searcher.doc(*doc_add).ok()?;
let id = doc.get_first(post_id)?;
Post::get(conn, id.i64_value() as i32).ok()
Post::get(&conn, id.i64_value() as i32).ok()
//borrow checker don't want me to use filter_map or and_then here
})
.collect()
}
pub fn fill(&self, conn: &Connection) -> Result<()> {
pub fn fill(&self) -> Result<()> {
let conn = match self.dbpool.get() {
Ok(c) => c,
Err(_) => return Err(Error::DbPool),
};
for post in posts::table
.filter(posts::published.eq(true))
.load::<Post>(conn)?
.load::<Post>(&conn)?
{
self.update_document(conn, &post)?
self.update_document(&post)?
}
Ok(())
}

@ -408,7 +408,6 @@ mod tests {
source: "you must say GNU/Linux, not Linux!!!".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
assert!(gnu_tl.matches(r, &gnu_post, Kind::Original).unwrap());
@ -428,7 +427,6 @@ mod tests {
source: "so is Microsoft".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
assert!(!gnu_tl.matches(r, &non_free_post, Kind::Original).unwrap());
@ -481,7 +479,6 @@ mod tests {
subtitle: "".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
assert!(my_tl.matches(r, &post, Kind::Original).unwrap()); // matches because of "blog in fav_blogs" (and there is no cover)
@ -503,7 +500,6 @@ mod tests {
subtitle: "".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
assert!(!my_tl.matches(r, &post, Kind::Like(&users[1])).unwrap());
@ -549,7 +545,6 @@ mod tests {
source: "you must say GNU/Linux, not Linux!!!".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
@ -568,7 +563,6 @@ mod tests {
source: "so is Microsoft".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
@ -608,7 +602,6 @@ mod tests {
source: "you must say GNU/Linux, not Linux!!!".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
gnu_post
@ -745,7 +738,6 @@ mod tests {
source: "you must say GNU/Linux, not Linux!!!".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();
gnu_post.update_tags(conn, vec![Tag::build_activity("free".to_owned()).unwrap()]).unwrap();
@ -779,7 +771,6 @@ mod tests {
source: "you must say GNU/Linux, not Linux!!!".to_string(),
cover_id: None,
},
&r.searcher,
)
.unwrap();

@ -1,8 +1,8 @@
use crate::{
ap_url, blocklisted_emails::BlocklistedEmail, blogs::Blog, db_conn::DbConn, follows::Follow,
instance::*, medias::Media, notifications::Notification, post_authors::PostAuthor, posts::Post,
safe_string::SafeString, schema::users, search::Searcher, timeline::Timeline, Connection,
Error, PlumeRocket, Result, ITEMS_PER_PAGE,
ap_url, blocklisted_emails::BlocklistedEmail, blogs::Blog, config::CONFIG, db_conn::DbConn,
follows::Follow, instance::*, medias::Media, notifications::Notification,
post_authors::PostAuthor, posts::Post, safe_string::SafeString, schema::users,
search::Searcher, timeline::Timeline, Connection, Error, PlumeRocket, Result, ITEMS_PER_PAGE,
};
use activitypub::{
activity::Delete,
@ -14,6 +14,7 @@ use activitypub::{
use bcrypt;
use chrono::{NaiveDateTime, Utc};
use diesel::{self, BelongingToDsl, ExpressionMethods, OptionalExtension, QueryDsl, RunQueryDsl};
use ldap3::{LdapConn, Scope, SearchEntry};
use openssl::{
hash::MessageDigest,
pkey::{PKey, Private},
@ -292,11 +293,116 @@ impl User {
bcrypt::hash(pass, 10).map_err(Error::from)
}
pub fn auth(&self, pass: &str) -> bool {
self.hashed_password
.clone()
.map(|hashed| bcrypt::verify(pass, hashed.as_ref()).unwrap_or(false))
.unwrap_or(false)
fn ldap_register(conn: &Connection, name: &str, password: &str) -> Result<User> {
if CONFIG.ldap.is_none() {
return Err(Error::NotFound);
}
let ldap = CONFIG.ldap.as_ref().unwrap();
let mut ldap_conn = LdapConn::new(&ldap.addr).map_err(|_| Error::NotFound)?;
let ldap_name = format!("{}={},{}", ldap.user_name_attr, name, ldap.base_dn);
let bind = ldap_conn
.simple_bind(&ldap_name, password)
.map_err(|_| Error::NotFound)?;
if bind.success().is_err() {
return Err(Error::NotFound);
}
let search = ldap_conn
.search(
&ldap_name,
Scope::Base,
"(|(objectClass=person)(objectClass=user))",
vec![&ldap.mail_attr],
)
.map_err(|_| Error::NotFound)?
.success()
.map_err(|_| Error::NotFound)?;
for entry in search.0 {
let entry = SearchEntry::construct(entry);
let email = entry.attrs.get("mail").and_then(|vec| vec.first());
if email.is_some() {
let _ = ldap_conn.unbind();
return NewUser::new_local(
conn,
name.to_owned(),
name.to_owned(),
Role::Normal,
"",
email.unwrap().to_owned(),
None,
);
}
}
let _ = ldap_conn.unbind();
Err(Error::NotFound)
}
/// Verifies `password` for this user with a simple LDAP bind.
///
/// Returns `false` when LDAP is not configured, the server cannot be
/// reached, or the bind is rejected.
fn ldap_login(&self, password: &str) -> bool {
    // Guard clauses replace the nested if-let pyramid; behavior is identical.
    let ldap = match CONFIG.ldap.as_ref() {
        Some(ldap) => ldap,
        None => return false,
    };
    let mut conn = match LdapConn::new(&ldap.addr) {
        Ok(conn) => conn,
        Err(_) => return false,
    };
    let dn = format!(
        "{}={},{}",
        ldap.user_name_attr, &self.username, ldap.base_dn
    );
    conn.simple_bind(&dn, password)
        .map(|bind| bind.success().is_ok())
        .unwrap_or(false)
}
/// Authenticates `ident` (email or local username) with `password`.
///
/// Resolution order:
/// 1. look the user up by email, then by local username (local users only);
/// 2. users with a stored bcrypt hash are verified locally;
/// 3. users without a hash fall back to an LDAP bind;
/// 4. unknown users may still be auto-registered from LDAP.
///
/// On failure a dummy bcrypt verification is performed so response
/// timing does not reveal whether the account exists.
///
/// # Errors
///
/// `Error::NotFound` for any authentication failure.
pub fn login(conn: &Connection, ident: &str, password: &str) -> Result<User> {
    let local_id = Instance::get_local()?.id;
    let user = User::find_by_email(conn, ident)
        .or_else(|_| User::find_by_name(conn, ident, local_id))
        .and_then(|u| {
            // Only local accounts may log in directly.
            if u.instance_id == local_id {
                Ok(u)
            } else {
                Err(Error::NotFound)
            }
        });

    match user {
        Ok(user) if user.hashed_password.is_some() => {
            if bcrypt::verify(password, user.hashed_password.as_ref().unwrap()).unwrap_or(false)
            {
                Ok(user)
            } else {
                Err(Error::NotFound)
            }
        }
        Ok(user) => {
            if user.ldap_login(password) {
                Ok(user)
            } else {
                Err(Error::NotFound)
            }
        }
        e => {
            if let Ok(user) = User::ldap_register(conn, ident, password) {
                return Ok(user);
            }
            // If no user was found and LDAP auto-registration failed,
            // fake-verify a password to keep timing constant.
            // BUG FIX: the previous `.expect("No user is registered")`
            // panicked the whole request when the user table was empty;
            // now we simply skip the dummy verification in that case.
            if let Ok(first_user) = User::get(&*conn, 1) {
                if let Some(pass) = first_user.hashed_password {
                    let _ = bcrypt::verify(password, &pass);
                }
            }
            e
        }
    }
}
pub fn reset_password(&self, conn: &Connection, pass: &str) -> Result<()> {
@ -983,7 +1089,7 @@ impl NewUser {
role: Role,
summary: &str,
email: String,
password: String,
password: Option<String>,
) -> Result<User> {
let (pub_key, priv_key) = gen_keypair();
let instance = Instance::get_local()?;
@ -1001,7 +1107,7 @@ impl NewUser {
summary: summary.to_owned(),
summary_html: SafeString::new(&utils::md_to_html(&summary, None, false, None).0),
email: Some(email),
hashed_password: Some(password),
hashed_password: password,
instance_id: instance.id,
public_key: String::from_utf8(pub_key).or(Err(Error::Signature))?,
private_key: Some(String::from_utf8(priv_key).or(Err(Error::Signature))?),

@ -62,30 +62,20 @@ pub fn oauth(
let conn = &*rockets.conn;
let app = App::find_by_client_id(conn, &query.client_id)?;
if app.client_secret == query.client_secret {
if let Ok(user) = User::find_by_fqn(&rockets, &query.username) {
if user.auth(&query.password) {
let token = ApiToken::insert(
conn,
NewApiToken {
app_id: app.id,
user_id: user.id,
value: random_hex(),
scopes: query.scopes.clone(),
},
)?;
Ok(Json(json!({
"token": token.value
})))
} else {
Ok(Json(json!({
"error": "Invalid credentials"
})))
}
if let Ok(user) = User::login(conn, &query.username, &query.password) {
let token = ApiToken::insert(
conn,
NewApiToken {
app_id: app.id,
user_id: user.id,
value: random_hex(),
scopes: query.scopes.clone(),
},
)?;
Ok(Json(json!({
"token": token.value
})))
} else {
// Making fake password verification to avoid different
// response times that would make it possible to know
// if a username is registered or not.
User::get(conn, 1)?.auth(&query.password);
Ok(Json(json!({
"error": "Invalid credentials"
})))

@ -7,7 +7,8 @@ use plume_api::posts::*;
use plume_common::{activity_pub::broadcast, utils::md_to_html};
use plume_models::{
blogs::Blog, db_conn::DbConn, instance::Instance, medias::Media, mentions::*, post_authors::*,
posts::*, safe_string::SafeString, tags::*, timeline::*, users::User, Error, PlumeRocket,
posts::*, safe_string::SafeString, tags::*, timeline::*, users::User,
Error, PlumeRocket,
};
#[get("/posts/<id>")]
@ -104,7 +105,6 @@ pub fn create(
rockets: PlumeRocket,
) -> Api<PostData> {
let conn = &*rockets.conn;
let search = &rockets.searcher;
let worker = &rockets.worker;
let author = User::get(conn, auth.0.user_id)?;
@ -154,7 +154,6 @@ pub fn create(
source: payload.source.clone(),
cover_id: payload.cover_id,
},
search,
)?;
PostAuthor::insert(

@ -10,15 +10,14 @@ extern crate serde_json;
#[macro_use]
extern crate validator_derive;
extern crate riker;
use clap::App;
use diesel::r2d2::ConnectionManager;
use plume_models::{
db_conn::{DbPool, PragmaForeignKey},
instance::Instance,
migrations::IMPORTED_MIGRATIONS,
search::{Searcher as UnmanagedSearcher, SearcherError},
Connection, Error, CONFIG,
db_conn::init_pool, migrations::IMPORTED_MIGRATIONS, search::{Searcher, SearcherActor, PostEvent},
ACTORS, CONFIG, POST_CHAN,
};
use riker::actors::*;
use rocket_csrf::CsrfFairingBuilder;
use scheduled_thread_pool::ScheduledThreadPool;
use std::process::exit;
@ -45,26 +44,6 @@ include!(concat!(env!("OUT_DIR"), "/templates.rs"));
compile_i18n!();
/// Initializes a database pool.
fn init_pool() -> Option<DbPool> {
match dotenv::dotenv() {
Ok(path) => println!("Configuration read from {}", path.display()),
Err(ref e) if e.not_found() => eprintln!("no .env was found"),
e => e.map(|_| ()).unwrap(),
}
let manager = ConnectionManager::<Connection>::new(CONFIG.database_url.as_str());
let mut builder = DbPool::builder()
.connection_customizer(Box::new(PragmaForeignKey))
.min_idle(CONFIG.db_min_idle);
if let Some(max_size) = CONFIG.db_max_size {
builder = builder.max_size(max_size);
};
let pool = builder.build(manager).ok()?;
Instance::cache_local(&pool.get().unwrap());
Some(pool)
}
fn main() {
App::new("Plume")
.bin_name("plume")
@ -79,6 +58,13 @@ and https://docs.joinplu.me/installation/init for more info.
"#,
)
.get_matches();
match dotenv::dotenv() {
Ok(path) => println!("Configuration read from {}", path.display()),
Err(ref e) if e.not_found() => eprintln!("no .env was found"),
e => e.map(|_| ()).unwrap(),
}
let dbpool = init_pool().expect("main: database pool initialization error");
if IMPORTED_MIGRATIONS
.is_pending(&dbpool.get().unwrap())
@ -97,37 +83,18 @@ Then try to restart Plume.
)
}
let workpool = ScheduledThreadPool::with_name("worker {}", num_cpus::get());
// we want a fast exit here, so
#[allow(clippy::match_wild_err_arm)]
let searcher = match UnmanagedSearcher::open(&CONFIG.search_index, &CONFIG.search_tokenizers) {
Err(Error::Search(e)) => match e {
SearcherError::WriteLockAcquisitionError => panic!(
r#"
Your search index is locked. Plume can't start. To fix this issue
make sure no other Plume instance is started, and run:
plm search unlock
let searcher = Arc::new(Searcher::new(dbpool.clone()));
Then try to restart Plume.
"#
),
SearcherError::IndexOpeningError => panic!(
r#"
Plume was unable to open the search index. If you created the index
before, make sure to run Plume in the same directory it was created in, or
to set SEARCH_INDEX accordingly. If you did not yet create the search
index, run this command:
plm search init
Then try to restart Plume
"#
),
e => Err(e).unwrap(),
},
Err(_) => panic!("Unexpected error while opening search index"),
Ok(s) => Arc::new(s),
};
let sys = SystemBuilder::new().name("plume").create().unwrap();
let chan: ChannelRef<PostEvent> = channel("post_events", &sys).unwrap();
ACTORS.set(Arc::new(sys)).unwrap();
let searcher_actor = ACTORS
.get().unwrap()
.clone()
.actor_of_args::<SearcherActor, _>("searcher-actor", searcher.clone())
.unwrap();
POST_CHAN.set(chan).unwrap();
POST_CHAN.get().unwrap().tell(Subscribe { actor: Box::new(searcher_actor), topic: "*".into() }, None);
let commiter = searcher.clone();
workpool.execute_with_fixed_delay(
@ -274,6 +241,7 @@ Then try to restart Plume
.manage(dbpool)
.manage(Arc::new(workpool))
.manage(searcher)
.manage(ACTORS.get().unwrap().clone())
.manage(include_i18n!())
.attach(
CsrfFairingBuilder::new()

@ -297,7 +297,7 @@ pub fn update(
post.source = form.content.clone();
post.license = form.license.clone();
post.cover_id = form.cover;
post.update(&*conn, &rockets.searcher)
post.update(&*conn)
.expect("post::update: update error");
if post.published {
@ -473,7 +473,6 @@ pub fn create(
source: form.content.clone(),
cover_id: form.cover,
},
&rockets.searcher,
)
.expect("post::create: post save error");

@ -51,7 +51,6 @@ macro_rules! param_to_query {
#[get("/search?<query..>")]
pub fn search(query: Option<Form<SearchQuery>>, rockets: PlumeRocket) -> Ructe {
let conn = &*rockets.conn;
let query = query.map(Form::into_inner).unwrap_or_default();
let page = query.page.unwrap_or_default();
let mut parsed_query =
@ -72,7 +71,7 @@ pub fn search(query: Option<Form<SearchQuery>>, rockets: PlumeRocket) -> Ructe {
} else {
let res = rockets
.searcher
.search_document(&conn, parsed_query, page.limits());
.search_document(parsed_query, page.limits());
let next_page = if res.is_empty() { 0 } else { page.0 + 1 };
render!(search::result(
&rockets.to_context(),

@ -48,38 +48,19 @@ pub fn create(
rockets: PlumeRocket,
) -> RespondOrRedirect {
let conn = &*rockets.conn;
let user = User::find_by_email(&*conn, &form.email_or_name)
.or_else(|_| User::find_by_fqn(&rockets, &form.email_or_name));
let mut errors = match form.validate() {
Ok(_) => ValidationErrors::new(),
Err(e) => e,
};
let user = User::login(conn, &form.email_or_name, &form.password);
let user_id = if let Ok(user) = user {
if !user.auth(&form.password) {
let mut err = ValidationError::new("invalid_login");
err.message = Some(Cow::from("Invalid username, or password"));
errors.add("email_or_name", err);
String::new()
} else {
user.id.to_string()
}
user.id.to_string()
} else {
// Fake password verification, only to avoid different login times
// that could be used to see if an email adress is registered or not
User::get(&*conn, 1)
.map(|u| u.auth(&form.password))
.expect("No user is registered");
let mut err = ValidationError::new("invalid_login");
err.message = Some(Cow::from("Invalid username, or password"));
errors.add("email_or_name", err);
String::new()
};
if !errors.is_empty() {
return render!(session::login(&rockets.to_context(), None, &*form, errors)).into();
}
};
cookies.add_private(
Cookie::build(AUTH_COOKIE, user_id)

@ -541,7 +541,7 @@ pub fn create(
Role::Normal,
"",
form.email.to_string(),
User::hash_pass(&form.password).map_err(to_validation)?,
Some(User::hash_pass(&form.password).map_err(to_validation)?),
).map_err(to_validation)?;
Ok(Flash::success(
Redirect::to(uri!(super::session::new: m = _)),

Loading…
Cancel
Save