Divers work on tags and mentions #295
6 changed files with 135 additions and 47 deletions

@@ -5,6 +5,7 @@ use rocket::{
     http::uri::Uri,
     response::{Redirect, Flash}
 };
+use std::collections::HashSet;
 
 /// Remove non alphanumeric characters and CamelCase a string
 pub fn make_actor_id(name: String) -> String {

@@ -29,7 +30,7 @@ enum State {
 }
 
 /// Returns (HTML, mentions, hashtags)
-pub fn md_to_html(md: &str) -> (String, Vec<String>, Vec<String>) {
+pub fn md_to_html(md: &str) -> (String, HashSet<String>, HashSet<String>) {
     let parser = Parser::new_ext(md, Options::all());
 
     let (parser, mentions, hashtags): (Vec<Vec<Event>>, Vec<Vec<String>>, Vec<Vec<String>>) = parser.map(|evt| match evt {

@@ -129,8 +130,7 @@ pub fn md_to_html(md: &str) -> (String, Vec<String>, Vec<String>) {
 
     let mut buf = String::new();
     html::push_html(&mut buf, parser);
-    let hashtags = hashtags.collect();
-    (buf, mentions.collect(), hashtags)
+    (buf, mentions.collect(), hashtags.collect())
 }
 
 #[cfg(test)]
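
With the return type moving from Vec to HashSet, a mention or hashtag that appears several times in the markdown body is now reported to callers only once. A minimal standalone sketch of that collection step (it does not call the real parser; the input strings are invented):

    use std::collections::HashSet;

    fn main() {
        // Pretend the parser reported these hashtags while walking the document.
        let seen = vec!["rust", "plume", "rust", "fediverse", "plume"];

        // Collecting into a HashSet is what the new signature guarantees:
        // duplicates collapse to a single entry.
        let hashtags: HashSet<String> = seen.into_iter().map(String::from).collect();

        assert_eq!(hashtags.len(), 3);
    }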

@@ -102,6 +102,12 @@ impl Mention {
             })
         }
     }
+
+    pub fn delete(&self, conn: &Connection) {
+        //find related notifications and delete them
+        Notification::find(conn, notification_kind::MENTION, self.id).map(|n| n.delete(conn));
+        diesel::delete(self).execute(conn).expect("Mention::delete: mention deletion error");
+    }
 }
 
 impl Notify<Connection> for Mention {
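
Mention::delete removes the MENTION notification that points at the mention before removing the mention row itself, so dropping a mention during an edit no longer leaves an orphaned notification. A toy in-memory sketch of that ordering (the ids and tables are invented, not the diesel schema):

    use std::collections::HashMap;

    fn main() {
        // Toy stand-ins for the two tables involved.
        let mut mentions: HashMap<i32, &str> = HashMap::from([(1, "alice"), (2, "bob")]);
        // notification id -> the mention it refers to
        let mut notifications: HashMap<i32, i32> = HashMap::from([(10, 1), (11, 2)]);

        // Mirrors Mention::delete: related notifications go first, then the mention row,
        // so nothing is left pointing at a missing record.
        let mention_id = 1;
        notifications.retain(|_, target| *target != mention_id);
        mentions.remove(&mention_id);

        assert!(!mentions.contains_key(&mention_id));
        assert!(notifications.values().all(|t| *t != mention_id));
    }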

@@ -106,4 +106,8 @@ impl Notification {
         });
         json
     }
+
+    pub fn delete(&self, conn: &Connection) {
+        diesel::delete(self).execute(conn).expect("Notification::delete: notification deletion error");
+    }
 }

@@ -6,14 +6,17 @@ use activitypub::{
 use canapi::{Error, Provider};
 use chrono::{NaiveDateTime, TimeZone, Utc};
 use diesel::{self, RunQueryDsl, QueryDsl, ExpressionMethods, BelongingToDsl};
-use heck::KebabCase;
+use heck::{CamelCase, KebabCase};
 use serde_json;
 
 use plume_api::posts::PostEndpoint;
-use plume_common::activity_pub::{
-    Hashtag, Source,
-    PUBLIC_VISIBILTY, Id, IntoId,
-    inbox::{Deletable, FromActivity}
+use plume_common::{
+    activity_pub::{
+        Hashtag, Source,
+        PUBLIC_VISIBILTY, Id, IntoId,
+        inbox::{Deletable, FromActivity}
+    },
+    utils::md_to_html
 };
 use {BASE_URL, ap_url, Connection};
 use blogs::Blog;

@@ -26,6 +29,7 @@ use tags::Tag;
 use users::User;
 use schema::posts;
 use safe_string::SafeString;
+use std::collections::HashSet;
 
 #[derive(Queryable, Identifiable, Serialize, Clone, AsChangeset)]
 pub struct Post {

@@ -385,9 +389,93 @@ impl Post {
             post.source = source.content;
         }
+
+        let mut txt_hashtags = md_to_html(&post.source).2.into_iter().map(|s| s.to_camel_case()).collect::<HashSet<_>>();
+        if let Some(serde_json::Value::Array(mention_tags)) = updated.object_props.tag.clone() {
+            let mut mentions = vec![];
+            let mut tags = vec![];
+            let mut hashtags = vec![];
+            for tag in mention_tags.into_iter() {
+                serde_json::from_value::<link::Mention>(tag.clone())
+                    .map(|m| mentions.push(m))
+                    .ok();
+
+                serde_json::from_value::<Hashtag>(tag.clone())
+                    .map(|t| {
+                        let tag_name = t.name_string().expect("Post::from_activity: tag name error");
+                        if txt_hashtags.remove(&tag_name) {
+                            hashtags.push(t);
+                        } else {
+                            tags.push(t);
+                        }
+                    }).ok();
+            }
+            post.update_mentions(conn, mentions);
+            post.update_tags(conn, tags);
+            post.update_hashtags(conn, hashtags);
+        }

         post.update(conn);
     }
+
+    pub fn update_mentions(&self, conn: &Connection, mentions: Vec<link::Mention>) {
+        let mentions = mentions.into_iter().map(|m| (m.link_props.href_string().ok()
+                .and_then(|ap_url| User::find_by_ap_url(conn, ap_url))
+                .map(|u| u.id),m))
+            .filter_map(|(id, m)| if let Some(id)=id {Some((m,id))} else {None}).collect::<Vec<_>>();
+
+        let old_mentions = Mention::list_for_post(&conn, self.id);
+        let old_user_mentioned = old_mentions.iter()
+            .map(|m| m.mentioned_id).collect::<HashSet<_>>();
+        for (m,id) in mentions.iter() {
+            if !old_user_mentioned.contains(&id) {
+                Mention::from_activity(&*conn, m.clone(), self.id, true, true);
+            }
+        }
+
+        let new_mentions = mentions.into_iter().map(|(_m,id)| id).collect::<HashSet<_>>();
+        for m in old_mentions.iter().filter(|m| !new_mentions.contains(&m.mentioned_id)) {
+            m.delete(&conn);
+        }
+    }
+
+    pub fn update_tags(&self, conn: &Connection, tags: Vec<Hashtag>) {
+        let tags_name = tags.iter().filter_map(|t| t.name_string().ok()).collect::<HashSet<_>>();
+
+        let old_tags = Tag::for_post(&*conn, self.id).into_iter().collect::<Vec<_>>();
+        let old_tags_name = old_tags.iter().filter_map(|tag| if !tag.is_hashtag {Some(tag.tag.clone())} else {None}).collect::<HashSet<_>>();
+
+        for t in tags.into_iter() {
+            if !t.name_string().map(|n| old_tags_name.contains(&n)).unwrap_or(true) {
+                Tag::from_activity(conn, t, self.id, false);
+            }
+        }
+
+        for ot in old_tags {
+            if !tags_name.contains(&ot.tag) {
+                ot.delete(conn);
+            }
+        }
+    }
+
+    pub fn update_hashtags(&self, conn: &Connection, tags: Vec<Hashtag>) {
+        let tags_name = tags.iter().filter_map(|t| t.name_string().ok()).collect::<HashSet<_>>();
+
+        let old_tags = Tag::for_post(&*conn, self.id).into_iter().collect::<Vec<_>>();
+        let old_tags_name = old_tags.iter().filter_map(|tag| if tag.is_hashtag {Some(tag.tag.clone())} else {None}).collect::<HashSet<_>>();
+
+        for t in tags.into_iter() {
+            if !t.name_string().map(|n| old_tags_name.contains(&n)).unwrap_or(true) {
+                Tag::from_activity(conn, t, self.id, true);
+            }
+        }
+
+        for ot in old_tags {
+            if !tags_name.contains(&ot.tag) {
+                ot.delete(conn);
+            }
+        }
+    }
+
     pub fn to_json(&self, conn: &Connection) -> serde_json::Value {
         let blog = self.get_blog(conn);
         json!({
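
update_mentions, update_tags and update_hashtags all follow the same reconciliation pattern: compare what is already stored for the post with what the edit brings in, create what is missing, delete what is no longer referenced, and leave the overlap alone. A self-contained sketch of that set difference on plain strings (the data is invented; the real code works on Tag and Mention records):

    use std::collections::HashSet;

    fn main() {
        // Tags currently stored for the post vs. tags sent with the update.
        let old: HashSet<&str> = HashSet::from(["Rust", "ActivityPub"]);
        let new: HashSet<&str> = HashSet::from(["Rust", "Federation"]);

        // Create what is new, drop what disappeared, keep the intersection.
        let to_insert: Vec<&str> = new.difference(&old).copied().collect();
        let to_delete: Vec<&str> = old.difference(&new).copied().collect();

        assert_eq!(to_insert, ["Federation"]);
        assert_eq!(to_delete, ["ActivityPub"]);
    }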

@@ -447,6 +535,7 @@ impl FromActivity<Article, Connection> for Post {
         }
 
         // save mentions and tags
+        let mut hashtags = md_to_html(&post.source).2.into_iter().map(|s| s.to_camel_case()).collect::<HashSet<_>>();
         if let Some(serde_json::Value::Array(tags)) = article.object_props.tag.clone() {
             for tag in tags.into_iter() {
                 serde_json::from_value::<link::Mention>(tag.clone())

@@ -454,7 +543,10 @@ impl FromActivity<Article, Connection> for Post {
                     .ok();
 
                 serde_json::from_value::<Hashtag>(tag.clone())
-                    .map(|t| Tag::from_activity(conn, t, post.id))
+                    .map(|t| {
+                        let tag_name = t.name_string().expect("Post::from_activity: tag name error");
+                        Tag::from_activity(conn, t, post.id, hashtags.remove(&tag_name));
+                    })
                     .ok();
             }
         }
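
Both this path and the update path use the same test to tell hashtags from regular tags: a Hashtag from the activity's tag array counts as a hashtag only if its name also appears in the set extracted from the markdown source by md_to_html; otherwise it is stored as an ordinary tag. A standalone sketch of that classification (the tag names are invented):

    use std::collections::HashSet;

    fn main() {
        // Hashtags actually written in the markdown body, already CamelCased.
        let mut txt_hashtags: HashSet<String> =
            HashSet::from(["Rust".to_string(), "Fediverse".to_string()]);

        // Names found in the activity's `tag` array.
        let activity_tags = vec!["Rust", "Fediverse", "Tutorial"];

        let mut hashtags = vec![];
        let mut tags = vec![];
        for name in activity_tags {
            // Seen in the text -> hashtag; otherwise -> regular tag from the editor's tag field.
            if txt_hashtags.remove(name) {
                hashtags.push(name);
            } else {
                tags.push(name);
            }
        }

        assert_eq!(hashtags, ["Rust", "Fediverse"]);
        assert_eq!(tags, ["Tutorial"]);
    }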

@@ -475,6 +567,9 @@ impl Deletable<Connection, Delete> for Post {
         act.object_props.set_id_string(format!("{}#delete", self.ap_url)).expect("Post::delete: id error");
         act.object_props.set_to_link_vec(vec![Id::new(PUBLIC_VISIBILTY)]).expect("Post::delete: to error");
 
+        for m in Mention::list_for_post(&conn, self.id) {
+            m.delete(conn);
+        }
         diesel::delete(self).execute(conn).expect("Post::delete: DB error");
         act
     }

@@ -37,14 +37,24 @@ impl Tag {
         ht
     }
 
-    pub fn from_activity(conn: &Connection, tag: Hashtag, post: i32) -> Tag {
+    pub fn from_activity(conn: &Connection, tag: Hashtag, post: i32, is_hashtag: bool) -> Tag {
         Tag::insert(conn, NewTag {
             tag: tag.name_string().expect("Tag::from_activity: name error"),
-            is_hashtag: false,
+            is_hashtag,
             post_id: post
         })
     }
+
+    pub fn build_activity(conn: &Connection, tag: String) -> Hashtag {
+        let mut ht = Hashtag::default();
+        ht.set_href_string(ap_url(format!("{}/tag/{}",
+            Instance::get_local(conn).expect("Tag::into_activity: local instance not found error").public_domain,
+            tag)
+        )).expect("Tag::into_activity: href error");
+        ht.set_name_string(tag).expect("Tag::into_activity: name error");
+        ht
+    }
 
     pub fn delete(&self, conn: &Connection) {
         diesel::delete(self).execute(conn).expect("Tag::delete: database error");
     }
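
Tag::build_activity turns a plain tag name into a Hashtag activity whose href points at the local instance's tag page. A rough sketch of just the URL construction, with a made-up domain and a simplified stand-in for the ap_url helper (the real helper also honours the HTTP/HTTPS setting):

    // Simplified stand-in for the ap_url helper; assumes HTTPS.
    fn ap_url(url: String) -> String {
        format!("https://{}", url)
    }

    fn main() {
        let public_domain = "example.social"; // would come from Instance::get_local
        let tag = "OpenSource";
        let href = ap_url(format!("{}/tag/{}", public_domain, tag));
        assert_eq!(href, "https://example.social/tag/OpenSource");
    }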

@@ -5,7 +5,7 @@ use rocket::{State, request::LenientForm};
 use rocket::response::{Redirect, Flash};
 use rocket_contrib::Template;
 use serde_json;
-use std::{collections::HashMap, borrow::Cow};
+use std::{collections::{HashMap, HashSet}, borrow::Cow};
 use validator::{Validate, ValidationError, ValidationErrors};
 use workerpool::{Pool, thunk::*};
 

@@ -207,43 +207,16 @@ fn update(blog: String, slug: String, user: User, conn: DbConn, data: LenientForm
         let post = post.update_ap_url(&*conn);
 
         if post.published {
-            for m in mentions.into_iter() {
-                Mention::from_activity(&*conn, Mention::build_activity(&*conn, m), post.id, true, true);
-            }
+            post.update_mentions(&conn, mentions.into_iter().map(|m| Mention::build_activity(&conn, m)).collect());
         }
 
-        let old_tags = Tag::for_post(&*conn, post.id).into_iter().collect::<Vec<_>>();
-        let tags = form.tags.split(",").map(|t| t.trim().to_camel_case()).filter(|t| t.len() > 0).collect::<Vec<_>>();
-        for tag in tags.iter() {
-            if old_tags.iter().all(|ot| &ot.tag!=tag || ot.is_hashtag) {
-                Tag::insert(&*conn, NewTag {
-                    tag: tag.clone(),
-                    is_hashtag: false,
-                    post_id: post.id
-                });
-            }
-        }
-        for ot in old_tags.iter() {
-            if !tags.contains(&ot.tag) && !ot.is_hashtag {
-                ot.delete(&conn);
-            }
-        }
+        let tags = form.tags.split(",").map(|t| t.trim().to_camel_case()).filter(|t| t.len() > 0)
+            .collect::<HashSet<_>>().into_iter().map(|t| Tag::build_activity(&conn, t)).collect::<Vec<_>>();
+        post.update_tags(&conn, tags);
 
-        let hashtags = hashtags.into_iter().map(|h| h.to_camel_case()).collect::<Vec<_>>();
-        for hashtag in hashtags.iter() {
-            if old_tags.iter().all(|ot| &ot.tag!=hashtag || !ot.is_hashtag) {
-                Tag::insert(&*conn, NewTag {
-                    tag: hashtag.clone(),
-                    is_hashtag: true,
-                    post_id: post.id,
-                });
-            }
-        }
-        for ot in old_tags {
-            if !hashtags.contains(&ot.tag) && ot.is_hashtag {
-                ot.delete(&conn);
-            }
-        }
+        let hashtags = hashtags.into_iter().map(|h| h.to_camel_case()).collect::<HashSet<_>>()
+            .into_iter().map(|t| Tag::build_activity(&conn, t)).collect::<Vec<_>>();
+        post.update_tags(&conn, hashtags);
 
         if post.published {
            let act = post.update_activity(&*conn);
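
The update route no longer hand-rolls the insert/delete loops; it normalises the form input and hands the result to post.update_mentions and post.update_tags. A small sketch of just the normalisation step (made-up form input; the real code also CamelCases each entry with heck and wraps it with Tag::build_activity before delegating):

    use std::collections::HashSet;

    fn main() {
        // What a user might type in the tags field of the edit form.
        let form_tags = "rust, fediverse,rust ,  ,Plume";

        // Same shape as the route code: split, trim, drop empties, dedupe.
        let tags: HashSet<String> = form_tags
            .split(',')
            .map(|t| t.trim().to_string())
            .filter(|t| !t.is_empty())
            .collect();

        assert_eq!(tags.len(), 3); // "rust", "fediverse", "Plume"
        // These would then become Hashtag activities and be diffed against the
        // tags already stored for the post by update_tags.
    }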

@@ -334,7 +307,7 @@ fn create(blog_name: String, data: LenientForm<NewPostForm>, user: User, conn: D
             author_id: user.id
         });
 
-        let tags = form.tags.split(",").map(|t| t.trim().to_camel_case()).filter(|t| t.len() > 0);
+        let tags = form.tags.split(",").map(|t| t.trim().to_camel_case()).filter(|t| t.len() > 0).collect::<HashSet<_>>();
         for tag in tags {
             Tag::insert(&*conn, NewTag {
                 tag: tag,