Deduplicate tags and mentions

Use sets to work on tags and mentions, allowing them to be
deduplicated and making the code clearer.
This may also help with distinguishing tags and hashtags later.
This commit is contained in:
Trinity Pointard 2018-10-27 20:44:42 +02:00 committed by Baptiste Gelez
parent 83d6da29a5
commit 1689813df4
2 changed files with 22 additions and 23 deletions

View File

@ -5,6 +5,7 @@ use rocket::{
http::uri::Uri,
response::{Redirect, Flash}
};
use std::collections::HashSet;
/// Remove non alphanumeric characters and CamelCase a string
pub fn make_actor_id(name: String) -> String {
@ -29,7 +30,7 @@ enum State {
}
/// Returns (HTML, mentions, hashtags)
pub fn md_to_html(md: &str) -> (String, Vec<String>, Vec<String>) {
pub fn md_to_html(md: &str) -> (String, HashSet<String>, HashSet<String>) {
let parser = Parser::new_ext(md, Options::all());
let (parser, mentions, hashtags): (Vec<Vec<Event>>, Vec<Vec<String>>, Vec<Vec<String>>) = parser.map(|evt| match evt {
@ -129,8 +130,7 @@ pub fn md_to_html(md: &str) -> (String, Vec<String>, Vec<String>) {
let mut buf = String::new();
html::push_html(&mut buf, parser);
let hashtags = hashtags.collect();
(buf, mentions.collect(), hashtags)
(buf, mentions.collect(), hashtags.collect())
}
#[cfg(test)]

View File

@ -5,7 +5,7 @@ use rocket::{State, request::LenientForm};
use rocket::response::{Redirect, Flash};
use rocket_contrib::Template;
use serde_json;
use std::{collections::HashMap, borrow::Cow};
use std::{collections::{HashMap, HashSet}, borrow::Cow};
use validator::{Validate, ValidationError, ValidationErrors};
use workerpool::{Pool, thunk::*};
@ -216,15 +216,16 @@ fn update(blog: String, slug: String, user: User, conn: DbConn, data: LenientFor
}
let old_tags = Tag::for_post(&*conn, post.id).into_iter().collect::<Vec<_>>();
let tags = form.tags.split(",").map(|t| t.trim().to_camel_case()).filter(|t| t.len() > 0).collect::<Vec<_>>();
for tag in tags.iter() {
if old_tags.iter().all(|ot| &ot.tag!=tag || ot.is_hashtag) {
Tag::insert(&*conn, NewTag {
tag: tag.clone(),
is_hashtag: false,
post_id: post.id
});
}
let old_non_hashtags = old_tags.iter().filter_map(|tag| if !tag.is_hashtag {Some(tag.tag.clone())} else {None}).collect();
let old_hashtags = old_tags.iter().filter_map(|tag| if tag.is_hashtag {Some(tag.tag.clone())} else {None}).collect();
let tags = form.tags.split(",").map(|t| t.trim().to_camel_case()).filter(|t| t.len() > 0).collect::<HashSet<_>>();
for tag in tags.difference(&old_non_hashtags) {
Tag::insert(&*conn, NewTag {
tag: tag.clone(),
is_hashtag: false,
post_id: post.id
});
}
for ot in old_tags.iter() {
if !tags.contains(&ot.tag) && !ot.is_hashtag {
@ -232,15 +233,13 @@ fn update(blog: String, slug: String, user: User, conn: DbConn, data: LenientFor
}
}
let hashtags = hashtags.into_iter().map(|h| h.to_camel_case()).collect::<Vec<_>>();
for hashtag in hashtags.iter() {
if old_tags.iter().all(|ot| &ot.tag!=hashtag || !ot.is_hashtag) {
Tag::insert(&*conn, NewTag {
tag: hashtag.clone(),
is_hashtag: true,
post_id: post.id,
});
}
let hashtags = hashtags.into_iter().map(|h| h.to_camel_case()).collect::<HashSet<_>>();
for hashtag in hashtags.difference(&old_hashtags) {
Tag::insert(&*conn, NewTag {
tag: hashtag.clone(),
is_hashtag: true,
post_id: post.id,
});
}
for ot in old_tags {
if !hashtags.contains(&ot.tag) && ot.is_hashtag {
@ -343,7 +342,7 @@ fn create(blog_name: String, data: LenientForm<NewPostForm>, user: User, conn: D
author_id: user.id
});
let tags = form.tags.split(",").map(|t| t.trim().to_camel_case()).filter(|t| t.len() > 0);
let tags = form.tags.split(",").map(|t| t.trim().to_camel_case()).filter(|t| t.len() > 0).collect::<HashSet<_>>();
for tag in tags {
Tag::insert(&*conn, NewTag {
tag: tag,