Centralize configuration and add some new config (#494)
Ideally, if someone could review the idea in [this comment](https://github.com/Plume-org/Plume/issues/273#issuecomment-474982184), I'd like to add its implementation to this PR as well.
This commit is contained in:
parent b945d1f602
commit 65bb50e88f
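For reviewers skimming the diff below: the change is largely mechanical. The scattered `BASE_URL`, `USE_HTTPS` and `DATABASE_URL` lazy statics are replaced by a single `CONFIG` value re-exported from a new `config` module in `plume_models`. A minimal before/after sketch of a typical call site (identifiers taken from the diff; this block itself is not part of the commit):

```rust
use diesel::Connection;
use plume_models::{Connection as Conn, CONFIG};

// Before: callers reached for individual statics, e.g.
//     Conn::establish(DATABASE_URL.as_str())
//     ap_url(&format!("{}/inbox/", BASE_URL.as_str()))
// After: the same values are read from the one centralized struct.
fn open_db() -> Conn {
    Conn::establish(CONFIG.database_url.as_str()).expect("Couldn't connect to the database")
}
```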
@@ -6,7 +6,7 @@ extern crate rpassword;
 
 use clap::App;
 use diesel::Connection;
-use plume_models::{Connection as Conn, DATABASE_URL};
+use plume_models::{Connection as Conn, CONFIG};
 use std::io::{self, prelude::*};
 
 mod instance;
@@ -24,7 +24,7 @@ fn main() {
     let matches = app.clone().get_matches();
 
     dotenv::dotenv().ok();
-    let conn = Conn::establish(DATABASE_URL.as_str());
+    let conn = Conn::establish(CONFIG.database_url.as_str());
 
     match matches.subcommand() {
         ("instance", Some(args)) => {
@@ -1,7 +1,7 @@
 use clap::{App, Arg, ArgMatches, SubCommand};
 use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl};
 
-use plume_models::{posts::Post, schema::posts, search::Searcher, Connection};
+use plume_models::{posts::Post, schema::posts, search::Searcher, Connection, CONFIG};
 use std::fs::{read_dir, remove_file};
 use std::io::ErrorKind;
 use std::path::Path;
@@ -64,9 +64,9 @@ pub fn run<'a>(args: &ArgMatches<'a>, conn: &Connection) {
 }
 
 fn init<'a>(args: &ArgMatches<'a>, conn: &Connection) {
-    let path = args.value_of("path").unwrap_or(".");
+    let path = args.value_of("path").map(|p| Path::new(p).join("search_index"))
+        .unwrap_or_else(|| Path::new(&CONFIG.search_index).to_path_buf());
     let force = args.is_present("force");
-    let path = Path::new(path).join("search_index");
 
     let can_do = match read_dir(path.clone()) {
         // try to read the directory specified
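To spell out the new fallback in the `init` hunk above: an explicit `--path` still gets `search_index` joined onto it, while the configured default from `CONFIG.search_index` is used verbatim. The same logic restated as a standalone helper (illustrative only; `effective_index_path` is not a name from the codebase):

```rust
use std::path::{Path, PathBuf};
use plume_models::CONFIG;

// Mirrors init() above: --path wins and gets "search_index" appended,
// otherwise the SEARCH_INDEX setting is taken as-is.
fn effective_index_path(cli_path: Option<&str>) -> PathBuf {
    cli_path
        .map(|p| Path::new(p).join("search_index"))
        .unwrap_or_else(|| Path::new(&CONFIG.search_index).to_path_buf())
}
```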
@@ -26,7 +26,7 @@ use safe_string::SafeString;
 use schema::blogs;
 use search::Searcher;
 use users::User;
-use {Connection, Error, Result, BASE_URL, USE_HTTPS};
+use {Connection, Error, Result, CONFIG};
 
 pub type CustomGroup = CustomObject<ApSignature, Group>;
 
@@ -142,7 +142,7 @@ impl Blog {
     }
 
     fn fetch_from_webfinger(conn: &Connection, acct: &str) -> Result<Blog> {
-        resolve(acct.to_owned(), *USE_HTTPS)?
+        resolve(acct.to_owned(), true)?
            .links
            .into_iter()
            .find(|l| l.mime_type == Some(String::from("application/activity+json")))
@@ -288,7 +288,7 @@ impl Blog {
        Blog::find_by_ap_url(conn, url).or_else(|_| {
            // The requested blog was not in the DB
            // We try to fetch it if it is remote
-           if Url::parse(url)?.host_str()? != BASE_URL.as_str() {
+           if Url::parse(url)?.host_str()? != CONFIG.base_url.as_str() {
                Blog::fetch_from_url(conn, url)
            } else {
                Err(Error::NotFound)
@@ -0,0 +1,65 @@
+use std::env::var;
+use rocket::Config as RocketConfig;
+use rocket::config::Limits;
+
+#[cfg(not(test))]
+const DB_NAME: &str = "plume";
+#[cfg(test)]
+const DB_NAME: &str = "plume_tests";
+
+pub struct Config {
+    pub base_url: String,
+    pub db_name: &'static str,
+    pub database_url: String,
+    pub search_index: String,
+    pub rocket: Result<RocketConfig, RocketError>,
+}
+
+#[derive(Debug, Clone)]
+pub enum RocketError {
+    InvalidEnv,
+    InvalidAddress,
+    InvalidSecretKey,
+}
+
+fn get_rocket_config() -> Result<RocketConfig, RocketError> {
+    let mut c = RocketConfig::active().map_err(|_| RocketError::InvalidEnv)?;
+
+    let address = var("ROCKET_ADDRESS").unwrap_or_else(|_| "localhost".to_owned());
+    let port = var("ROCKET_PORT").ok().map(|s| s.parse::<u16>().unwrap()).unwrap_or(7878);
+    let secret_key = var("ROCKET_SECRET_KEY").map_err(|_| RocketError::InvalidSecretKey)?;
+    let form_size = var("FORM_SIZE").unwrap_or_else(|_| "32".to_owned()).parse::<u64>().unwrap();
+    let activity_size = var("ACTIVITY_SIZE").unwrap_or_else(|_| "1024".to_owned()).parse::<u64>().unwrap();
+
+    c.set_address(address).map_err(|_| RocketError::InvalidAddress)?;
+    c.set_port(port);
+    c.set_secret_key(secret_key).map_err(|_| RocketError::InvalidSecretKey)?;
+
+    c.set_limits(Limits::new()
+        .limit("forms", form_size * 1024)
+        .limit("json", activity_size * 1024));
+
+    Ok(c)
+}
+
+lazy_static! {
+    pub static ref CONFIG: Config = Config {
+        base_url: var("BASE_URL").unwrap_or_else(|_| format!(
+            "127.0.0.1:{}",
+            var("ROCKET_PORT").unwrap_or_else(|_| "8000".to_owned())
+        )),
+        db_name: DB_NAME,
+        #[cfg(feature = "postgres")]
+        database_url: var("DATABASE_URL").unwrap_or_else(|_| format!(
+            "postgres://plume:plume@localhost/{}",
+            DB_NAME
+        )),
+        #[cfg(feature = "sqlite")]
+        database_url: var("DATABASE_URL").unwrap_or_else(|_| format!(
+            "{}.sqlite",
+            DB_NAME
+        )),
+        search_index: var("SEARCH_INDEX").unwrap_or_else(|_| "search_index".to_owned()),
+        rocket: get_rocket_config()
+    };
+}
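As a quick reference while reading the new module: every field has a default, so a process with an empty environment still gets a usable `CONFIG`. A hedged sketch of what such a process would observe (values read off the code above; the helper itself is illustrative):

```rust
use plume_models::CONFIG;

// With no relevant environment variables set, the defaults above yield roughly:
//   base_url     -> "127.0.0.1:8000" (or 127.0.0.1:$ROCKET_PORT when that is set)
//   database_url -> "postgres://plume:plume@localhost/plume" with the postgres
//                   feature, or "plume.sqlite" with the sqlite feature
//   search_index -> "search_index"
// FORM_SIZE and ACTIVITY_SIZE default to 32 and 1024 (KiB) for the Rocket limits.
fn print_effective_config() {
    println!("base_url     = {}", CONFIG.base_url);
    println!("database_url = {}", CONFIG.database_url);
    println!("search_index = {}", CONFIG.search_index);
}
```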
@@ -14,7 +14,7 @@ use plume_common::activity_pub::{
 };
 use schema::follows;
 use users::User;
-use {ap_url, Connection, Error, Result, BASE_URL};
+use {ap_url, Connection, Error, Result, CONFIG};
 
 #[derive(Clone, Queryable, Identifiable, Associations, AsChangeset)]
 #[belongs_to(User, foreign_key = "following_id")]
@@ -36,7 +36,7 @@ pub struct NewFollow {
 impl Follow {
     insert!(follows, NewFollow, |inserted, conn| {
         if inserted.ap_url.is_empty() {
-            inserted.ap_url = ap_url(&format!("{}/follows/{}", *BASE_URL, inserted.id));
+            inserted.ap_url = ap_url(&format!("{}/follows/{}", CONFIG.base_url, inserted.id));
             inserted.save_changes(conn).map_err(Error::from)
         } else {
             Ok(inserted)
@@ -88,7 +88,7 @@ impl Follow {
        )?;
 
        let mut accept = Accept::default();
-       let accept_id = ap_url(&format!("{}/follow/{}/accept", BASE_URL.as_str(), &res.id));
+       let accept_id = ap_url(&format!("{}/follow/{}/accept", CONFIG.base_url.as_str(), &res.id));
        accept.object_props.set_id_string(accept_id)?;
        accept.object_props.set_to_link(from.clone().into_id())?;
        accept.object_props.set_cc_link_vec::<Id>(vec![])?;
@@ -204,7 +204,7 @@ mod tests {
        .expect("Couldn't insert new follow");
        assert_eq!(
            follow.ap_url,
-           format!("https://{}/follows/{}", *BASE_URL, follow.id)
+           format!("https://{}/follows/{}", CONFIG.base_url, follow.id)
        );
 
        let follow = Follow::insert(
@@ -33,8 +33,6 @@ extern crate whatlang;
 #[macro_use]
 extern crate diesel_migrations;
 
-use std::env;
-
 #[cfg(not(any(feature = "sqlite", feature = "postgres")))]
 compile_error!("Either feature \"sqlite\" or \"postgres\" must be enabled for this crate.");
 #[cfg(all(feature = "sqlite", feature = "postgres"))]
@@ -277,34 +275,11 @@ macro_rules! last {
    };
 }
 
-lazy_static! {
-    pub static ref BASE_URL: String = env::var("BASE_URL").unwrap_or_else(|_| format!(
-        "127.0.0.1:{}",
-        env::var("ROCKET_PORT").unwrap_or_else(|_| String::from("8000"))
-    ));
-    pub static ref USE_HTTPS: bool = env::var("USE_HTTPS").map(|val| val == "1").unwrap_or(true);
-}
-
-#[cfg(not(test))]
-static DB_NAME: &str = "plume";
-#[cfg(test)]
-static DB_NAME: &str = "plume_tests";
-
-#[cfg(all(feature = "postgres", not(feature = "sqlite")))]
-lazy_static! {
-    pub static ref DATABASE_URL: String = env::var("DATABASE_URL")
-        .unwrap_or_else(|_| format!("postgres://plume:plume@localhost/{}", DB_NAME));
-}
-
-#[cfg(all(feature = "sqlite", not(feature = "postgres")))]
-lazy_static! {
-    pub static ref DATABASE_URL: String =
-        env::var("DATABASE_URL").unwrap_or_else(|_| format!("{}.sqlite", DB_NAME));
-}
+mod config;
+pub use config::CONFIG;
 
 pub fn ap_url(url: &str) -> String {
-    let scheme = if *USE_HTTPS { "https" } else { "http" };
-    format!("{}://{}", scheme, url)
+    format!("https://{}", url)
 }
 
 #[cfg(test)]
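One behavioural note on this hunk: with `USE_HTTPS` gone, `ap_url` now unconditionally builds `https` URLs. A small illustrative check (not a test taken from the commit):

```rust
use plume_models::ap_url;

fn main() {
    // The scheme is no longer switchable through USE_HTTPS.
    assert_eq!(ap_url("example.com/inbox/"), "https://example.com/inbox/");
}
```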
@@ -312,7 +287,7 @@ pub fn ap_url(url: &str) -> String {
 mod tests {
     use diesel::{dsl::sql_query, Connection, RunQueryDsl};
     use Connection as Conn;
-    use DATABASE_URL;
+    use CONFIG;
 
     #[cfg(feature = "sqlite")]
     embed_migrations!("../migrations/sqlite");
@@ -333,7 +308,7 @@ mod tests {
 
     pub fn db() -> Conn {
         let conn =
-            Conn::establish(&*DATABASE_URL.as_str()).expect("Couldn't connect to the database");
+            Conn::establish(CONFIG.database_url.as_str()).expect("Couldn't connect to the database");
         embedded_migrations::run(&conn).expect("Couldn't run migrations");
         #[cfg(feature = "sqlite")]
         sql_query("PRAGMA foreign_keys = on;")
@@ -31,7 +31,7 @@ use schema::posts;
 use search::Searcher;
 use tags::*;
 use users::User;
-use {ap_url, ApiResult, Connection, Error, Result, BASE_URL};
+use {ap_url, ApiResult, Connection, Error, Result, CONFIG};
 
 pub type LicensedArticle = CustomObject<Licensed, Article>;
 
@@ -354,7 +354,7 @@ impl Post {
        if post.ap_url.is_empty() {
            post.ap_url = ap_url(&format!(
                "{}/~/{}/{}/",
-               *BASE_URL,
+               CONFIG.base_url,
                post.get_blog(conn)?.fqn,
                post.slug
            ));
@@ -43,7 +43,7 @@ use posts::Post;
 use safe_string::SafeString;
 use schema::users;
 use search::Searcher;
-use {ap_url, Connection, Error, Result, BASE_URL, USE_HTTPS};
+use {ap_url, Connection, Error, Result, CONFIG};
 
 pub type CustomPerson = CustomObject<ApSignature, Person>;
 
@@ -239,7 +239,7 @@ impl User {
    }
 
    fn fetch_from_webfinger(conn: &Connection, acct: &str) -> Result<User> {
-       let link = resolve(acct.to_owned(), *USE_HTTPS)?
+       let link = resolve(acct.to_owned(), true)?
           .links
          .into_iter()
          .find(|l| l.mime_type == Some(String::from("application/activity+json")))
@@ -683,7 +683,7 @@ impl User {
            .set_followers_string(self.followers_endpoint.clone())?;
 
        let mut endpoints = Endpoint::default();
-       endpoints.set_shared_inbox_string(ap_url(&format!("{}/inbox/", BASE_URL.as_str())))?;
+       endpoints.set_shared_inbox_string(ap_url(&format!("{}/inbox/", CONFIG.base_url.as_str())))?;
        actor.ap_actor_props.set_endpoints_endpoint(endpoints)?;
 
        let mut public_key = PublicKey::default();
@@ -749,7 +749,7 @@ impl User {
        User::find_by_ap_url(conn, url).or_else(|_| {
            // The requested user was not in the DB
            // We try to fetch it if it is remote
-           if Url::parse(&url)?.host_str()? != BASE_URL.as_str() {
+           if Url::parse(&url)?.host_str()? != CONFIG.base_url.as_str() {
                User::fetch_from_url(conn, url)
            } else {
                Err(Error::NotFound)
@@ -5,7 +5,7 @@ extern crate diesel_migrations;
 extern crate plume_models;
 
 use diesel::Connection;
-use plume_models::{Connection as Conn, DATABASE_URL};
+use plume_models::{Connection as Conn, CONFIG};
 
 #[cfg(feature = "sqlite")]
 embed_migrations!("../migrations/sqlite");
@@ -14,7 +14,7 @@ embed_migrations!("../migrations/sqlite");
 embed_migrations!("../migrations/postgres");
 
 fn db() -> Conn {
-    let conn = Conn::establish(&*DATABASE_URL.as_str()).expect("Couldn't connect to the database");
+    let conn = Conn::establish(CONFIG.database_url.as_str()).expect("Couldn't connect to the database");
     embedded_migrations::run(&conn).expect("Couldn't run migrations");
     conn
 }
src/main.rs (52 changed lines)
@@ -42,12 +42,11 @@ use diesel::r2d2::ConnectionManager;
 use plume_models::{
     db_conn::{DbPool, PragmaForeignKey},
     search::{Searcher as UnmanagedSearcher, SearcherError},
-    Connection, Error, DATABASE_URL,
+    Connection, Error, CONFIG,
 };
-use rocket::{config::Limits, Config, State};
+use rocket::State;
 use rocket_csrf::CsrfFairingBuilder;
 use scheduled_thread_pool::ScheduledThreadPool;
-use std::env;
 use std::process::exit;
 use std::sync::{Arc, Mutex};
 use std::time::Duration;
@@ -72,7 +71,7 @@ type Searcher<'a> = State<'a, Arc<UnmanagedSearcher>>;
 fn init_pool() -> Option<DbPool> {
     dotenv::dotenv().ok();
 
-    let manager = ConnectionManager::<Connection>::new(DATABASE_URL.as_str());
+    let manager = ConnectionManager::<Connection>::new(CONFIG.database_url.as_str());
     DbPool::builder()
         .connection_customizer(Box::new(PragmaForeignKey))
         .build(manager)
@@ -84,15 +83,28 @@ fn main() {
     let workpool = ScheduledThreadPool::with_name("worker {}", num_cpus::get());
     // we want a fast exit here, so
     #[allow(clippy::match_wild_err_arm)]
-    let searcher = match UnmanagedSearcher::open(&"search_index") {
+    let searcher = match UnmanagedSearcher::open(&CONFIG.search_index) {
         Err(Error::Search(e)) => match e {
             SearcherError::WriteLockAcquisitionError => panic!(
-                r#"Your search index is locked. Plume can't start. To fix this issue
+                r#"
+Your search index is locked. Plume can't start. To fix this issue
 make sure no other Plume instance is started, and run:
 
     plm search unlock
 
 Then try to restart Plume.
+"#
+            ),
+            SearcherError::IndexOpeningError => panic!(
+                r#"
+Plume was unable to open the search index. If you created the index
+before, make sure to run Plume in the directory it was created, or
+to set SEARCH_INDEX accordingly. If you did not create the search
+index, run this command:
+
+    plm search init
+
+Then try to restart Plume
 "#
             ),
             e => Err(e).unwrap(),
@@ -115,38 +127,14 @@ Then try to restart Plume.
     })
     .expect("Error setting Ctrl-c handler");
 
-    let mut config = Config::active().unwrap();
-    config
-        .set_address(env::var("ROCKET_ADDRESS").unwrap_or_else(|_| "localhost".to_owned()))
-        .unwrap();
-    config.set_port(
-        env::var("ROCKET_PORT")
-            .ok()
-            .map(|s| s.parse::<u16>().unwrap())
-            .unwrap_or(7878),
-    );
-    let _ = env::var("ROCKET_SECRET_KEY").map(|k| config.set_secret_key(k).unwrap());
-    let form_size = &env::var("FORM_SIZE")
-        .unwrap_or_else(|_| "32".to_owned())
-        .parse::<u64>()
-        .unwrap();
-    let activity_size = &env::var("ACTIVITY_SIZE")
-        .unwrap_or_else(|_| "1024".to_owned())
-        .parse::<u64>()
-        .unwrap();
-    config.set_limits(
-        Limits::new()
-            .limit("forms", form_size * 1024)
-            .limit("json", activity_size * 1024),
-    );
 
     let mail = mail::init();
-    if mail.is_none() && config.environment.is_prod() {
+    if mail.is_none() && CONFIG.rocket.as_ref().unwrap().environment.is_prod() {
         println!("Warning: the email server is not configured (or not completely).");
         println!("Please refer to the documentation to see how to configure it.");
     }
 
-    rocket::custom(config)
+    rocket::custom(CONFIG.rocket.clone().unwrap())
         .mount(
             "/",
             routes![
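Since `main` now unwraps `CONFIG.rocket` in two places, a broken Rocket environment fails at startup with one of the `RocketError` variants from the new module. As a reading aid, here is what each variant corresponds to, written as a helper that could sit next to `get_rocket_config` in `config.rs` (hypothetical, not part of the commit):

```rust
// Hypothetical companion to get_rocket_config(), mapping each failure to its cause.
fn describe_rocket_error(e: &RocketError) -> &'static str {
    match e {
        // RocketConfig::active() failed, i.e. the active Rocket configuration
        // (environment / Rocket.toml) could not be loaded.
        RocketError::InvalidEnv => "the active Rocket configuration could not be loaded",
        // c.set_address(address) rejected the ROCKET_ADDRESS value.
        RocketError::InvalidAddress => "ROCKET_ADDRESS is not a usable address",
        // ROCKET_SECRET_KEY is unset, or set_secret_key() rejected it.
        RocketError::InvalidSecretKey => "ROCKET_SECRET_KEY is missing or invalid",
    }
}
```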
@@ -19,7 +19,7 @@ use mail::{build_mail, Mailer};
 use plume_models::{
     db_conn::DbConn,
     users::{User, AUTH_COOKIE},
-    Error, BASE_URL,
+    Error, CONFIG,
 };
 use routes::errors::ErrorPage;
 
@@ -175,7 +175,7 @@ pub fn password_reset_request(
        creation_date: Instant::now(),
    });
 
-   let link = format!("https://{}/password-reset/{}", *BASE_URL, id);
+   let link = format!("https://{}/password-reset/{}", CONFIG.base_url, id);
    if let Some(message) = build_mail(
        form.email.clone(),
        i18n!(intl.catalog, "Password reset"),
@@ -3,7 +3,7 @@ use rocket::response::Content;
 use serde_json;
 use webfinger::*;
 
-use plume_models::{ap_url, blogs::Blog, db_conn::DbConn, users::User, BASE_URL};
+use plume_models::{ap_url, blogs::Blog, db_conn::DbConn, users::User, CONFIG};
 
 #[get("/.well-known/nodeinfo")]
 pub fn nodeinfo() -> Content<String> {
@@ -13,11 +13,11 @@ pub fn nodeinfo() -> Content<String> {
        "links": [
            {
                "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
-               "href": ap_url(&format!("{domain}/nodeinfo/2.0", domain = BASE_URL.as_str()))
+               "href": ap_url(&format!("{domain}/nodeinfo/2.0", domain = CONFIG.base_url.as_str()))
            },
            {
                "rel": "http://nodeinfo.diaspora.software/ns/schema/2.1",
-               "href": ap_url(&format!("{domain}/nodeinfo/2.1", domain = BASE_URL.as_str()))
+               "href": ap_url(&format!("{domain}/nodeinfo/2.1", domain = CONFIG.base_url.as_str()))
            }
        ]
    })
@@ -36,7 +36,7 @@ pub fn host_meta() -> String {
 "#,
        url = ap_url(&format!(
            "{domain}/.well-known/webfinger?resource={{uri}}",
-           domain = BASE_URL.as_str()
+           domain = CONFIG.base_url.as_str()
        ))
    )
 }
@@ -45,7 +45,7 @@ struct WebfingerResolver;
 
 impl Resolver<DbConn> for WebfingerResolver {
    fn instance_domain<'a>() -> &'a str {
-       BASE_URL.as_str()
+       CONFIG.base_url.as_str()
    }
 
    fn find(acct: String, conn: DbConn) -> Result<Webfinger, ResolverError> {