Follow Rust and crates updates

Kitaiti Makoto 2021-11-28 07:53:13 +09:00
parent 9187e4dde9
commit ae3344f318
35 changed files with 208 additions and 160 deletions


@@ -41,9 +41,9 @@ fn main() {
         .expect("compile templates");
     compile_themes().expect("Theme compilation error");
-    recursive_copy(&Path::new("assets").join("icons"), &Path::new("static"))
+    recursive_copy(&Path::new("assets").join("icons"), Path::new("static"))
         .expect("Couldn't copy icons");
-    recursive_copy(&Path::new("assets").join("images"), &Path::new("static"))
+    recursive_copy(&Path::new("assets").join("images"), Path::new("static"))
         .expect("Couldn't copy images");
     create_dir_all(&Path::new("static").join("media")).expect("Couldn't init media directory");
@@ -97,12 +97,12 @@ fn compile_theme(path: &Path, out_dir: &Path) -> std::io::Result<()> {
         .components()
         .skip_while(|c| *c != Component::Normal(OsStr::new("themes")))
         .skip(1)
-        .filter_map(|c| {
+        .map(|c| {
             c.as_os_str()
                 .to_str()
                 .unwrap_or_default()
-                .splitn(2, '.')
-                .next()
+                .split_once('.')
+                .map_or(c.as_os_str().to_str().unwrap_or_default(), |x| x.0)
         })
         .collect::<Vec<_>>()
         .join("-");
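The theme-name hunk above swaps `splitn(2, '.').next()` for `split_once('.')`. A small standalone sketch of the same pattern (the component names are invented, not Plume's actual theme files):

```rust
// `split_once('.')` returns Some((before, after)) for the first dot, or None
// when there is no dot; `map_or` then falls back to the whole component, which
// is also what `.splitn(2, '.').next()` used to yield for dot-less names.
fn stem(component: &str) -> &str {
    component.split_once('.').map_or(component, |x| x.0)
}

fn main() {
    assert_eq!(stem("default.scss"), "default"); // extension dropped
    assert_eq!(stem("dark"), "dark");            // no dot: unchanged
    assert_eq!(stem("a.b.c"), "a");              // only the first dot matters
}
```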


@@ -207,7 +207,7 @@ where
        };

        // Handle the activity
-        match obj.activity(ctx, actor, &act_id) {
+        match obj.activity(ctx, actor, act_id) {
            Ok(res) => Inbox::Handled(res.into()),
            Err(e) => Inbox::Failed(e),
        }


@@ -145,7 +145,7 @@ where
                warn!("Inbox doesn't have host: {:?}", &inbox);
                continue;
            };
-            let host_header_value = HeaderValue::from_str(&url.host_str().expect("Unreachable"));
+            let host_header_value = HeaderValue::from_str(url.host_str().expect("Unreachable"));
            if host_header_value.is_err() {
                warn!("Header value is invalid: {:?}", url.host_str());
                continue;


@@ -182,7 +182,7 @@ pub fn verify_http_headers<S: Signer + ::std::fmt::Debug>(
    }
    let digest = all_headers.get_one("digest").unwrap_or("");
    let digest = request::Digest::from_header(digest);
-    if !digest.map(|d| d.verify_header(&data)).unwrap_or(false) {
+    if !digest.map(|d| d.verify_header(data)).unwrap_or(false) {
        // signature was valid, but body content does not match its digest
        return SignatureValidity::Invalid;
    }
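This one-character change (`&data` → `data`) is the most common kind of edit in the commit: clippy's `needless_borrow` lint flags taking a reference to something that is already a reference. A minimal illustration with made-up names:

```rust
fn render(text: &str) -> String {
    format!("<p>{}</p>", text)
}

fn main() {
    let body = String::from("hello");
    let borrowed: &str = &body;

    // `&borrowed` builds a `&&str` that the compiler immediately dereferences
    // again; clippy::needless_borrow asks for the existing reference instead.
    let noisy = render(&borrowed);
    let clean = render(borrowed);
    assert_eq!(noisy, clean);
}
```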


@@ -141,13 +141,13 @@ fn highlight_code<'a>(
        unreachable!();
    };
    let syntax_set = SyntaxSet::load_defaults_newlines();
-    let syntax = syntax_set.find_syntax_by_token(&lang).unwrap_or_else(|| {
+    let syntax = syntax_set.find_syntax_by_token(lang).unwrap_or_else(|| {
        syntax_set
-            .find_syntax_by_name(&lang)
+            .find_syntax_by_name(lang)
            .unwrap_or_else(|| syntax_set.find_syntax_plain_text())
    });
    let mut html = ClassedHTMLGenerator::new_with_class_style(
-        &syntax,
+        syntax,
        &syntax_set,
        ClassStyle::Spaced,
    );
@@ -334,16 +334,15 @@ pub fn md_to_html<'a>(
                    text_acc.push(c)
                }
                let mention = text_acc;
-                let short_mention = mention.splitn(1, '@').next().unwrap_or("");
                let link = Tag::Link(
                    LinkType::Inline,
                    format!("{}@/{}/", base_url, &mention).into(),
-                    short_mention.to_owned().into(),
+                    mention.clone().into(),
                );
                mentions.push(mention.clone());
                events.push(Event::Start(link.clone()));
-                events.push(Event::Text(format!("@{}", &short_mention).into()));
+                events.push(Event::Text(format!("@{}", &mention).into()));
                events.push(Event::End(link));
                (
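The deleted `short_mention` line was a quiet oddity rather than a refactor: `splitn(1, '@')` never splits at all (it yields at most one piece, the whole string), so `short_mention` always equalled `mention`. A quick demonstration with an invented handle:

```rust
fn main() {
    let mention = "user@example.com";

    // A limit of 1 returns the input untouched…
    assert_eq!(mention.splitn(1, '@').next(), Some("user@example.com"));
    // …a limit of 2 is what would have produced the part before the '@'.
    assert_eq!(mention.splitn(2, '@').next(), Some("user"));
}
```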


@@ -58,7 +58,7 @@ pub fn import_migrations(input: TokenStream) -> TokenStream {
            (name, up_sql, down_sql)
        })
        .collect::<Vec<_>>();
-    let migrations_name = migrations.iter().map(|m| &m.0).collect::<Vec<_>>();
+    let migrations_name = migrations.iter().map(|m| &m.0);
    let migrations_up = migrations
        .iter()
        .map(|m| m.1.as_str())
@@ -103,7 +103,7 @@ fn file_to_migration(file: &str) -> TokenStream2 {
                acc.push('\n');
            }
        } else if let Some(acc_str) = line.strip_prefix("--#!") {
-            acc.push_str(&acc_str);
+            acc.push_str(acc_str);
            acc.push('\n');
        } else if line.starts_with("--") {
            continue;


@@ -86,14 +86,18 @@ impl<'a, 'r> FromRequest<'a, 'r> for ApiToken {
        }
        let mut parsed_header = headers[0].split(' ');
-        let auth_type = parsed_header.next().map_or_else(
-            || Outcome::Failure((Status::BadRequest, TokenError::NoType)),
-            Outcome::Success,
-        )?;
-        let val = parsed_header.next().map_or_else(
-            || Outcome::Failure((Status::BadRequest, TokenError::NoValue)),
-            Outcome::Success,
-        )?;
+        let auth_type = parsed_header
+            .next()
+            .map_or_else::<rocket::Outcome<&str, _, ()>, _, _>(
+                || Outcome::Failure((Status::BadRequest, TokenError::NoType)),
+                Outcome::Success,
+            )?;
+        let val = parsed_header
+            .next()
+            .map_or_else::<rocket::Outcome<&str, _, ()>, _, _>(
+                || Outcome::Failure((Status::BadRequest, TokenError::NoValue)),
+                Outcome::Success,
+            )?;
        if auth_type == "Bearer" {
            let conn = request
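The turbofish added above is needed because Rocket's `Outcome` carries a third "forward" type parameter that neither closure ever mentions, so inference has nothing to pin it to. A simplified stand-in (this `Outcome`, `TokenError`, and `parse` are mock-ups, not Rocket's real API):

```rust
#[derive(Debug)]
enum Outcome<S, E, F> {
    Success(S),
    Failure(E),
    Forward(F),
}

#[derive(Debug)]
enum TokenError {
    NoType,
}

fn parse(header: &str) -> Outcome<&str, TokenError, ()> {
    // Only `S` and `E` appear in the two closures; naming the full type in the
    // turbofish is how the real code nails down the forward parameter `F`.
    header
        .split(' ')
        .next()
        .map_or_else::<Outcome<&str, TokenError, ()>, _, _>(
            || Outcome::Failure(TokenError::NoType),
            Outcome::Success,
        )
}

fn main() {
    let _unused: Outcome<&str, TokenError, ()> = Outcome::Forward(());
    println!("{:?}", parse("Bearer abc123"));
}
```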


@ -28,7 +28,7 @@ impl BlocklistedEmail {
pub fn delete_entries(conn: &Connection, ids: Vec<i32>) -> Result<bool> { pub fn delete_entries(conn: &Connection, ids: Vec<i32>) -> Result<bool> {
use diesel::delete; use diesel::delete;
for i in ids { for i in ids {
let be: BlocklistedEmail = BlocklistedEmail::find_by_id(&conn, i)?; let be: BlocklistedEmail = BlocklistedEmail::find_by_id(conn, i)?;
delete(&be).execute(conn)?; delete(&be).execute(conn)?;
} }
Ok(true) Ok(true)


@@ -149,7 +149,15 @@ impl Blog {
            .into_iter()
            .find(|l| l.mime_type == Some(String::from("application/activity+json")))
            .ok_or(Error::Webfinger)
-            .and_then(|l| Blog::from_id(conn, &l.href?, None, CONFIG.proxy()).map_err(|(_, e)| e))
+            .and_then(|l| {
+                Blog::from_id(
+                    conn,
+                    &l.href.ok_or(Error::MissingApProperty)?,
+                    None,
+                    CONFIG.proxy(),
+                )
+                .map_err(|(_, e)| e)
+            })
    }

    pub fn to_activity(&self, conn: &Connection) -> Result<CustomGroup> {
@ -236,7 +244,7 @@ impl Blog {
(min, max): (i32, i32), (min, max): (i32, i32),
) -> Result<ActivityStream<OrderedCollectionPage>> { ) -> Result<ActivityStream<OrderedCollectionPage>> {
let mut coll = OrderedCollectionPage::default(); let mut coll = OrderedCollectionPage::default();
let acts = self.get_activity_page(&conn, (min, max)); let acts = self.get_activity_page(conn, (min, max));
//This still doesn't do anything because the outbox //This still doesn't do anything because the outbox
//doesn't do anything yet //doesn't do anything yet
coll.collection_page_props.set_next_link(Id::new(&format!( coll.collection_page_props.set_next_link(Id::new(&format!(
@@ -265,7 +273,10 @@
    pub fn get_keypair(&self) -> Result<PKey<Private>> {
        PKey::from_rsa(Rsa::private_key_from_pem(
-            self.private_key.clone()?.as_ref(),
+            self.private_key
+                .clone()
+                .ok_or(Error::MissingApProperty)?
+                .as_ref(),
        )?)
        .map_err(Error::from)
    }
@ -318,7 +329,7 @@ impl Blog {
} }
pub fn delete(&self, conn: &Connection) -> Result<()> { pub fn delete(&self, conn: &Connection) -> Result<()> {
for post in Post::get_for_blog(conn, &self)? { for post in Post::get_for_blog(conn, self)? {
post.delete(conn)?; post.delete(conn)?;
} }
diesel::delete(self) diesel::delete(self)
@ -339,12 +350,12 @@ impl FromId<DbConn> for Blog {
type Object = CustomGroup; type Object = CustomGroup;
fn from_db(conn: &DbConn, id: &str) -> Result<Self> { fn from_db(conn: &DbConn, id: &str) -> Result<Self> {
Self::find_by_ap_url(&conn, id) Self::find_by_ap_url(conn, id)
} }
fn from_activity(conn: &DbConn, acct: CustomGroup) -> Result<Self> { fn from_activity(conn: &DbConn, acct: CustomGroup) -> Result<Self> {
let url = Url::parse(&acct.object.object_props.id_string()?)?; let url = Url::parse(&acct.object.object_props.id_string()?)?;
let inst = url.host_str()?; let inst = url.host_str().ok_or(Error::Url)?;
let instance = Instance::find_by_domain(conn, inst).or_else(|_| { let instance = Instance::find_by_domain(conn, inst).or_else(|_| {
Instance::insert( Instance::insert(
conn, conn,
@ -468,7 +479,7 @@ impl sign::Signer for Blog {
let key = PKey::from_rsa(Rsa::public_key_from_pem(self.public_key.as_ref())?)?; let key = PKey::from_rsa(Rsa::public_key_from_pem(self.public_key.as_ref())?)?;
let mut verifier = Verifier::new(MessageDigest::sha256(), &key)?; let mut verifier = Verifier::new(MessageDigest::sha256(), &key)?;
verifier.update(data.as_bytes())?; verifier.update(data.as_bytes())?;
verifier.verify(&signature).map_err(Error::from) verifier.verify(signature).map_err(Error::from)
} }
} }


@ -141,18 +141,20 @@ impl Comment {
} }
pub fn create_activity(&self, conn: &DbConn) -> Result<Create> { pub fn create_activity(&self, conn: &DbConn) -> Result<Create> {
let author = User::get(&conn, self.author_id)?; let author = User::get(conn, self.author_id)?;
let note = self.to_activity(conn)?; let note = self.to_activity(conn)?;
let mut act = Create::default(); let mut act = Create::default();
act.create_props.set_actor_link(author.into_id())?; act.create_props.set_actor_link(author.into_id())?;
act.create_props.set_object_object(note.clone())?; act.create_props.set_object_object(note.clone())?;
act.object_props act.object_props.set_id_string(format!(
.set_id_string(format!("{}/activity", self.ap_url.clone()?,))?; "{}/activity",
self.ap_url.clone().ok_or(Error::MissingApProperty)?,
))?;
act.object_props act.object_props
.set_to_link_vec(note.object_props.to_link_vec::<Id>()?)?; .set_to_link_vec(note.object_props.to_link_vec::<Id>()?)?;
act.object_props act.object_props
.set_cc_link_vec(vec![Id::new(self.get_author(&conn)?.followers_endpoint)])?; .set_cc_link_vec(vec![Id::new(self.get_author(conn)?.followers_endpoint)])?;
Ok(act) Ok(act)
} }
@ -182,7 +184,9 @@ impl Comment {
.set_actor_link(self.get_author(conn)?.into_id())?; .set_actor_link(self.get_author(conn)?.into_id())?;
let mut tombstone = Tombstone::default(); let mut tombstone = Tombstone::default();
tombstone.object_props.set_id_string(self.ap_url.clone()?)?; tombstone
.object_props
.set_id_string(self.ap_url.clone().ok_or(Error::MissingApProperty)?)?;
act.delete_props.set_object_object(tombstone)?; act.delete_props.set_object_object(tombstone)?;
act.object_props act.object_props
@@ -204,7 +208,13 @@ impl FromId<DbConn> for Comment {
    fn from_activity(conn: &DbConn, note: Note) -> Result<Self> {
        let comm = {
-            let previous_url = note.object_props.in_reply_to.as_ref()?.as_str()?;
+            let previous_url = note
+                .object_props
+                .in_reply_to
+                .as_ref()
+                .ok_or(Error::MissingApProperty)?
+                .as_str()
+                .ok_or(Error::MissingApProperty)?;
            let previous_comment = Comment::find_by_ap_url(conn, previous_url);

            let is_public = |v: &Option<serde_json::Value>| match v
@ -346,7 +356,7 @@ impl AsObject<User, Delete, &DbConn> for Comment {
m.delete(conn)?; m.delete(conn)?;
} }
for n in Notification::find_for_comment(&conn, &self)? { for n in Notification::find_for_comment(conn, &self)? {
n.delete(&**conn)?; n.delete(&**conn)?;
} }


@@ -17,7 +17,7 @@ pub struct Config {
    pub db_min_idle: Option<u32>,
    pub search_index: String,
    pub search_tokenizers: SearchTokenizerConfig,
-    pub rocket: Result<RocketConfig, RocketError>,
+    pub rocket: Result<RocketConfig, InvalidRocketConfig>,
    pub logo: LogoConfig,
    pub default_theme: String,
    pub media_directory: String,
@@ -31,21 +31,21 @@ impl Config {
}

#[derive(Debug, Clone)]
-pub enum RocketError {
-    InvalidEnv,
-    InvalidAddress,
-    InvalidSecretKey,
+pub enum InvalidRocketConfig {
+    Env,
+    Address,
+    SecretKey,
}

-fn get_rocket_config() -> Result<RocketConfig, RocketError> {
-    let mut c = RocketConfig::active().map_err(|_| RocketError::InvalidEnv)?;
+fn get_rocket_config() -> Result<RocketConfig, InvalidRocketConfig> {
+    let mut c = RocketConfig::active().map_err(|_| InvalidRocketConfig::Env)?;

    let address = var("ROCKET_ADDRESS").unwrap_or_else(|_| "localhost".to_owned());
    let port = var("ROCKET_PORT")
        .ok()
        .map(|s| s.parse::<u16>().unwrap())
        .unwrap_or(7878);
-    let secret_key = var("ROCKET_SECRET_KEY").map_err(|_| RocketError::InvalidSecretKey)?;
+    let secret_key = var("ROCKET_SECRET_KEY").map_err(|_| InvalidRocketConfig::SecretKey)?;
    let form_size = var("FORM_SIZE")
        .unwrap_or_else(|_| "128".to_owned())
        .parse::<u64>()
@@ -56,10 +56,10 @@ fn get_rocket_config() -> Result<RocketConfig, RocketError> {
        .unwrap();

    c.set_address(address)
-        .map_err(|_| RocketError::InvalidAddress)?;
+        .map_err(|_| InvalidRocketConfig::Address)?;
    c.set_port(port);
    c.set_secret_key(secret_key)
-        .map_err(|_| RocketError::InvalidSecretKey)?;
+        .map_err(|_| InvalidRocketConfig::SecretKey)?;

    c.set_limits(
        Limits::new()
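`RocketError` becomes `InvalidRocketConfig` and its variants lose their shared `Invalid` prefix; this is exactly the shape clippy's `enum_variant_names` lint complains about (whether that lint is what prompted the rename here is an assumption). Side by side:

```rust
// Before: every variant repeats the same prefix.
#[allow(dead_code)]
#[derive(Debug, Clone)]
enum RocketError {
    InvalidEnv,
    InvalidAddress,
    InvalidSecretKey,
}

// After: the qualifier moves into the type name, and call sites read as
// `InvalidRocketConfig::SecretKey`.
#[allow(dead_code)]
#[derive(Debug, Clone)]
enum InvalidRocketConfig {
    Env,
    Address,
    SecretKey,
}

fn main() {
    println!(
        "{:?} -> {:?}",
        RocketError::InvalidSecretKey,
        InvalidRocketConfig::SecretKey
    );
}
```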
@@ -155,7 +155,7 @@ impl Default for LogoConfig {
            .ok()
            .or_else(|| custom_main.clone());
        let other = if let Some(main) = custom_main.clone() {
-            let ext = |path: &str| match path.rsplitn(2, '.').next() {
+            let ext = |path: &str| match path.rsplit_once('.').map(|x| x.1) {
                Some("png") => Some("image/png".to_owned()),
                Some("jpg") | Some("jpeg") => Some("image/jpeg".to_owned()),
                Some("svg") => Some("image/svg+xml".to_owned()),


@ -195,7 +195,7 @@ impl AsObject<User, Undo, &DbConn> for Follow {
diesel::delete(&self).execute(&**conn)?; diesel::delete(&self).execute(&**conn)?;
// delete associated notification if any // delete associated notification if any
if let Ok(notif) = Notification::find(&conn, notification_kind::FOLLOW, self.id) { if let Ok(notif) = Notification::find(conn, notification_kind::FOLLOW, self.id) {
diesel::delete(&notif).execute(&**conn)?; diesel::delete(&notif).execute(&**conn)?;
} }


@@ -1,4 +1,3 @@
-#![feature(try_trait)]
 #![feature(never_type)]
 #![feature(proc_macro_hygiene)]
 #![feature(box_patterns)]
@@ -86,12 +85,6 @@ impl From<diesel::result::Error> for Error {
    }
}

-impl From<std::option::NoneError> for Error {
-    fn from(_: std::option::NoneError) -> Self {
-        Error::NotFound
-    }
-}
-
impl From<url::ParseError> for Error {
    fn from(_: url::ParseError) -> Self {
        Error::Url
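Dropping `#![feature(try_trait)]` and the `From<std::option::NoneError>` impl is the core of this commit: newer nightlies removed `NoneError` when `try_trait` was reworked, so `?` can no longer turn an `Option` into this `Result`'s error. Every former `foo?` on an `Option` now names its error with `ok_or`, roughly like this sketch (`Error` and `first_author` are stand-ins, not Plume's items):

```rust
#[derive(Debug, PartialEq)]
enum Error {
    NotFound,
}

fn first_author(authors: Vec<String>) -> Result<String, Error> {
    // Previously writable as `authors.into_iter().next()?` thanks to the
    // `From<NoneError>` impl; the error is now chosen explicitly at the call site.
    authors.into_iter().next().ok_or(Error::NotFound)
}

fn main() {
    assert_eq!(first_author(vec!["alice".into()]).unwrap(), "alice");
    assert_eq!(first_author(vec![]), Err(Error::NotFound));
}
```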


@ -148,7 +148,7 @@ impl AsObject<User, activity::Undo, &DbConn> for Like {
diesel::delete(&self).execute(&**conn)?; diesel::delete(&self).execute(&**conn)?;
// delete associated notification if any // delete associated notification if any
if let Ok(notif) = Notification::find(&conn, notification_kind::LIKE, self.id) { if let Ok(notif) = Notification::find(conn, notification_kind::LIKE, self.id) {
diesel::delete(&notif).execute(&**conn)?; diesel::delete(&notif).execute(&**conn)?;
} }
Ok(()) Ok(())


@ -143,6 +143,7 @@ macro_rules! func {
} }
} }
#[allow(dead_code)]
#[derive(Clone, Queryable, Identifiable)] #[derive(Clone, Queryable, Identifiable)]
struct ListElem { struct ListElem {
pub id: i32, pub id: i32,


@@ -104,8 +104,8 @@ impl Media {
    pub fn category(&self) -> MediaCategory {
        match &*self
            .file_path
-            .rsplitn(2, '.')
-            .next()
+            .rsplit_once('.')
+            .map(|x| x.1)
            .expect("Media::category: extension error")
            .to_lowercase()
        {
@ -208,14 +208,17 @@ impl Media {
// TODO: merge with save_remote? // TODO: merge with save_remote?
pub fn from_activity(conn: &DbConn, image: &Image) -> Result<Media> { pub fn from_activity(conn: &DbConn, image: &Image) -> Result<Media> {
let remote_url = image.object_props.url_string().ok()?; let remote_url = image
.object_props
.url_string()
.or(Err(Error::MissingApProperty))?;
let path = determine_mirror_file_path(&remote_url); let path = determine_mirror_file_path(&remote_url);
let parent = path.parent()?; let parent = path.parent().ok_or(Error::InvalidValue)?;
if !parent.is_dir() { if !parent.is_dir() {
DirBuilder::new().recursive(true).create(parent)?; DirBuilder::new().recursive(true).create(parent)?;
} }
let mut dest = fs::File::create(path.clone()).ok()?; let mut dest = fs::File::create(path.clone())?;
// TODO: conditional GET // TODO: conditional GET
if let Some(proxy) = CONFIG.proxy() { if let Some(proxy) = CONFIG.proxy() {
reqwest::ClientBuilder::new().proxy(proxy.clone()).build()? reqwest::ClientBuilder::new().proxy(proxy.clone()).build()?
@ -223,16 +226,17 @@ impl Media {
reqwest::Client::new() reqwest::Client::new()
} }
.get(remote_url.as_str()) .get(remote_url.as_str())
.send() .send()?
.ok()? .copy_to(&mut dest)?;
.copy_to(&mut dest)
.ok()?;
Media::find_by_file_path(conn, &path.to_str()?) Media::find_by_file_path(conn, path.to_str().ok_or(Error::InvalidValue)?)
.and_then(|mut media| { .and_then(|mut media| {
let mut updated = false; let mut updated = false;
let alt_text = image.object_props.content_string().ok()?; let alt_text = image
.object_props
.content_string()
.or(Err(Error::NotFound))?;
let sensitive = image.object_props.summary_string().is_ok(); let sensitive = image.object_props.summary_string().is_ok();
let content_warning = image.object_props.summary_string().ok(); let content_warning = image.object_props.summary_string().ok();
if media.alt_text != alt_text { if media.alt_text != alt_text {
@ -264,8 +268,11 @@ impl Media {
Media::insert( Media::insert(
conn, conn,
NewMedia { NewMedia {
file_path: path.to_str()?.to_string(), file_path: path.to_str().ok_or(Error::InvalidValue)?.to_string(),
alt_text: image.object_props.content_string().ok()?, alt_text: image
.object_props
.content_string()
.or(Err(Error::NotFound))?,
is_remote: false, is_remote: false,
remote_url: None, remote_url: None,
sensitive: image.object_props.summary_string().is_ok(), sensitive: image.object_props.summary_string().is_ok(),
@ -275,9 +282,10 @@ impl Media {
image image
.object_props .object_props
.attributed_to_link_vec::<Id>() .attributed_to_link_vec::<Id>()
.ok()? .or(Err(Error::NotFound))?
.into_iter() .into_iter()
.next()? .next()
.ok_or(Error::NotFound)?
.as_ref(), .as_ref(),
None, None,
CONFIG.proxy(), CONFIG.proxy(),
@ -325,7 +333,7 @@ fn determine_mirror_file_path(url: &str) -> PathBuf {
.next() .next()
.map(ToOwned::to_owned) .map(ToOwned::to_owned)
.unwrap_or_else(|| String::from("png")); .unwrap_or_else(|| String::from("png"));
file_path.push(format!("{}.{}", GUID::rand().to_string(), ext)); file_path.push(format!("{}.{}", GUID::rand(), ext));
}); });
file_path file_path
} }
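The media.rs hunks above show the same migration on the `Result` side: `.ok()?` used to discard the real error and rely on `NoneError`, while the new code either propagates the error with a plain `?` or remaps it (`.or(Err(Error::NotFound))?`). A rough sketch under assumed types (`Error` and `read_len` are illustrative; Plume's actual error enum differs):

```rust
use std::{fs, io};

#[derive(Debug)]
enum Error {
    Io(io::Error),
}

impl From<io::Error> for Error {
    fn from(e: io::Error) -> Self {
        Error::Io(e)
    }
}

fn read_len(path: &str) -> Result<usize, Error> {
    // Old style: `fs::read(path).ok()?` threw the io::Error away; the plain `?`
    // keeps it and converts through the `From<io::Error>` impl above.
    let bytes = fs::read(path)?;
    Ok(bytes.len())
}

fn main() {
    println!("{:?}", read_len("Cargo.toml"));
}
```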


@@ -47,7 +47,11 @@ impl Mention {
    pub fn get_user(&self, conn: &Connection) -> Result<User> {
        match self.get_post(conn) {
-            Ok(p) => Ok(p.get_authors(conn)?.into_iter().next()?),
+            Ok(p) => Ok(p
+                .get_authors(conn)?
+                .into_iter()
+                .next()
+                .ok_or(Error::NotFound)?),
            Err(_) => self.get_comment(conn).and_then(|c| c.get_author(conn)),
        }
    }
@@ -77,7 +81,7 @@ impl Mention {
        in_post: bool,
        notify: bool,
    ) -> Result<Self> {
-        let ap_url = ment.link_props.href_string().ok()?;
+        let ap_url = ment.link_props.href_string().or(Err(Error::NotFound))?;
        let mentioned = User::find_by_ap_url(conn, &ap_url)?;

        if in_post {


@@ -105,7 +105,8 @@ impl ImportedMigrations {
    pub fn rerun_last_migration(&self, conn: &Connection, path: &Path) -> Result<()> {
        let latest_migration = conn.latest_run_migration_version()?;
        let id = latest_migration
-            .and_then(|m| self.0.binary_search_by_key(&m.as_str(), |m| m.name).ok())?;
+            .and_then(|m| self.0.binary_search_by_key(&m.as_str(), |m| m.name).ok())
+            .ok_or(Error::NotFound)?;
        let migration = &self.0[id];
        conn.transaction(|| {
            migration.revert(conn, path)?;


@ -61,7 +61,7 @@ impl PasswordResetRequest {
} }
pub fn find_and_delete_by_token(conn: &Connection, token: &str) -> Result<Self> { pub fn find_and_delete_by_token(conn: &Connection, token: &str) -> Result<Self> {
let request = Self::find_by_token(&conn, &token)?; let request = Self::find_by_token(conn, token)?;
let filter = let filter =
password_reset_requests::table.filter(password_reset_requests::id.eq(request.id)); password_reset_requests::table.filter(password_reset_requests::id.eq(request.id));


@ -97,7 +97,7 @@ impl Post {
} }
pub fn delete(&self, conn: &Connection) -> Result<()> { pub fn delete(&self, conn: &Connection) -> Result<()> {
for m in Mention::list_for_post(&conn, self.id)? { for m in Mention::list_for_post(conn, self.id)? {
m.delete(conn)?; m.delete(conn)?;
} }
diesel::delete(self).execute(conn)?; diesel::delete(self).execute(conn)?;
@ -457,14 +457,14 @@ impl Post {
.filter_map(|(id, m)| id.map(|id| (m, id))) .filter_map(|(id, m)| id.map(|id| (m, id)))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let old_mentions = Mention::list_for_post(&conn, self.id)?; let old_mentions = Mention::list_for_post(conn, self.id)?;
let old_user_mentioned = old_mentions let old_user_mentioned = old_mentions
.iter() .iter()
.map(|m| m.mentioned_id) .map(|m| m.mentioned_id)
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
for (m, id) in &mentions { for (m, id) in &mentions {
if !old_user_mentioned.contains(&id) { if !old_user_mentioned.contains(id) {
Mention::from_activity(&*conn, &m, self.id, true, true)?; Mention::from_activity(&*conn, m, self.id, true, true)?;
} }
} }
@ -476,7 +476,7 @@ impl Post {
.iter() .iter()
.filter(|m| !new_mentions.contains(&m.mentioned_id)) .filter(|m| !new_mentions.contains(&m.mentioned_id))
{ {
m.delete(&conn)?; m.delete(conn)?;
} }
Ok(()) Ok(())
} }
@ -700,7 +700,7 @@ impl FromId<DbConn> for Post {
Post::insert( Post::insert(
conn, conn,
NewPost { NewPost {
blog_id: blog?.id, blog_id: blog.ok_or(Error::NotFound)?.id,
slug: Self::slug(&title).to_string(), slug: Self::slug(&title).to_string(),
title, title,
content: SafeString::new(&article.object_props.content_string()?), content: SafeString::new(&article.object_props.content_string()?),


@ -173,7 +173,7 @@ impl AsObject<User, Undo, &DbConn> for Reshare {
diesel::delete(&self).execute(&**conn)?; diesel::delete(&self).execute(&**conn)?;
// delete associated notification if any // delete associated notification if any
if let Ok(notif) = Notification::find(&conn, notification_kind::RESHARE, self.id) { if let Ok(notif) = Notification::find(conn, notification_kind::RESHARE, self.id) {
diesel::delete(&notif).execute(&**conn)?; diesel::delete(&notif).execute(&**conn)?;
} }


@ -102,7 +102,7 @@ pub struct SafeString {
impl SafeString { impl SafeString {
pub fn new(value: &str) -> Self { pub fn new(value: &str) -> Self {
SafeString { SafeString {
value: CLEAN.clean(&value).to_string(), value: CLEAN.clean(value).to_string(),
} }
} }


@ -148,7 +148,7 @@ impl PlumeQuery {
/// Parse a query string into this Query /// Parse a query string into this Query
pub fn parse_query(&mut self, query: &str) -> &mut Self { pub fn parse_query(&mut self, query: &str) -> &mut Self {
self.from_str_req(&query.trim()) self.from_str_req(query.trim())
} }
/// Convert this Query to a Tantivy Query /// Convert this Query to a Tantivy Query
@ -360,7 +360,7 @@ impl std::str::FromStr for PlumeQuery {
fn from_str(query: &str) -> Result<PlumeQuery, !> { fn from_str(query: &str) -> Result<PlumeQuery, !> {
let mut res: PlumeQuery = Default::default(); let mut res: PlumeQuery = Default::default();
res.from_str_req(&query.trim()); res.from_str_req(query.trim());
Ok(res) Ok(res)
} }
} }


@@ -18,12 +18,6 @@ pub enum QueryError {
    RuntimeError(String),
}

-impl From<std::option::NoneError> for QueryError {
-    fn from(_: std::option::NoneError) -> Self {
-        QueryError::UnexpectedEndOfQuery
-    }
-}
-
pub type QueryResult<T> = std::result::Result<T, QueryError>;

#[derive(Debug, Clone, Copy, PartialEq)]
@ -239,7 +233,7 @@ impl WithList {
) -> Result<bool> { ) -> Result<bool> {
match list { match list {
List::List(name) => { List::List(name) => {
let list = lists::List::find_for_user_by_name(conn, timeline.user_id, &name)?; let list = lists::List::find_for_user_by_name(conn, timeline.user_id, name)?;
match (self, list.kind()) { match (self, list.kind()) {
(WithList::Blog, ListType::Blog) => list.contains_blog(conn, post.blog_id), (WithList::Blog, ListType::Blog) => list.contains_blog(conn, post.blog_id),
(WithList::Author { boosts, likes }, ListType::User) => match kind { (WithList::Author { boosts, likes }, ListType::User) => match kind {
@ -414,7 +408,7 @@ enum List<'a> {
fn parse_s<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> { fn parse_s<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
let mut res = Vec::new(); let mut res = Vec::new();
let (left, token) = parse_a(&stream)?; let (left, token) = parse_a(stream)?;
res.push(token); res.push(token);
stream = left; stream = left;
while !stream.is_empty() { while !stream.is_empty() {
@ -436,7 +430,7 @@ fn parse_s<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>],
fn parse_a<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> { fn parse_a<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<'a>)> {
let mut res = Vec::new(); let mut res = Vec::new();
let (left, token) = parse_b(&stream)?; let (left, token) = parse_b(stream)?;
res.push(token); res.push(token);
stream = left; stream = left;
while !stream.is_empty() { while !stream.is_empty() {
@ -463,7 +457,7 @@ fn parse_b<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<
match left.get(0) { match left.get(0) {
Some(Token::RParent(_)) => Ok((&left[1..], token)), Some(Token::RParent(_)) => Ok((&left[1..], token)),
Some(t) => t.get_error(Token::RParent(0)), Some(t) => t.get_error(Token::RParent(0)),
None => None?, None => Err(QueryError::UnexpectedEndOfQuery),
} }
} }
_ => parse_c(stream), _ => parse_c(stream),
@ -484,9 +478,13 @@ fn parse_c<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], TQ<
} }
fn parse_d<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Arg<'a>)> { fn parse_d<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Arg<'a>)> {
match stream.get(0).map(Token::get_text)? { match stream
.get(0)
.map(Token::get_text)
.ok_or(QueryError::UnexpectedEndOfQuery)?
{
s @ "blog" | s @ "author" | s @ "license" | s @ "tags" | s @ "lang" => { s @ "blog" | s @ "author" | s @ "license" | s @ "tags" | s @ "lang" => {
match stream.get(1)? { match stream.get(1).ok_or(QueryError::UnexpectedEndOfQuery)? {
Token::Word(_, _, r#in) if r#in == &"in" => { Token::Word(_, _, r#in) if r#in == &"in" => {
let (mut left, list) = parse_l(&stream[2..])?; let (mut left, list) = parse_l(&stream[2..])?;
let kind = match s { let kind = match s {
@ -498,7 +496,12 @@ fn parse_d<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>],
if *clude != "include" && *clude != "exclude" { if *clude != "include" && *clude != "exclude" {
break; break;
} }
match (*clude, left.get(1).map(Token::get_text)?) { match (
*clude,
left.get(1)
.map(Token::get_text)
.ok_or(QueryError::UnexpectedEndOfQuery)?,
) {
("include", "reshares") | ("include", "reshare") => { ("include", "reshares") | ("include", "reshare") => {
boosts = true boosts = true
} }
@ -529,7 +532,10 @@ fn parse_d<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>],
t => t.get_error(Token::Word(0, 0, "'in'")), t => t.get_error(Token::Word(0, 0, "'in'")),
} }
} }
s @ "title" | s @ "subtitle" | s @ "content" => match (stream.get(1)?, stream.get(2)?) { s @ "title" | s @ "subtitle" | s @ "content" => match (
stream.get(1).ok_or(QueryError::UnexpectedEndOfQuery)?,
stream.get(2).ok_or(QueryError::UnexpectedEndOfQuery)?,
) {
(Token::Word(_, _, contains), Token::Word(_, _, w)) if contains == &"contains" => Ok(( (Token::Word(_, _, contains), Token::Word(_, _, w)) if contains == &"contains" => Ok((
&stream[3..], &stream[3..],
Arg::Contains( Arg::Contains(
@ -555,7 +561,13 @@ fn parse_d<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>],
if *clude != "include" && *clude != "exclude" { if *clude != "include" && *clude != "exclude" {
break; break;
} }
match (*clude, stream.get(2).map(Token::get_text)?) { match (
*clude,
stream
.get(2)
.map(Token::get_text)
.ok_or(QueryError::UnexpectedEndOfQuery)?,
) {
("include", "reshares") | ("include", "reshare") => boosts = true, ("include", "reshares") | ("include", "reshare") => boosts = true,
("exclude", "reshares") | ("exclude", "reshare") => boosts = false, ("exclude", "reshares") | ("exclude", "reshare") => boosts = false,
("include", "likes") | ("include", "like") => likes = true, ("include", "likes") | ("include", "like") => likes = true,
@ -577,20 +589,23 @@ fn parse_d<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>],
"all" => Ok((&stream[1..], Arg::Boolean(Bool::All))), "all" => Ok((&stream[1..], Arg::Boolean(Bool::All))),
_ => unreachable!(), _ => unreachable!(),
}, },
_ => stream.get(0)?.get_error(Token::Word( _ => stream
0, .get(0)
0, .ok_or(QueryError::UnexpectedEndOfQuery)?
"one of 'blog', 'author', 'license', 'tags', 'lang', \ .get_error(Token::Word(
0,
0,
"one of 'blog', 'author', 'license', 'tags', 'lang', \
'title', 'subtitle', 'content', 'followed', 'has_cover', 'local' or 'all'", 'title', 'subtitle', 'content', 'followed', 'has_cover', 'local' or 'all'",
)), )),
} }
} }
fn parse_l<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], List<'a>)> { fn parse_l<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], List<'a>)> {
match stream.get(0)? { match stream.get(0).ok_or(QueryError::UnexpectedEndOfQuery)? {
Token::LBracket(_) => { Token::LBracket(_) => {
let (left, list) = parse_m(&stream[1..])?; let (left, list) = parse_m(&stream[1..])?;
match left.get(0)? { match left.get(0).ok_or(QueryError::UnexpectedEndOfQuery)? {
Token::RBracket(_) => Ok((&left[1..], List::Array(list))), Token::RBracket(_) => Ok((&left[1..], List::Array(list))),
t => t.get_error(Token::Word(0, 0, "one of ']' or ','")), t => t.get_error(Token::Word(0, 0, "one of ']' or ','")),
} }
@ -601,16 +616,20 @@ fn parse_l<'a, 'b>(stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Lis
} }
fn parse_m<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Vec<&'a str>)> { fn parse_m<'a, 'b>(mut stream: &'b [Token<'a>]) -> QueryResult<(&'b [Token<'a>], Vec<&'a str>)> {
let mut res: Vec<&str> = vec![match stream.get(0)? { let mut res: Vec<&str> = vec![
Token::Word(_, _, w) => w, match stream.get(0).ok_or(QueryError::UnexpectedEndOfQuery)? {
t => return t.get_error(Token::Word(0, 0, "any word")),
}];
stream = &stream[1..];
while let Token::Comma(_) = stream[0] {
res.push(match stream.get(1)? {
Token::Word(_, _, w) => w, Token::Word(_, _, w) => w,
t => return t.get_error(Token::Word(0, 0, "any word")), t => return t.get_error(Token::Word(0, 0, "any word")),
}); },
];
stream = &stream[1..];
while let Token::Comma(_) = stream[0] {
res.push(
match stream.get(1).ok_or(QueryError::UnexpectedEndOfQuery)? {
Token::Word(_, _, w) => w,
t => return t.get_error(Token::Word(0, 0, "any word")),
},
);
stream = &stream[2..]; stream = &stream[2..];
} }


@@ -210,7 +210,13 @@ impl User {
            .into_iter()
            .find(|l| l.mime_type == Some(String::from("application/activity+json")))
            .ok_or(Error::Webfinger)?;
-        User::from_id(conn, link.href.as_ref()?, None, CONFIG.proxy()).map_err(|(_, e)| e)
+        User::from_id(
+            conn,
+            link.href.as_ref().ok_or(Error::Webfinger)?,
+            None,
+            CONFIG.proxy(),
+        )
+        .map_err(|(_, e)| e)
    }

    pub fn fetch_remote_interact_uri(acct: &str) -> Result<String> {
@ -258,7 +264,7 @@ impl User {
.icon_image()? .icon_image()?
.object_props .object_props
.url_string()?, .url_string()?,
&self, self,
) )
.ok(); .ok();
@ -427,12 +433,12 @@ impl User {
let last = &format!( let last = &format!(
"{}?page={}", "{}?page={}",
&self.outbox_url, &self.outbox_url,
self.get_activities_count(&conn) / i64::from(ITEMS_PER_PAGE) + 1 self.get_activities_count(conn) / i64::from(ITEMS_PER_PAGE) + 1
); );
coll.collection_props.set_first_link(Id::new(first))?; coll.collection_props.set_first_link(Id::new(first))?;
coll.collection_props.set_last_link(Id::new(last))?; coll.collection_props.set_last_link(Id::new(last))?;
coll.collection_props coll.collection_props
.set_total_items_u64(self.get_activities_count(&conn) as u64)?; .set_total_items_u64(self.get_activities_count(conn) as u64)?;
Ok(ActivityStream::new(coll)) Ok(ActivityStream::new(coll))
} }
pub fn outbox_page( pub fn outbox_page(
@ -441,7 +447,7 @@ impl User {
(min, max): (i32, i32), (min, max): (i32, i32),
) -> Result<ActivityStream<OrderedCollectionPage>> { ) -> Result<ActivityStream<OrderedCollectionPage>> {
let acts = self.get_activities_page(conn, (min, max))?; let acts = self.get_activities_page(conn, (min, max))?;
let n_acts = self.get_activities_count(&conn); let n_acts = self.get_activities_count(conn);
let mut coll = OrderedCollectionPage::default(); let mut coll = OrderedCollectionPage::default();
if n_acts - i64::from(min) >= i64::from(ITEMS_PER_PAGE) { if n_acts - i64::from(min) >= i64::from(ITEMS_PER_PAGE) {
coll.collection_page_props.set_next_link(Id::new(&format!( coll.collection_page_props.set_next_link(Id::new(&format!(
@ -513,7 +519,7 @@ impl User {
if page.is_empty() { if page.is_empty() {
break; break;
} }
items.extend(page.drain(..)); items.append(&mut page);
if let Some(n) = nxt { if let Some(n) = nxt {
if n == next { if n == next {
break; break;
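The pagination loop above now uses `items.append(&mut page)` instead of `items.extend(page.drain(..))` (clippy's `extend_with_drain`): both leave `page` empty, but `append` moves the whole buffer in one operation. A minimal demonstration with placeholder numbers:

```rust
fn main() {
    let mut items: Vec<i32> = vec![1, 2];
    let mut page: Vec<i32> = vec![3, 4, 5];

    // Moves every element out of `page` at once and leaves it empty, exactly
    // like `items.extend(page.drain(..))` did, without the element-by-element walk.
    items.append(&mut page);

    assert_eq!(items, [1, 2, 3, 4, 5]);
    assert!(page.is_empty()); // ready to hold the next fetched page
}
```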
@ -720,7 +726,7 @@ impl User {
pub fn get_keypair(&self) -> Result<PKey<Private>> { pub fn get_keypair(&self) -> Result<PKey<Private>> {
PKey::from_rsa(Rsa::private_key_from_pem( PKey::from_rsa(Rsa::private_key_from_pem(
self.private_key.clone()?.as_ref(), self.private_key.clone().ok_or(Error::Signature)?.as_ref(),
)?) )?)
.map_err(Error::from) .map_err(Error::from)
} }
@ -943,7 +949,7 @@ impl FromId<DbConn> for User {
fn from_activity(conn: &DbConn, acct: CustomPerson) -> Result<Self> { fn from_activity(conn: &DbConn, acct: CustomPerson) -> Result<Self> {
let url = Url::parse(&acct.object.object_props.id_string()?)?; let url = Url::parse(&acct.object.object_props.id_string()?)?;
let inst = url.host_str()?; let inst = url.host_str().ok_or(Error::Url)?;
let instance = Instance::find_by_domain(conn, inst).or_else(|_| { let instance = Instance::find_by_domain(conn, inst).or_else(|_| {
Instance::insert( Instance::insert(
conn, conn,
@ -1080,7 +1086,7 @@ impl Signer for User {
let key = PKey::from_rsa(Rsa::public_key_from_pem(self.public_key.as_ref())?)?; let key = PKey::from_rsa(Rsa::public_key_from_pem(self.public_key.as_ref())?)?;
let mut verifier = sign::Verifier::new(MessageDigest::sha256(), &key)?; let mut verifier = sign::Verifier::new(MessageDigest::sha256(), &key)?;
verifier.update(data.as_bytes())?; verifier.update(data.as_bytes())?;
verifier.verify(&signature).map_err(Error::from) verifier.verify(signature).map_err(Error::from)
} }
} }
@ -1121,7 +1127,7 @@ impl NewUser {
display_name, display_name,
role: role as i32, role: role as i32,
summary: summary.to_owned(), summary: summary.to_owned(),
summary_html: SafeString::new(&utils::md_to_html(&summary, None, false, None).0), summary_html: SafeString::new(&utils::md_to_html(summary, None, false, None).0),
email: Some(email), email: Some(email),
hashed_password: password, hashed_password: password,
instance_id: instance.id, instance_id: instance.id,


@@ -19,12 +19,6 @@ impl From<Error> for ApiError {
    }
}

-impl From<std::option::NoneError> for ApiError {
-    fn from(err: std::option::NoneError) -> ApiError {
-        ApiError(err.into())
-    }
-}
-
impl<'r> Responder<'r> for ApiError {
    fn respond_to(self, req: &Request<'_>) -> response::Result<'r> {
        match self.0 {


@ -1,7 +1,7 @@
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use rocket_contrib::json::Json; use rocket_contrib::json::Json;
use crate::api::{authorization::*, Api}; use crate::api::{authorization::*, Api, ApiError};
use plume_api::posts::*; use plume_api::posts::*;
use plume_common::{activity_pub::broadcast, utils::md_to_html}; use plume_common::{activity_pub::broadcast, utils::md_to_html};
use plume_models::{ use plume_models::{
@ -121,14 +121,17 @@ pub fn create(
Some(Media::get_media_processor(&conn, vec![&author])), Some(Media::get_media_processor(&conn, vec![&author])),
); );
let blog = payload.blog_id.or_else(|| { let blog = payload
let blogs = Blog::find_for_author(&conn, &author).ok()?; .blog_id
if blogs.len() == 1 { .or_else(|| {
Some(blogs[0].id) let blogs = Blog::find_for_author(&conn, &author).ok()?;
} else { if blogs.len() == 1 {
None Some(blogs[0].id)
} } else {
})?; None
}
})
.ok_or(ApiError(Error::NotFound))?;
if Post::find_by_slug(&conn, slug, blog).is_ok() { if Post::find_by_slug(&conn, slug, blog).is_ok() {
return Err(Error::InvalidValue.into()); return Err(Error::InvalidValue.into());


@ -90,8 +90,8 @@ impl<'a, T: Deserialize<'a>> FromData<'a> for SignedJson<T> {
o: Transformed<'a, Self>, o: Transformed<'a, Self>,
) -> rocket::data::Outcome<Self, Self::Error> { ) -> rocket::data::Outcome<Self, Self::Error> {
let string = o.borrowed()?; let string = o.borrowed()?;
match serde_json::from_str(&string) { match serde_json::from_str(string) {
Ok(v) => Success(SignedJson(Digest::from_body(&string), Json(v))), Ok(v) => Success(SignedJson(Digest::from_body(string), Json(v))),
Err(e) => { Err(e) => {
if e.is_data() { if e.is_data() {
Failure((Status::UnprocessableEntity, JsonError::Parse(string, e))) Failure((Status::UnprocessableEntity, JsonError::Parse(string, e)))


@@ -1,5 +1,5 @@
 #![allow(clippy::too_many_arguments)]
-#![feature(decl_macro, proc_macro_hygiene, try_trait)]
+#![feature(decl_macro, proc_macro_hygiene)]

 #[macro_use]
 extern crate gettext_macros;


@ -372,7 +372,7 @@ fn ban(id: i32, conn: &Connection, worker: &ScheduledThreadPool) -> Result<(), E
.unwrap_or(false) .unwrap_or(false)
{ {
BlocklistedEmail::insert( BlocklistedEmail::insert(
&conn, conn,
NewBlocklistedEmail { NewBlocklistedEmail {
email_address: u.email.clone().unwrap(), email_address: u.email.clone().unwrap(),
note: "Banned".to_string(), note: "Banned".to_string(),


@@ -77,12 +77,7 @@ pub fn upload(
            .map(|ext| format!(".{}", ext))
        })
        .unwrap_or_default();
-    let dest = format!(
-        "{}/{}{}",
-        CONFIG.media_directory,
-        GUID::rand().to_string(),
-        ext
-    );
+    let dest = format!("{}/{}{}", CONFIG.media_directory, GUID::rand(), ext);

    match fields["file"][0].data {
        SavedData::Bytes(ref bytes) => fs::write(&dest, bytes)
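`GUID::rand().to_string()` inside `format!` allocated an intermediate `String` that `format!` would have produced anyway from the `Display` impl (clippy's `to_string_in_format_args`). A stand-in `Guid` type to show the difference (not the crate Plume actually uses):

```rust
use std::fmt;

struct Guid(u128);

impl fmt::Display for Guid {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:032x}", self.0)
    }
}

fn main() {
    let id = Guid(0xdead_beef);
    // `format!` calls `Display::fmt` itself, so the `.to_string()` below only
    // adds an extra allocation before formatting the same characters.
    let with_extra_alloc = format!("media/{}.png", id.to_string());
    let direct = format!("media/{}.png", id);
    assert_eq!(with_extra_alloc, direct);
}
```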


@ -425,7 +425,7 @@ pub fn create(
Ok(_) => ValidationErrors::new(), Ok(_) => ValidationErrors::new(),
Err(e) => e, Err(e) => e,
}; };
if Post::find_by_slug(&conn, &slug, blog.id).is_ok() { if Post::find_by_slug(&conn, slug, blog.id).is_ok() {
errors.add( errors.add(
"title", "title",
ValidationError { ValidationError {


@ -54,7 +54,7 @@ pub fn search(query: Option<Form<SearchQuery>>, conn: DbConn, rockets: PlumeRock
let query = query.map(Form::into_inner).unwrap_or_default(); let query = query.map(Form::into_inner).unwrap_or_default();
let page = query.page.unwrap_or_default(); let page = query.page.unwrap_or_default();
let mut parsed_query = let mut parsed_query =
Query::from_str(&query.q.as_deref().unwrap_or_default()).unwrap_or_default(); Query::from_str(query.q.as_deref().unwrap_or_default()).unwrap_or_default();
param_to_query!(query, parsed_query; normal: title, subtitle, content, tag, param_to_query!(query, parsed_query; normal: title, subtitle, content, tag,
instance, author, blog, lang, license; instance, author, blog, lang, license;


@ -553,14 +553,14 @@ pub fn ap_followers(
#[get("/@/<name>/atom.xml")] #[get("/@/<name>/atom.xml")]
pub fn atom_feed(name: String, conn: DbConn) -> Option<Content<String>> { pub fn atom_feed(name: String, conn: DbConn) -> Option<Content<String>> {
let conn = &conn; let conn = &conn;
let author = User::find_by_fqn(&conn, &name).ok()?; let author = User::find_by_fqn(conn, &name).ok()?;
let entries = Post::get_recents_for_author(&conn, &author, 15).ok()?; let entries = Post::get_recents_for_author(conn, &author, 15).ok()?;
let uri = Instance::get_local() let uri = Instance::get_local()
.ok()? .ok()?
.compute_box("@", &name, "atom.xml"); .compute_box("@", &name, "atom.xml");
let title = &author.display_name; let title = &author.display_name;
let default_updated = &author.creation_date; let default_updated = &author.creation_date;
let feed = super::build_atom_feed(entries, &uri, title, default_updated, &conn); let feed = super::build_atom_feed(entries, &uri, title, default_updated, conn);
Some(Content( Some(Content(
ContentType::new("application", "atom+xml"), ContentType::new("application", "atom+xml"),
feed.to_string(), feed.to_string(),


@ -41,7 +41,7 @@ impl IntoContext for (&DbConn, &PlumeRocket) {
Option<(String, String)>, Option<(String, String)>,
) { ) {
( (
&self.0, self.0,
&self.1.intl.catalog, &self.1.intl.catalog,
self.1.user.clone(), self.1.user.clone(),
self.1.flash_msg.clone(), self.1.flash_msg.clone(),