Compare commits

3 commits: `2d92f9c355...89b2b598f4`

| Author | SHA1 | Date |
|---|---|---|
| | 89b2b598f4 | |
| | b427000364 | |
| | 7a9a6fc245 | |

7 changed files with 118 additions and 5 deletions
```diff
@@ -74,6 +74,9 @@ pub struct SecurityConfig {
 	#[serde(default)]
 	pub allow_public_debugger: bool,
 
+	#[serde(default)]
+	pub allow_public_search: bool,
+
 	#[serde_inline_default("changeme".to_string())]
 	pub proxy_secret: String,
 
@@ -87,16 +90,16 @@ pub struct SecurityConfig {
 	pub session_duration_hours: i64,
 
 	#[serde_inline_default(2)]
-	pub max_id_redirects: u32,
+	pub max_id_redirects: u32, // TODO not sure it fits here
 
 	#[serde_inline_default(20)]
-	pub thread_crawl_depth: u32,
+	pub thread_crawl_depth: u32, // TODO doesn't really fit here
 
 	#[serde_inline_default(30)]
-	pub job_expiration_days: u32,
+	pub job_expiration_days: u32, // TODO doesn't really fit here
 
 	#[serde_inline_default(100)]
-	pub reinsertion_attempt_limit: u32,
+	pub reinsertion_attempt_limit: u32, // TODO doesn't really fit here
 }
 
 #[serde_inline_default::serde_inline_default]
```
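The new `allow_public_search` flag is opt-in. A minimal sketch (not part of the diff; the TOML format and `toml` crate are assumptions) of why `#[serde(default)]` keeps config files written before this commit parsing unchanged:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct SecurityConfig {
	#[serde(default)]
	allow_public_search: bool,
}

fn main() {
	// an older config file omits the new key entirely...
	let cfg: SecurityConfig = toml::from_str("").unwrap();
	// ...and the flag falls back to bool::default(), i.e. false
	assert!(!cfg.allow_public_search);
}
```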
```diff
@@ -19,6 +19,7 @@ mod m20240703_000002_add_image_to_objects;
 mod m20240706_000001_add_error_to_jobs;
 mod m20240715_000001_add_quote_uri_to_objects;
 mod m20240715_000002_add_actors_fields_and_aliases;
+mod m20240811_000001_add_full_text_index;
 
 pub struct Migrator;
 
@@ -45,6 +46,7 @@ impl MigratorTrait for Migrator {
 			Box::new(m20240706_000001_add_error_to_jobs::Migration),
 			Box::new(m20240715_000001_add_quote_uri_to_objects::Migration),
 			Box::new(m20240715_000002_add_actors_fields_and_aliases::Migration),
+			Box::new(m20240811_000001_add_full_text_index::Migration),
 		]
 	}
 }
```
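Declaring the module and boxing its `Migration` in the list is all `MigratorTrait` needs; a brief sketch (not from the diff; the crate name `upub_migrations` is an assumption) of how the new migration then gets applied with the rest:

```rust
use sea_orm::DatabaseConnection;
use sea_orm_migration::MigratorTrait;

async fn migrate(db: &DatabaseConnection) -> Result<(), sea_orm::DbErr> {
	// applies every pending migration in order, now including
	// m20240811_000001_add_full_text_index
	upub_migrations::Migrator::up(db, None).await
}
```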
upub/migrations/src/m20240811_000001_add_full_text_index.rs (new file, +30)

```diff
@@ -0,0 +1,30 @@
+use sea_orm_migration::prelude::*;
+
+use crate::m20240524_000001_create_actor_activity_object_tables::Objects;
+
+#[derive(DeriveMigrationName)]
+pub struct Migration;
+
+#[async_trait::async_trait]
+impl MigrationTrait for Migration {
+	async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+		manager
+			.create_index(
+				Index::create()
+					.name("index-objects-content")
+					.table(Objects::Table)
+					.col(Objects::Audience)
+					.full_text()
+					.to_owned()
+			)
+			.await?;
+		Ok(())
+	}
+
+	async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
+		manager
+			.drop_index(Index::drop().name("index-objects-content").table(Objects::Table).to_owned())
+			.await?;
+		Ok(())
+	}
+}
```
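The `.full_text()` call only sets an index type; the backend-specific SQL is produced at build time. A sketch (not part of the commit) of rendering an equivalent statement through sea-query to inspect the output, assuming a MySQL-flavored backend since `FULLTEXT` is a MySQL-style index:

```rust
use sea_query::{Alias, Index, MysqlQueryBuilder};

fn main() {
	let stmt = Index::create()
		.name("index-objects-content")
		.table(Alias::new("objects"))
		.col(Alias::new("audience"))
		.full_text()
		.to_owned();
	// prints something like:
	// CREATE FULLTEXT INDEX `index-objects-content` ON `objects` (`audience`)
	println!("{}", stmt.to_string(MysqlQueryBuilder));
}
```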
```diff
@@ -1,9 +1,12 @@
 use apb::{LD, ActorMut, BaseMut, ObjectMut, PublicKeyMut};
 use axum::{extract::{Path, Query, State}, http::HeaderMap, response::{IntoResponse, Redirect, Response}};
 use reqwest::Method;
+use sea_orm::{Condition, ColumnTrait};
 use upub::{traits::{Cloaker, Fetcher}, Context};
 
-use crate::{builders::JsonLD, ApiError, AuthIdentity};
+use crate::{builders::JsonLD, ApiError, AuthIdentity, Identity};
 
+use super::{PaginatedSearch, Pagination};
+
 
 pub async fn view(
@@ -39,6 +42,43 @@ pub async fn view(
 	).into_response())
 }
 
+pub async fn search(
+	State(ctx): State<Context>,
+	AuthIdentity(auth): AuthIdentity,
+	Query(page): Query<PaginatedSearch>,
+) -> crate::ApiResult<JsonLD<serde_json::Value>> {
+	if !auth.is_local() && ctx.cfg().security.allow_public_search {
+		return Err(crate::ApiError::forbidden());
+	}
+
+	let mut filter = Condition::any()
+		.add(auth.filter());
+
+	if let Identity::Local { ref id, .. } = auth {
+		filter = filter.add(upub::model::object::Column::AttributedTo.eq(id));
+	}
+
+	filter = Condition::all()
+		.add(upub::model::object::Column::Content.like(page.q))
+		.add(filter);
+
+	// TODO lmao rethink this all
+	let page = Pagination {
+		offset: page.offset,
+		batch: page.batch,
+	};
+
+	crate::builders::paginate_feed(
+		upub::url!(ctx, "/search"),
+		filter,
+		ctx.db(),
+		page,
+		auth.my_id(),
+		false,
+	)
+		.await
+}
+
 #[derive(Debug, serde::Deserialize)]
 pub struct ProxyQuery {
 	uri: String,
```
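A hypothetical client-side call against the new handler (not from the diff; the instance URL and token are invented, and upub already depends on reqwest):

```rust
use reqwest::Client;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
	let res = Client::new()
		.get("https://upub.example/search")
		.query(&[("q", "federation"), ("batch", "20")])
		.header("Accept", "application/ld+json")
		.bearer_auth("SESSION-TOKEN")
		.send()
		.await?;
	// paginate_feed wraps matching objects in a JSON-LD collection page
	println!("{}", res.text().await?);
	Ok(())
}
```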
```diff
@@ -22,6 +22,7 @@ impl ActivityPubRouter for Router<upub::Context> {
 			// core server inbox/outbox, maybe for feeds? TODO do we need these?
 			.route("/", get(ap::application::view))
 			// fetch route, to debug and retreive remote objects
+			.route("/search", get(ap::application::search))
 			.route("/fetch", get(ap::application::ap_fetch))
 			.route("/proxy/:hmac/:uri", get(ap::application::cloak_proxy))
 			.route("/inbox", post(ap::inbox::post))
@@ -90,6 +91,14 @@ pub struct Pagination {
 	pub batch: Option<u64>,
 }
 
+#[derive(Debug, serde::Deserialize)]
+// TODO i don't really like how pleroma/mastodon do it actually, maybe change this?
+pub struct PaginatedSearch {
+	pub q: String,
+	pub offset: Option<u64>,
+	pub batch: Option<u64>,
+}
+
 pub struct CreationResult(pub String);
 impl IntoResponse for CreationResult {
 	fn into_response(self) -> axum::response::Response {
```
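A quick sketch of how axum's `Query` extractor fills `PaginatedSearch` from the query string (not part of the diff; the struct is repeated so the snippet stands alone, and serde_urlencoded is called directly to exercise the same serde path):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
pub struct PaginatedSearch {
	pub q: String,
	pub offset: Option<u64>,
	pub batch: Option<u64>,
}

fn main() {
	let page: PaginatedSearch =
		serde_urlencoded::from_str("q=hello+world&batch=20").unwrap();
	assert_eq!(page.q, "hello world");
	assert_eq!(page.offset, None); // omitted optionals default to None
	assert_eq!(page.batch, Some(20));

	// `q` has no default, so a request without it fails to deserialize
	assert!(serde_urlencoded::from_str::<PaginatedSearch>("batch=20").is_err());
}
```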
```diff
@@ -5,6 +5,7 @@ use upub::selector::{BatchFillable, RichActivity};
 
 use crate::activitypub::Pagination;
 
+#[deprecated = "just query directly maybe?"]
 pub async fn paginate_feed(
 	id: String,
 	filter: Condition,
```
```diff
@@ -1,5 +1,6 @@
 use std::sync::Arc;
 
+use apb::{Base, Collection};
 use leptos::*;
 use leptos_router::*;
 use crate::prelude::*;
@@ -24,6 +25,14 @@ pub fn SearchPage() -> impl IntoView {
 		}
 	);
 
+	let text_search = create_local_resource(
+		move || use_query_map().get().get("q").cloned().unwrap_or_default(),
+		move |q| {
+			let search = format!("{URL_BASE}/search?q={q}");
+			async move { Http::fetch::<serde_json::Value>(&search, auth).await.ok() }
+		}
+	);
+
 	view! {
 		<blockquote class="mt-3 mb-3">
 			<details open>
@@ -54,5 +63,24 @@ pub fn SearchPage() -> impl IntoView {
 			</div>
 		</details>
 	</blockquote>
+
+	{move || match text_search.get() {
+		None => Some(view! { <p class="center"><small>searching...</small></p> }.into_view()),
+		Some(None) => None,
+		Some(Some(items)) => Some(view! {
+			// TODO this is jank af! i should do the same thing i do for timelines, aka first process
+			// all items and store in cache and then pass a vec of strings here!!!
+			<For
+				each=move || items.ordered_items()
+				key=|item| item.id().unwrap_or_default().to_string()
+				children=move |item| {
+					view! {
+						<Item item=item.into() />
+						<hr />
+					}.into_view()
+				}
+			/ >
+		}.into_view())
+	}}
 	}
 }
```