pub mod server;
pub mod model;
pub mod routes;
pub mod errors;

#[cfg(feature = "migrations")]
mod migrations;

#[cfg(feature = "migrations")]
use sea_orm_migration::MigratorTrait;

use clap::{Parser, Subcommand};
use sea_orm::{ColumnTrait, ConnectOptions, Database, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder};

pub use errors::UpubResult as Result;

use tower_http::{cors::CorsLayer, trace::TraceLayer};

use crate::server::fetcher::Fetchable;

pub const VERSION: &str = env!("CARGO_PKG_VERSION");

#[derive(Parser)]
/// all names were taken
struct CliArgs {
	#[clap(subcommand)]
	/// command to run
	command: CliCommand,

	#[arg(short = 'd', long = "db", default_value = "sqlite://./upub.db")]
	/// database connection uri
	database: String,

	#[arg(short = 'D', long, default_value = "http://localhost:3000")]
	/// instance base domain, for AP ids
	domain: String,

	#[arg(long, default_value_t = false)]
	/// run with debug level tracing
	debug: bool,
}

#[derive(Clone, Subcommand)]
enum CliCommand {
	/// run fediverse server
	Serve,

	#[cfg(feature = "migrations")]
	/// apply database migrations
	Migrate,

	#[cfg(feature = "faker")]
	/// generate fake user, note and activity
	Faker {
		/// how many fake statuses to insert for root user
		count: u64,
	},

	/// fetch a single AP object
	Fetch {
		/// object id, or uri, to fetch
		uri: String,

		#[arg(long, default_value_t = false)]
		/// store fetched object in local db
		save: bool,
	},

	/// follow a remote relay
	Relay {
		/// actor url, same as with pleroma
		actor: String,

		#[arg(long, default_value_t = false)]
		/// instead of sending a follow request, send an accept
		accept: bool,
	},

	/// run db maintenance tasks
	Fix {
		#[arg(long, default_value_t = false)]
		/// fix likes counts for posts
		likes: bool,

		#[arg(long, default_value_t = false)]
		/// fix shares counts for posts
		shares: bool,

		#[arg(long, default_value_t = false)]
		/// fix replies counts for posts
		replies: bool,
	},
}

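// Example invocations (a sketch: the `upub` binary name and these flag/value
// combinations are assumptions based on the clap definitions above, not
// documented usage):
//
//   upub --db sqlite://./upub.db migrate
//   upub -D https://example.org serve
//   upub fetch https://remote.example/objects/1234 --save
//   upub relay https://relay.example/actor
//   upub relay https://relay.example/actor --accept
//   upub fix --likes --shares --replies
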
#[tokio::main]
async fn main() {
	let args = CliArgs::parse();

	tracing_subscriber::fmt()
		.compact()
		.with_max_level(if args.debug { tracing::Level::DEBUG } else { tracing::Level::INFO })
		.init();

	// TODO can i do connectoptions.into() or .connect() and skip these ugly bindings?
	let mut opts = ConnectOptions::new(&args.database);
	opts.sqlx_logging_level(tracing::log::LevelFilter::Debug);

	let db = Database::connect(opts)
		.await.expect("error connecting to db");

	match args.command {
		#[cfg(feature = "migrations")]
		CliCommand::Migrate => migrations::Migrator::up(&db, None)
			.await.expect("error applying migrations"),

		#[cfg(feature = "faker")]
		CliCommand::Faker { count } => model::faker::faker(&db, args.domain, count)
			.await.expect("error creating fake entities"),

		CliCommand::Fetch { uri, save } => fetch(db, args.domain, uri, save)
			.await.expect("error fetching object"),

		CliCommand::Relay { actor, accept } => {
			let ctx = server::Context::new(db, args.domain)
				.await.expect("failed creating server context");

			let aid = ctx.aid(uuid::Uuid::new_v4().to_string());

			let mut activity_model = model::activity::Model {
				id: aid.clone(),
				activity_type: apb::ActivityType::Follow,
				actor: ctx.base(),
				object: Some(actor.clone()),
				target: None,
				published: chrono::Utc::now(),
				to: model::Audience(vec![actor.clone()]),
				bto: model::Audience::default(),
				cc: model::Audience(vec![apb::target::PUBLIC.to_string()]),
				bcc: model::Audience::default(),
			};

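			// with --accept, answer the most recent pending Follow from this actor
			// instead of sending a new Follow of our own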
			if accept {
				let follow_req = model::activity::Entity::find()
					.filter(model::activity::Column::ActivityType.eq("Follow"))
					.filter(model::activity::Column::Actor.eq(&actor))
					.filter(model::activity::Column::Object.eq(ctx.base()))
					.order_by_desc(model::activity::Column::Published)
					.one(ctx.db())
					.await
					.expect("failed querying db for relay follow req")
					.expect("no follow request to accept");
				activity_model.activity_type = apb::ActivityType::Accept(apb::AcceptType::Accept);
				activity_model.object = Some(follow_req.id);
			}

			model::activity::Entity::insert(activity_model.into_active_model())
				.exec(ctx.db()).await.expect("could not insert activity in db");

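			// dispatch presumably queues delivery of the activity to the listed
			// recipients (the relay actor and the public collection)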
			ctx.dispatch(&ctx.base(), vec![actor, apb::target::PUBLIC.to_string()], &aid, None).await
				.expect("could not dispatch relay activity");
		},

		CliCommand::Fix { likes, shares, replies } =>
			fix(db, likes, shares, replies)
				.await
				.expect("failed running fix task"),

		CliCommand::Serve => {
			let ctx = server::Context::new(db, args.domain)
				.await.expect("failed creating server context");

			use routes::activitypub::ActivityPubRouter;
			use routes::mastodon::MastodonRouter;

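			// compose the app router: ActivityPub routes always, Mastodon routes only
			// when that feature is enabled, plus permissive CORS and request tracing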
			let router = axum::Router::new()
				.ap_routes()
				.mastodon_routes() // no-op if mastodon feature is disabled
				.layer(CorsLayer::permissive())
				.layer(TraceLayer::new_for_http())
				.with_state(ctx);

			// run our app with hyper, listening locally on port 3000
			let listener = tokio::net::TcpListener::bind("127.0.0.1:3000")
				.await.expect("could not bind tcp socket");

			axum::serve(listener, router)
				.await
				.expect("failed serving application")
		},
	}
}

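/// Resolve a single ActivityPub object by id or uri, print it as pretty JSON
/// and, when `save` is set, persist it into the local database.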
async fn fetch(db: sea_orm::DatabaseConnection, domain: String, uri: String, save: bool) -> crate::Result<()> {
	use apb::Base;

	let ctx = server::Context::new(db, domain)
		.await.expect("failed creating server context");

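	// start from a bare link node and let the fetcher resolve it to a full object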
	let mut node = apb::Node::link(uri.to_string());
	node.fetch(&ctx).await?;

	let obj = node.get().expect("node still empty after fetch?");

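	// when saving, route the fetched object to the matching table by its base type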
	if save {
		match obj.base_type() {
			Some(apb::BaseType::Object(apb::ObjectType::Actor(_))) => {
				model::user::Entity::insert(
					model::user::Model::new(obj).unwrap().into_active_model()
				).exec(ctx.db()).await.unwrap();
			},
			Some(apb::BaseType::Object(apb::ObjectType::Activity(_))) => {
				model::activity::Entity::insert(
					model::activity::Model::new(obj).unwrap().into_active_model()
				).exec(ctx.db()).await.unwrap();
			},
			Some(apb::BaseType::Object(apb::ObjectType::Note)) => {
				model::object::Entity::insert(
					model::object::Model::new(obj).unwrap().into_active_model()
				).exec(ctx.db()).await.unwrap();
			},
			Some(apb::BaseType::Object(t)) => tracing::warn!("not implemented: {:?}", t),
			Some(apb::BaseType::Link(_)) => tracing::error!("fetched another link?"),
			None => tracing::error!("no type on object"),
		}
	}

	println!("{}", serde_json::to_string_pretty(&obj).unwrap());

	Ok(())
}

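/// Recount denormalized counters (likes, shares, replies) for stored objects
/// and write the corrected totals back to the object table.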
async fn fix(db: sea_orm::DatabaseConnection, likes: bool, shares: bool, replies: bool) -> crate::Result<()> {
	use futures::TryStreamExt;

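	// each pass streams the relevant rows and tallies a count per object id inside
	// a scoped block, so the read stream is dropped before the updates below run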
	if likes {
		tracing::info!("fixing likes...");
		let mut store = std::collections::HashMap::new();
		{
			let mut stream = model::like::Entity::find().stream(&db).await?;
			while let Some(like) = stream.try_next().await? {
				store.insert(like.likes.clone(), store.get(&like.likes).unwrap_or(&0) + 1);
			}
		}

		for (k, v) in store {
			let m = model::object::ActiveModel {
				id: sea_orm::Set(k.clone()),
				likes: sea_orm::Set(v),
				..Default::default()
			};
			if let Err(e) = model::object::Entity::update(m)
				.exec(&db)
				.await
			{
				tracing::warn!("record not updated ({k}): {e}");
			}
		}
	}

	if shares {
		tracing::info!("fixing shares...");
		let mut store = std::collections::HashMap::new();
		{
			let mut stream = model::share::Entity::find().stream(&db).await?;
			while let Some(share) = stream.try_next().await? {
				store.insert(share.shares.clone(), store.get(&share.shares).unwrap_or(&0) + 1);
			}
		}

		for (k, v) in store {
			let m = model::object::ActiveModel {
				id: sea_orm::Set(k.clone()),
				shares: sea_orm::Set(v),
				..Default::default()
			};
			if let Err(e) = model::object::Entity::update(m)
				.exec(&db)
				.await
			{
				tracing::warn!("record not updated ({k}): {e}");
			}
		}
	}

	if replies {
		tracing::info!("fixing replies...");
		let mut store = std::collections::HashMap::new();
		{
			let mut stream = model::object::Entity::find().stream(&db).await?;
			while let Some(object) = stream.try_next().await? {
				if let Some(reply) = object.in_reply_to {
					let before = store.get(&reply).unwrap_or(&0);
					store.insert(reply, before + 1);
				}
			}
		}

		for (k, v) in store {
			let m = model::object::ActiveModel {
				id: sea_orm::Set(k.clone()),
				comments: sea_orm::Set(v),
				..Default::default()
			};
			if let Err(e) = model::object::Entity::update(m)
				.exec(&db)
				.await
			{
				tracing::warn!("record not updated ({k}): {e}");
			}
		}
	}

	tracing::info!("done running fix tasks");
	Ok(())
}