mirror of
https://git.alemi.dev/fedicharter.git
synced 2024-11-12 20:09:21 +01:00
feat: initial simple implementation
This commit is contained in:
parent
746ef92b94
commit
68cbe3c062
2 changed files with 188 additions and 0 deletions
18
Cargo.toml
Normal file
18
Cargo.toml
Normal file
|
@ -0,0 +1,18 @@
|
|||
[package]
name = "akkoma-mapper"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
async-recursion = "1.0.5"                               # enables the recursive async fn scan_instance
axum = "0.6.20"                                         # HTTP server exposing the /crawl route
clap = { version = "4.4.6", features = ["derive"] }     # CLI argument parsing (CliArgs)
reqwest = { version = "0.11.20", features = ["json"] }  # HTTP client fetching nodeinfo documents
serde = { version = "1.0.188", features = ["derive"] }  # (de)serialization derives for graph types
serde_json = "1.0.107"                                  # untyped JSON values for nodeinfo payloads
tokio = { version = "1.32.0", features = ["full"] }     # async runtime, Mutex, task spawning
tracing = "0.1.37"                                      # structured logging macros
tracing-subscriber = "0.3.17"                           # log formatting/output backend
|
||||
|
170
src/main.rs
Normal file
170
src/main.rs
Normal file
|
@ -0,0 +1,170 @@
|
|||
use std::{sync::Arc, collections::{HashMap, HashSet}, time::Duration, net::SocketAddr};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
use clap::Parser;
|
||||
|
||||
use axum::{routing::get, extract::Query, Json, Router};
|
||||
|
||||
#[derive(Debug, Parser)]
/// an API crawling akkoma bubble instances network and creating a map
struct CliArgs {
    /// start domain for crawl, without proto base
    domain: String,
    // NOTE(review): main() never calls CliArgs::parse(), so this argument is
    // currently dead — the crawl start domain actually comes from the
    // /crawl?domain=… query parameter. Confirm whether CLI parsing was intended.
}
|
||||
|
||||
/// Query-string parameters accepted by the GET /crawl route.
#[derive(Debug, Deserialize)]
struct Params {
    /// domain to start crawling from, without protocol prefix (e.g. "example.social")
    domain: String
}
|
||||
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
// initialize tracing
|
||||
tracing_subscriber::fmt::init();
|
||||
|
||||
// build our application with a route
|
||||
let app = Router::new()
|
||||
.route("/crawl", get(route_scan_domain));
|
||||
|
||||
// run our app with hyper, listening globally on port 3000
|
||||
let addr = SocketAddr::from(([127, 0, 0, 1], 18811));
|
||||
tracing::debug!("listening on {}", addr);
|
||||
axum::Server::bind(&addr)
|
||||
.serve(app.into_make_service())
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
async fn route_scan_domain(Query(params): Query<Params>) -> Json<MapResult> {
|
||||
tracing::info!("starting new crawl from {}", params.domain);
|
||||
|
||||
let map = Arc::new(Mutex::new(Map {
|
||||
scanned: HashSet::new(),
|
||||
name_to_id: HashMap::new(),
|
||||
counter: 0,
|
||||
nodes: Vec::new(),
|
||||
vertices: Vec::new(),
|
||||
}));
|
||||
|
||||
scan_instance(¶ms.domain, map.clone()).await;
|
||||
|
||||
let _map = map.lock().await;
|
||||
|
||||
axum::Json(MapResult {
|
||||
nodes: _map.nodes.clone(),
|
||||
vertices: _map.vertices.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Shared crawl state: tracks which domains have been visited and the
/// graph (nodes + vertices) accumulated so far.
struct Map {
    /// maps an instance domain/label to its node id
    name_to_id: HashMap<String, usize>,
    /// domains already visited (or currently being visited)
    scanned: HashSet<String>,
    /// graph nodes discovered so far
    nodes: Vec<Node>,
    /// graph edges discovered so far
    vertices: Vec<Vertex>,
    /// next node id to hand out
    counter: usize,
}
|
||||
|
||||
impl Map {
|
||||
fn scanned(&mut self, name: &str) -> bool {
|
||||
let out = self.scanned.contains(name);
|
||||
self.scanned.insert(name.to_string());
|
||||
out
|
||||
}
|
||||
|
||||
fn node(&mut self, name: String) -> usize {
|
||||
match self.name_to_id.get(&name) {
|
||||
Some(id) => *id,
|
||||
None => {
|
||||
let id = self.counter;
|
||||
self.name_to_id.insert(name.clone(), id);
|
||||
self.nodes.push(Node { label: name, id });
|
||||
self.counter += 1;
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn vertex(&mut self, from_name: String, to_name: String) {
|
||||
let from = self.node(from_name);
|
||||
let to = self.node(to_name);
|
||||
self.vertices.push(Vertex { from, to });
|
||||
}
|
||||
}
|
||||
|
||||
/// A graph node: one fediverse instance, identified by a numeric id
/// with its domain as label.
#[derive(Serialize, Clone, Debug)]
struct Node {
    /// sequential id assigned by Map::node
    id: usize,
    /// instance domain this node represents
    label: String,
}
|
||||
|
||||
/// A directed edge between two instances, referencing Node ids.
#[derive(Serialize, Clone, Debug)]
struct Vertex {
    /// id of the instance that lists the other in its bubble
    from: usize,
    /// id of the listed bubble instance
    to: usize,
}
|
||||
|
||||
/// JSON response body of GET /crawl: the complete crawled graph.
#[derive(Serialize, Clone, Debug)]
struct MapResult {
    nodes: Vec<Node>,
    vertices: Vec<Vertex>,
}
|
||||
|
||||
/// Recursively crawls `domain`'s nodeinfo document and all instances in its
/// "local bubble", adding nodes and edges to the shared `map`.
///
/// Returns `None` when the domain was already scanned, the metadata fetch
/// failed, or the nodeinfo payload is missing an expected field; `Some(())`
/// once the domain and its reachable bubble peers were processed.
#[async_recursion::async_recursion]
async fn scan_instance(domain: &str, map: Arc<Mutex<Map>>) -> Option<()> {
    // scanned() both checks and marks the domain under one lock acquisition,
    // so concurrent tasks racing on the same domain bail out here after the
    // first one wins the lock
    if map.lock().await.scanned(domain) { return None };

    tracing::debug!("scanning instance {}", domain);
    let response = match instance_metadata(domain).await {
        Ok(r) => r,
        Err(e) => {
            // unreachable/broken instances are logged and skipped, not fatal
            tracing::warn!("could not fetch metadata for {}: {}", domain, e);
            return None
        }
    };

    // `?` on these lookups aborts with None when the nodeinfo document
    // lacks the akkoma-specific metadata fields
    let node_name : String = response
        .get("metadata")?
        .get("nodeName")?
        .as_str()?
        .to_string();

    tracing::info!("adding instance {}", node_name);

    // NOTE(review): the node is keyed by `domain`, not by the fetched
    // `node_name` — node_name is only used for the log line above
    map.lock().await.node(domain.to_string());

    let mut tasks = Vec::new();

    for bubble_instance in response
        .get("metadata")?
        .get("localBubbleInstances")?
        .as_array()?
        .iter()
        .filter_map(|x| x.as_str().map(|x| x.to_string()))
    {
        let _map = map.clone();
        // the edge is recorded even if the peer later fails to scan
        map.lock().await.vertex(domain.to_string(), bubble_instance.clone());
        // each peer is crawled in its own task so siblings run concurrently
        tasks.push(tokio::spawn(async move { scan_instance(&bubble_instance, _map).await; }));
    }

    // wait for the whole subtree before reporting this domain as done;
    // panics (via expect) if a spawned task itself panicked
    for t in tasks {
        t.await.expect("could not join task");
    }

    Some(())
}
|
||||
|
||||
async fn instance_metadata(domain: &str) -> reqwest::Result<serde_json::Value> {
|
||||
reqwest::Client::builder()
|
||||
.timeout(Duration::from_secs(5))
|
||||
.build()?
|
||||
.get(format!("https://{}/nodeinfo/2.0.json", domain))
|
||||
.send()
|
||||
.await?
|
||||
.json::<serde_json::Value>()
|
||||
.await
|
||||
}
|
||||
|
Loading…
Reference in a new issue