Skip to content

Commit

Permalink
refactor(feed): improve structure & compatibility (#29)
Browse files Browse the repository at this point in the history
* chore(crates): init feed crate

* refactor(feed): move feed impl to crate

* fix: format code

* chore(feed): add unit test

* fix(feed): invalid feed url

* fix: format code

* refactor(feed): read entry links

* refactor(feed): move from_entry to impl
  • Loading branch information
kwaa authored May 11, 2024
1 parent 272a9e3 commit 10c8145
Show file tree
Hide file tree
Showing 22 changed files with 318 additions and 238 deletions.
24 changes: 20 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ members = [
"crates/backend",
"crates/db_migration",
"crates/db_schema",
"crates/feed",
"crates/nodeinfo",
"crates/openapi",
"crates/scheduler",
Expand All @@ -64,6 +65,7 @@ hatsu_apub = { path = "./crates/apub" }
hatsu_backend = { path = "./crates/backend" }
hatsu_db_migration = { path = "./crates/db_migration" }
hatsu_db_schema = { path = "./crates/db_schema" }
hatsu_feed = { path = "./crates/feed" }
hatsu_nodeinfo = { path = "./crates/nodeinfo" }
hatsu_openapi = { path = "./crates/openapi" }
hatsu_scheduler = { path = "./crates/scheduler" }
Expand Down
3 changes: 1 addition & 2 deletions crates/apub/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ path = "src/lib.rs"

[dependencies]
hatsu_db_schema = { workspace = true }
hatsu_feed = { workspace = true }
hatsu_utils = { workspace = true }
activitypub_federation = { workspace = true }
anyhow = { workspace = true }
Expand All @@ -25,9 +26,7 @@ async-trait = { workspace = true }
axum = { workspace = true }
chrono = { workspace = true }
enum_delegate = { workspace = true }
feed-rs = { workspace = true }
futures = { workspace = true }
reqwest = { workspace = true }
sea-orm = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
Expand Down
94 changes: 0 additions & 94 deletions crates/apub/src/actors/db_user_feed_item_impl.rs

This file was deleted.

23 changes: 12 additions & 11 deletions crates/apub/src/actors/db_user_impl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,30 +8,31 @@ use activitypub_federation::{
traits::{ActivityHandler, Actor},
};
use hatsu_db_schema::{prelude::ReceivedFollow, user::Model as DbUser};
use hatsu_utils::{user::feed::Feed, AppData, AppError};
use hatsu_feed::SiteFeed;
use hatsu_utils::{AppData, AppError};
use sea_orm::ModelTrait;
use serde::Serialize;
use url::Url;

use super::{ApubUser, JsonUserFeed};
use super::ApubUser;

impl ApubUser {
pub async fn new(domain: &str, preferred_username: &str) -> Result<Self, AppError> {
let keypair = generate_actor_keypair()?;

let user_feed = Feed::get_site_feed(preferred_username.to_string()).await?;
let site_feed = SiteFeed::get(preferred_username.to_string()).await?;

let feed = JsonUserFeed::get_feed(user_feed.clone(), preferred_username).await?;
let user_feed = SiteFeed::get_user_feed(site_feed.clone(), preferred_username).await?;

let user_url = hatsu_utils::url::generate_user_url(domain, preferred_username)?;

let user = DbUser {
id: user_url.to_string(),
name: feed.title,
name: user_feed.title,
preferred_username: preferred_username.to_string(),
summary: feed.description,
icon: feed.icon.map(|url| url.to_string()),
image: feed
summary: user_feed.description,
icon: user_feed.icon.map(|url| url.to_string()),
image: user_feed
.hatsu
.and_then(|hatsu| hatsu.banner_image.map(|url| url.to_string())),
inbox: user_url
Expand All @@ -49,9 +50,9 @@ impl ApubUser {
local: true,
public_key: keypair.public_key,
private_key: Some(keypair.private_key),
feed_json: user_feed.json.map(|url| url.to_string()),
feed_atom: user_feed.atom.map(|url| url.to_string()),
feed_rss: user_feed.rss.map(|url| url.to_string()),
feed_json: site_feed.json.map(|url| url.to_string()),
feed_atom: site_feed.atom.map(|url| url.to_string()),
feed_rss: site_feed.rss.map(|url| url.to_string()),
last_refreshed_at: hatsu_utils::date::now(),
};

Expand Down
3 changes: 0 additions & 3 deletions crates/apub/src/actors/mod.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
mod db_user;
mod db_user_feed_item;
mod db_user_feed_item_impl;
mod db_user_impl;
mod service;

pub use db_user::ApubUser;
pub use db_user_feed_item::{ApubUserFeedItem, JsonUserFeed, JsonUserFeedHatsu, JsonUserFeedItem};
pub use service::{PublicKeySchema, Service, ServiceAttachment, ServiceImage};
11 changes: 6 additions & 5 deletions crates/apub/src/objects/note.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ use activitypub_federation::{
traits::{Actor, Object},
};
use hatsu_db_schema::prelude::Post;
use hatsu_feed::UserFeedItem;
use hatsu_utils::{markdown::markdown_to_html, AppData, AppError};
use sea_orm::EntityTrait;
use serde::{Deserialize, Serialize};
Expand All @@ -17,7 +18,7 @@ use url::Url;
use utoipa::ToSchema;

use crate::{
actors::{ApubUser, JsonUserFeedItem},
actors::ApubUser,
links::{Hashtag, Tag},
objects::ApubPost,
};
Expand Down Expand Up @@ -64,7 +65,7 @@ impl Note {
// https://example.com/foo/bar => https://example.com/foo/bar
// /foo/bar => https://example.com/foo/bar
// foo/bar => https://example.com/foo/bar
pub fn parse_id(actor: &ApubUser, json: &JsonUserFeedItem) -> Result<Url, AppError> {
pub fn parse_id(actor: &ApubUser, json: &UserFeedItem) -> Result<Url, AppError> {
if let Some(url) = &json.url {
Ok(url.clone())
} else {
Expand All @@ -77,7 +78,7 @@ impl Note {

pub fn new(
actor: &ApubUser,
json: JsonUserFeedItem,
json: UserFeedItem,
published: Option<String>,
updated: Option<String>,
data: &Data<AppData>,
Expand Down Expand Up @@ -177,15 +178,15 @@ impl Note {

pub fn create(
actor: &ApubUser,
json: JsonUserFeedItem,
json: UserFeedItem,
data: &Data<AppData>,
) -> Result<Self, AppError> {
Self::new(actor, json, Some(hatsu_utils::date::now()), None, data)
}

pub fn update(
actor: &ApubUser,
json: JsonUserFeedItem,
json: UserFeedItem,
published: String,
data: &Data<AppData>,
) -> Result<Self, AppError> {
Expand Down
32 changes: 32 additions & 0 deletions crates/feed/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Manifest for the `hatsu_feed` crate (feed discovery and parsing),
# added as a new member of the Hatsu workspace.
[package]
name = "hatsu_feed"
# Package metadata is inherited from the workspace root.
version.workspace = true
edition.workspace = true
publish.workspace = true
readme.workspace = true
license.workspace = true
authors.workspace = true
description.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true

[lib]
name = "hatsu_feed"
path = "src/lib.rs"

[dependencies]
# Workspace-internal crates.
hatsu_db_schema = { workspace = true }
hatsu_utils = { workspace = true }
# External dependencies; versions are pinned at the workspace root.
activitypub_federation = { workspace = true }
async-recursion = { workspace = true }
chrono = { workspace = true }
feed-rs = { workspace = true }
reqwest = { workspace = true }
scraper = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
url = { workspace = true }

[dev-dependencies]
# Async runtime, used by unit tests only.
tokio = { workspace = true }
7 changes: 7 additions & 0 deletions crates/feed/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
//! `hatsu_feed`: feed handling for the Hatsu workspace.
//! Declares the crate's internal modules and re-exports their public types.

mod site_feed;
mod user_feed;
mod user_feed_item;

// Public API surface: site-level feed discovery (`SiteFeed`) plus the
// per-user feed and feed-item representations.
pub use site_feed::SiteFeed;
pub use user_feed::{UserFeed, UserFeedHatsu};
pub use user_feed_item::{UserFeedItem, WrappedUserFeedItem};
28 changes: 23 additions & 5 deletions crates/utils/src/user/feed.rs → crates/feed/src/site_feed.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
use hatsu_utils::{url::absolutize_relative_url, AppError};
use scraper::{ElementRef, Html, Selector};
use serde::{Deserialize, Serialize};
use url::Url;

use crate::{url::absolutize_relative_url, AppError};
use crate::UserFeed;

/// User Site Feed
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Feed {
pub struct SiteFeed {
#[serde(skip_serializing_if = "Option::is_none")]
pub json: Option<Url>,
#[serde(skip_serializing_if = "Option::is_none")]
Expand All @@ -15,11 +15,11 @@ pub struct Feed {
pub rss: Option<Url>,
}

impl Feed {
impl SiteFeed {
/// # Panics
///
/// No panic here.
pub async fn get_site_feed(domain: String) -> Result<Self, AppError> {
pub async fn get(domain: String) -> Result<Self, AppError> {
fn feed_auto_discovery(head: &ElementRef, domain: &str, kind: &str) -> Option<Url> {
head.select(
&Selector::parse(&format!("link[rel=\"alternate\"][type=\"{kind}\"]")).unwrap(),
Expand Down Expand Up @@ -54,4 +54,22 @@ impl Feed {
},
)
}

/// Resolve `site_feed` into a parsed [`UserFeed`].
///
/// Formats are tried in priority order: JSON Feed first, then Atom,
/// then RSS. When the site exposes none of the three, a `not_found`
/// error naming `name` is returned.
pub async fn get_user_feed(site_feed: Self, name: &str) -> Result<UserFeed, AppError> {
    if let Some(url) = site_feed.json {
        // JSON Feed takes priority when present.
        Ok(UserFeed::parse_json_feed(url).await?)
    } else if let Some(url) = site_feed.atom.or(site_feed.rss) {
        // Atom and RSS share the same XML parser; Atom is preferred.
        Ok(UserFeed::parse_xml_feed(url).await?)
    } else {
        Err(AppError::not_found("Feed Url", name))
    }
}
}
Loading

0 comments on commit 10c8145

Please sign in to comment.