Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

44 add cors handling #50

Merged
merged 6 commits into from
Oct 6, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
299 changes: 296 additions & 3 deletions Cargo.lock

Large diffs are not rendered by default.

20 changes: 16 additions & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "mangadex-desktop-api2"
version = "0.5.6"
version = "0.5.7"
authors = ["tonymushah <tonymushahdev06@yahoo.com>"]
license = "MIT OR Apache-2.0"
description = "An Actix server for downloading manga, chapters, covers from Mangadex"
Expand All @@ -23,20 +23,32 @@ url = { version = "2.4.1", features = ["serde"] }
mangadex-api = { version = "2.3.1", default-features = false, features = ["multi-thread", "utils", "deserializable-endpoint"] }
anyhow = "1.0.75"
serde_json = "1.0.107"
tokio = { version = "1.32.0", features = ["full"] }
tokio = { version = "1.32.0", features = ["sync", "rt", "macros", "rt-multi-thread"] }
actix-web = { version = "4" }
mangadex-api-schema-rust = { version = "0.4.1", default-features = false, features = ["serialize"] }
mangadex-api-types-rust = { version = "0.4", default-features = false }
log = "^0.4.20"
once_cell = "1.18.0"
fern = "0.6.2"
fern = {version = "0.6.2", optional = true}
futures = { version = "0.3.28" }
tokio-stream = { version = "0.1"}
async-stream = { version = "0.3"}
bytes = { version = "1.5"}
itertools = "0.11.0"
serde_qs = { version = "0", features = ["actix4"]}
async-trait = "0.1"
actix-cors = "0.6.4"

[dev-dependencies]
criterion = { version = "0.5", features = ["async_tokio"]}

[features]
default = []
use_fern = ["dep:fern"]

[[bin]]
name = "special-eureka-downloader"
required-features = ["use_fern"]

[[bench]]
name = "aggregate"
harness = false
45 changes: 45 additions & 0 deletions benches/aggregate.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
use std::sync::Arc;

use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use mangadex_desktop_api2::{settings::files_dirs::DirsOptions, utils::manga::MangaUtils};

/// Bench body: eagerly aggregate all downloaded chapters of `manga_id`
/// and serialize the result (serialization cost is part of the measure).
async fn aggregate(manga_id: uuid::Uuid) {
    let dirs = Arc::new(DirsOptions::new().unwrap());
    let utils = MangaUtils::new(dirs, Default::default()).with_id(manga_id.to_string());
    let aggregate = utils.aggregate_manga_chapters().await.unwrap();
    serde_json::to_string(&aggregate).unwrap();
}

/// Bench body: same aggregation as `aggregate`, but through the
/// stream-based ("async friendly") code path.
async fn aggregate_stream(manga_id: uuid::Uuid) {
    let dirs = Arc::new(DirsOptions::new().unwrap());
    let utils = MangaUtils::new(dirs, Default::default()).with_id(manga_id.to_string());
    let aggregate = utils.aggregate_manga_chapters_async_friendly().await.unwrap();
    serde_json::to_string(&aggregate).unwrap();
}

/// Compare the eager and stream-based aggregation strategies on one
/// hard-coded, locally downloaded manga id.
fn criterion_benchmark(c: &mut Criterion) {
    let runtime = tokio::runtime::Runtime::new().unwrap();
    // NOTE(review): assumes this manga is already downloaded locally — confirm.
    let manga_id = uuid::Uuid::try_parse("1c8f0358-d663-4d60-8590-b5e82890a1e3").unwrap();

    c.bench_with_input(
        BenchmarkId::new("aggregate", manga_id),
        &manga_id,
        |bencher, &id| {
            bencher
                .to_async(&runtime)
                .iter_batched(|| id, aggregate, criterion::BatchSize::LargeInput);
        },
    );
    c.bench_with_input(
        BenchmarkId::new("aggregate_stream", manga_id),
        &manga_id,
        |bencher, &id| {
            bencher
                .to_async(&runtime)
                .iter_batched(|| id, aggregate_stream, criterion::BatchSize::LargeInput);
        },
    );
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
File renamed without changes.
2 changes: 1 addition & 1 deletion src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use log::{info, warn};
use server::AppState;
mod r#core;

pub use r#core::{Error, ErrorType};
pub use crate::r#core::{Error, ErrorType};

pub mod download;
#[cfg(feature = "feeds")]
Expand Down
5 changes: 4 additions & 1 deletion src/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ use crate::methods::put::{
download_cover, download_cover_quality, download_manga_by_id, download_manga_cover,
download_manga_cover_quality, download_manga_covers,
};
use actix_cors::Cors;
use actix_web::body::MessageBody;
use actix_web::dev::{self, Server, ServiceFactory, ServiceRequest, ServiceResponse};
use actix_web::http::header::{self};
Expand Down Expand Up @@ -93,10 +94,12 @@ pub fn get_actix_app(
InitError = (),
> + 'static,
> {
let cors = Cors::default().allow_any_origin().send_wildcard();
App::new()
.app_data(app_state)
.wrap(ErrorHandlers::new().handler(StatusCode::NOT_FOUND, not_found_message))
.wrap(ErrorHandlers::new().handler(StatusCode::METHOD_NOT_ALLOWED, not_allowed_message))
.wrap(cors)
/*
get Methods
*/
Expand Down Expand Up @@ -152,6 +155,6 @@ pub fn launch_async_server(
Ok(
HttpServer::new(move || get_actix_app(app_state_ref.clone()))
.bind((address, port))?
.run(),
.run()
)
}
21 changes: 12 additions & 9 deletions src/utils/manga.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,15 +41,10 @@ impl<'a> MangaUtils {
) -> ManagerCoreResult<bool> {
let chapter_utils: ChapterUtils = From::from(self);
let chapter: ApiObject<ChapterAttributes> = chapter_utils.with_id(chap_id).get_chapter()?;
let mut is = false;
for relas in chapter.relationships {
if relas.type_ == RelationshipType::Manga
&& relas.id.hyphenated().to_string() == manga_id
{
is = true;
}
}
Ok(is)

Ok(chapter.relationships.iter().any(|relas| {
relas.type_ == RelationshipType::Manga && relas.id.hyphenated().to_string() == manga_id
}))
}
pub(self) fn find_all_downloades_by_manga_id(
&'a self,
Expand Down Expand Up @@ -489,6 +484,14 @@ impl MangaUtilsWithMangaId {
volumes,
})
}
/// Aggregate every locally downloaded chapter of this manga into a
/// [`MangaAggregate`] (volume -> chapter groups), using the stream-based
/// grouping helper rather than the eager in-memory variant.
///
/// # Errors
/// Propagates any error from reading the downloaded chapter data or from
/// the grouping step.
pub async fn aggregate_manga_chapters_async_friendly(&self) -> ManagerCoreResult<MangaAggregate>{
    // Pin the stream so it satisfies the helper's `Stream + Unpin` bound.
    let data = Box::pin(self.get_all_downloaded_chapter_data().await?);
    let volumes = super::manga_aggregate::stream::group_chapter_to_volume_aggregate(data).await?;
    Ok(MangaAggregate {
        result: ResultType::Ok,
        volumes,
    })
}
}

impl<'a> From<&'a MangaDownload> for MangaUtils {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,9 @@ use mangadex_api_schema_rust::{
ApiObject,
};

type ChapterHashMap = HashMap<String, Vec<ApiObject<ChapterAttributes>>>;
pub mod stream;

pub type ChapterHashMap = HashMap<String, Vec<ApiObject<ChapterAttributes>>>;

fn group_chapter_to_chapter_hash_map(input: Vec<ApiObject<ChapterAttributes>>) -> ChapterHashMap {
let mut data: ChapterHashMap = ChapterHashMap::new();
Expand Down
186 changes: 186 additions & 0 deletions src/utils/manga_aggregate/stream.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,186 @@
use std::{cmp::Ordering, collections::HashMap, io::Result, vec};

use futures::Stream;
use mangadex_api_schema_rust::{
v5::{
manga_aggregate::{ChapterAggregate, VolumeAggregate},
ChapterAttributes,
},
ApiObject,
};
use tokio_stream::StreamExt;

pub type ChapterHashMap = HashMap<String, Vec<ApiObject<ChapterAttributes>>>;

/// Group chapters by their chapter number.
///
/// Chapters with no chapter number are collected under the `"none"` key.
fn group_chapter_to_chapter_hash_map(input: Vec<ApiObject<ChapterAttributes>>) -> ChapterHashMap {
    let mut data: ChapterHashMap = ChapterHashMap::new();
    for chap in input {
        // Clone only the key string; the old code deep-cloned the whole
        // `ApiObject` on every iteration.
        let chapter = chap
            .attributes
            .chapter
            .clone()
            .unwrap_or_else(|| "none".to_string());
        // Entry API: one hash lookup instead of `get_mut` + `insert`.
        data.entry(chapter).or_default().push(chap);
    }
    data
}

/// Build a [`ChapterAggregate`] from one `(chapter number, chapters)` entry.
///
/// The first object supplies the canonical id; the remaining ids are
/// reported in `others`.
///
/// # Errors
/// Returns a `NotFound` I/O error when the chapter list is empty.
fn chap_hashmapentry_to_chapter_aggregate(
    input: (String, Vec<ApiObject<ChapterAttributes>>),
) -> Result<ChapterAggregate> {
    let (chapter, objects) = input;
    // Copy just the id; the old code cloned the entire first `ApiObject`.
    let id = objects.first().map(|first| first.id).ok_or_else(|| {
        std::io::Error::new(std::io::ErrorKind::NotFound, "The input vector is empty")
    })?;
    // `skip(1)` replaces the verbose `get(1..(len))` slice dance.
    let others: Vec<uuid::Uuid> = objects.iter().skip(1).map(|object| object.id).collect();
    Ok(ChapterAggregate {
        chapter,
        id,
        others,
        count: objects.len() as u32,
    })
}

fn group_chapter_to_chapter_aggregate(
input: Vec<ApiObject<ChapterAttributes>>,
) -> Result<Vec<ChapterAggregate>> {
let data = group_chapter_to_chapter_hash_map(input);
let mut returns: Vec<ChapterAggregate> = Vec::new();
for chunk in data {
returns.push(chap_hashmapentry_to_chapter_aggregate(chunk)?);
}
returns.sort_by(|a, b| {
let a_chp = match a.chapter.parse::<f32>() {
Ok(d) => d,
Err(_) => return Ordering::Equal,
};
let b_chp = match b.chapter.parse::<f32>() {
Ok(d) => d,
Err(_) => return Ordering::Equal,
};
a_chp.total_cmp(&b_chp)
});
Ok(returns)
}

/// Build a [`VolumeAggregate`] from one `(volume, chapters)` entry.
fn chapter_volume_hashmap_entry_to_volume_aggregate(
    (volume, chapters): (String, Vec<ApiObject<ChapterAttributes>>),
) -> Result<VolumeAggregate> {
    // Aggregate first so the resulting length can be reported as `count`.
    let chapters = group_chapter_to_chapter_aggregate(chapters)?;
    let count = chapters.len() as u32;
    Ok(VolumeAggregate {
        volume,
        count,
        chapters,
    })
}

pub type ChapterVolumeHashMap = HashMap<String, Vec<ApiObject<ChapterAttributes>>>;
/// Convert a stream of chapters to a HashMap with
/// - Key : volume number (`"none"` when the chapter has no volume)
/// - Value : the chapter attributes belonging to that volume
async fn group_chapter_to_volume_hash_map<I>(mut input: I) -> Result<ChapterVolumeHashMap>
where
    I: Stream<Item = ApiObject<ChapterAttributes>> + Unpin,
{
    let mut data: ChapterVolumeHashMap = ChapterVolumeHashMap::new();
    while let Some(chap) = input.next().await {
        // Clone only the key string; the old code deep-cloned the whole
        // `ApiObject` for every streamed chapter.
        let volume = chap
            .attributes
            .volume
            .clone()
            .unwrap_or_else(|| "none".to_string());
        // Entry API: one hash lookup instead of `get_mut` + `insert`.
        data.entry(volume).or_default().push(chap);
    }
    Ok(data)
}

pub async fn group_chapter_to_volume_aggregate<I>(input: I) -> Result<Vec<VolumeAggregate>>
where
I: Stream<Item = ApiObject<ChapterAttributes>> + Unpin,
{
let mut data: Vec<VolumeAggregate> = Vec::new();
for in_ in group_chapter_to_volume_hash_map(input).await? {
data.push(chapter_volume_hashmap_entry_to_volume_aggregate(in_)?);
}
data.sort_by(|a, b| {
let a = match a.volume.parse::<f32>() {
Ok(d) => d,
Err(_) => return Ordering::Equal,
};
let b = match b.volume.parse::<f32>() {
Ok(d) => d,
Err(_) => return Ordering::Equal,
};
a.total_cmp(&b)
});
Ok(data)
}

/*pub fn chapter_vec_to_chapter_aggregate_vec(input : Vec<ApiObject<ChapterAttributes>>) -> Result<()> {
ChapterAggregate{

}
Ok(())
}*/

#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use crate::{settings::files_dirs::DirsOptions, utils::manga::MangaUtils};

    use super::*;

    /// Prints each volume's chapter aggregate for a locally downloaded manga.
    /// NOTE(review): assumes this manga id exists in the local download dir.
    #[tokio::test]
    async fn test_to_volume_hash_map() {
        let manga_id = "d58eb211-a1ae-426c-b504-fc88253de600".to_string();
        let utils = MangaUtils::new(Arc::new(DirsOptions::new().unwrap()), Default::default())
            .with_id(manga_id);
        let mut chapters = Box::pin(utils.get_all_downloaded_chapter_data().await.unwrap());
        let by_volume = group_chapter_to_volume_hash_map(&mut chapters).await.unwrap();
        for (volume, chapters) in by_volume {
            let aggregate = group_chapter_to_chapter_aggregate(chapters).unwrap();
            println!("\"{}\" : {}", volume, serde_json::to_string(&aggregate).unwrap());
        }
    }

    /// Prints the full stream-based aggregate for a locally downloaded manga.
    #[tokio::test]
    async fn test_to_volume_aggregate() {
        let manga_id = "1c8f0358-d663-4d60-8590-b5e82890a1e3".to_string();
        let utils = MangaUtils::new(Arc::new(DirsOptions::new().unwrap()), Default::default())
            .with_id(manga_id);
        let aggregate = utils
            .aggregate_manga_chapters_async_friendly()
            .await
            .unwrap();
        println!("{}", serde_json::to_string(&aggregate).unwrap());
    }
}