+
Skip to content
This repository was archived by the owner on Sep 17, 2024. It is now read-only.

feat/auto-update #562

Merged
merged 14 commits into from
Mar 13, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,14 @@ and `Removed`.

## [Unreleased]

### Added

- Addon updates are automatically checked for every 30 minutes while the program
is open. If new updates are available, they will be sorted to the top of the screen.
- A new "Auto Update" setting can be enabled in the Addons section of the Settings.
When enabled, Ajour will automatically apply new addon updates when available
(new updates are checked every 30 minutes).

### Changed

- Error messages are cleared when "Refresh" is pressed
Expand Down
8 changes: 7 additions & 1 deletion crates/core/src/addon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,12 @@ impl Addon {
self.repository = Some(repo_package);
}

/// Copies the remote-package listing from `repo_package` into this addon's
/// repository metadata, leaving all other metadata fields untouched.
///
/// Does nothing when the addon has no associated repository.
pub fn set_remote_package_from_repo_package(&mut self, repo_package: &RepositoryPackage) {
    if let Some(repo) = &mut self.repository {
        repo.metadata.remote_packages = repo_package.metadata.remote_packages.clone();
    }
}

pub fn update_addon_folders(&mut self, mut folders: Vec<AddonFolder>) {
if !folders.is_empty() {
folders.sort_by(|a, b| a.id.cmp(&b.id));
Expand Down Expand Up @@ -237,7 +243,7 @@ impl Addon {
}
}

fn repository(&self) -> Option<&RepositoryPackage> {
/// Returns a shared reference to the addon's resolved repository package,
/// or `None` if no repository has been set for this addon.
pub fn repository(&self) -> Option<&RepositoryPackage> {
    self.repository.as_ref()
}

Expand Down
3 changes: 3 additions & 0 deletions crates/core/src/config/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,9 @@ pub struct Config {

#[serde(default)]
pub catalog_source: Option<catalog::Source>,

#[serde(default)]
pub auto_update: bool,
}

impl Config {
Expand Down
163 changes: 10 additions & 153 deletions crates/core/src/parse.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,17 @@ use crate::{
addon::{Addon, AddonFolder, AddonState},
cache::{self, AddonCache, AddonCacheEntry, ExternalReleaseId, FingerprintCache},
config::Flavor,
error::{CacheError, DownloadError, ParseError, RepositoryError},
error::{CacheError, DownloadError, ParseError},
fs::PersistentData,
murmur2::calculate_hash,
repository::{
curse, townlongyak, tukui, wowi, RepositoryIdentifiers, RepositoryKind, RepositoryPackage,
curse, git, townlongyak, tukui, wowi, RepositoryIdentifiers, RepositoryKind,
RepositoryPackage,
},
utility::format_interface_into_game_version,
};
use async_std::sync::{Arc, Mutex};
use fancy_regex::Regex;
use futures::future::join_all;
use isahc::http::Uri;
use once_cell::sync::Lazy;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
Expand Down Expand Up @@ -521,61 +520,8 @@ async fn get_all_repo_packages(
}

// Get all curse repo packages
let curse_repo_packages = if !curse_ids.is_empty() {
let mut curse_packages = curse::fetch_remote_packages_by_ids(&curse_ids).await?;

let mut curse_repo_packages = vec![];

// Get repo packages from fingerprint exact matches
curse_repo_packages.extend(
fingerprint_info
.exact_matches
.iter()
.map(|info| {
(
info.id.to_string(),
curse::metadata_from_fingerprint_info(flavor, info),
)
})
.filter_map(|(id, metadata)| {
RepositoryPackage::from_repo_id(flavor, RepositoryKind::Curse, id)
.map(|r| r.with_metadata(metadata))
.ok()
}),
);

// Remove any packages that match a fingerprint entry and update missing
// metadata fields with that package info
curse_repo_packages.iter_mut().for_each(|r| {
if let Some(idx) = curse_packages.iter().position(|p| p.id.to_string() == r.id) {
let package = curse_packages.remove(idx);

r.metadata.title = Some(package.name.clone());
r.metadata.website_url = Some(package.website_url.clone());
r.metadata.changelog_url = Some(format!("{}/files", package.website_url));
}
});

curse_repo_packages.extend(
curse_packages
.into_iter()
.map(|package| {
(
package.id.to_string(),
curse::metadata_from_curse_package(flavor, package),
)
})
.filter_map(|(id, metadata)| {
RepositoryPackage::from_repo_id(flavor, RepositoryKind::Curse, id)
.map(|r| r.with_metadata(metadata))
.ok()
}),
);

curse_repo_packages
} else {
vec![]
};
let curse_repo_packages =
curse::batch_fetch_repo_packages(flavor, &curse_ids, Some(fingerprint_info)).await?;

log::debug!(
"{} - {} curse packages fetched",
Expand All @@ -584,26 +530,7 @@ async fn get_all_repo_packages(
);

// Get all tukui repo packages
let tukui_repo_packages = if !tukui_ids.is_empty() {
let fetch_tasks: Vec<_> = tukui_ids
.iter()
.map(|id| tukui::fetch_remote_package(&id, &flavor))
.collect();

join_all(fetch_tasks)
.await
.into_iter()
.filter_map(Result::ok)
.map(|(id, package)| (id, tukui::metadata_from_tukui_package(package)))
.filter_map(|(id, metadata)| {
RepositoryPackage::from_repo_id(flavor, RepositoryKind::Tukui, id)
.ok()
.map(|r| r.with_metadata(metadata))
})
.collect::<Vec<_>>()
} else {
vec![]
};
let tukui_repo_packages = tukui::batch_fetch_repo_packages(flavor, &tukui_ids).await?;

log::debug!(
"{} - {} tukui packages fetched",
Expand All @@ -612,26 +539,7 @@ async fn get_all_repo_packages(
);

// Get all wowi repo packages
let wowi_repo_packages = if !wowi_ids.is_empty() {
let wowi_packages = wowi::fetch_remote_packages(&wowi_ids).await?;

wowi_packages
.into_iter()
.map(|package| {
(
package.id.to_string(),
wowi::metadata_from_wowi_package(package),
)
})
.filter_map(|(id, metadata)| {
RepositoryPackage::from_repo_id(flavor, RepositoryKind::WowI, id)
.ok()
.map(|r| r.with_metadata(metadata))
})
.collect::<Vec<_>>()
} else {
vec![]
};
let wowi_repo_packages = wowi::batch_fetch_repo_packages(flavor, &wowi_ids).await?;

log::debug!(
"{} - {} wowi packages fetched",
Expand All @@ -640,26 +548,8 @@ async fn get_all_repo_packages(
);

// Get all townlong repo packages
let townlong_repo_packages = if !townlong_ids.is_empty() {
let townlong_packages = townlongyak::fetch_remote_packages(flavor, &townlong_ids).await?;

townlong_packages
.into_iter()
.map(|package| {
(
package.id.to_string(),
townlongyak::metadata_from_townlong_package(flavor, package),
)
})
.filter_map(|(id, metadata)| {
RepositoryPackage::from_repo_id(flavor, RepositoryKind::TownlongYak, id)
.ok()
.map(|r| r.with_metadata(metadata))
})
.collect::<Vec<_>>()
} else {
vec![]
};
let townlong_repo_packages =
townlongyak::batch_fetch_repo_packages(flavor, &townlong_ids).await?;

log::debug!(
"{} - {} townlong packages fetched",
Expand All @@ -668,40 +558,7 @@ async fn get_all_repo_packages(
);

// Get all git repo packages
let git_repo_packages = if !git_urls.is_empty() {
let fetch_tasks = git_urls
.iter()
.map(|url| {
let url = url
.parse::<Uri>()
.map_err(|_| RepositoryError::GitInvalidUrl { url: url.clone() })?;

RepositoryPackage::from_source_url(flavor, url)
})
.filter_map(|result| match result {
Ok(package) => Some(package),
Err(e) => {
log::error!("{}", e);
None
}
})
.map(|mut package| async {
if let Err(e) = package.resolve_metadata().await {
log::error!("{}", e);
Err(e)
} else {
Ok(package)
}
});

join_all(fetch_tasks)
.await
.into_iter()
.filter_map(Result::ok)
.collect::<Vec<_>>()
} else {
vec![]
};
let git_repo_packages = git::batch_fetch_repo_packages(flavor, &git_urls).await?;

log::debug!(
"{} - {} git packages fetched",
Expand Down
66 changes: 65 additions & 1 deletion crates/core/src/repository/backend/curse.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use super::*;
use crate::config::Flavor;
use crate::error::DownloadError;
use crate::network::{post_json_async, request_async};
use crate::repository::{ReleaseChannel, RemotePackage};
use crate::repository::{ReleaseChannel, RemotePackage, RepositoryKind, RepositoryPackage};
use crate::utility::{regex_html_tags_to_newline, regex_html_tags_to_space, truncate};

use async_trait::async_trait;
Expand Down Expand Up @@ -172,6 +172,70 @@ pub(crate) fn metadata_from_fingerprint_info(
metadata
}

/// Fetches `RepositoryPackage`s for a batch of Curse addon ids.
///
/// When `fingerprint_info` is supplied, addons with an exact fingerprint
/// match are built from the fingerprint metadata first; the corresponding
/// remote packages are consumed only to backfill fields the fingerprint
/// data lacks (title, website URL, changelog URL). Any remaining remote
/// packages are then converted to `RepositoryPackage`s directly.
///
/// Returns an empty `Vec` — without a network call — when `curse_ids` is
/// empty. Packages whose repo id cannot be converted are silently skipped.
pub(crate) async fn batch_fetch_repo_packages(
    flavor: Flavor,
    curse_ids: &[i32],
    fingerprint_info: Option<&FingerprintInfo>,
) -> Result<Vec<RepositoryPackage>, DownloadError> {
    let mut curse_repo_packages = vec![];

    if curse_ids.is_empty() {
        return Ok(curse_repo_packages);
    }

    // `curse_ids` is already a slice; avoid the needless `&&[i32]` borrow.
    let mut curse_packages = curse::fetch_remote_packages_by_ids(curse_ids).await?;

    if let Some(fingerprint_info) = fingerprint_info {
        // Build repo packages from fingerprint exact matches first — these
        // carry the most accurate per-flavor metadata.
        curse_repo_packages.extend(
            fingerprint_info
                .exact_matches
                .iter()
                .map(|info| {
                    (
                        info.id.to_string(),
                        curse::metadata_from_fingerprint_info(flavor, info),
                    )
                })
                .filter_map(|(id, metadata)| {
                    RepositoryPackage::from_repo_id(flavor, RepositoryKind::Curse, id)
                        .map(|r| r.with_metadata(metadata))
                        .ok()
                }),
        );

        // Remove any remote packages that match a fingerprint entry and
        // backfill the metadata fields only the remote package knows about.
        curse_repo_packages.iter_mut().for_each(|r| {
            if let Some(idx) = curse_packages.iter().position(|p| p.id.to_string() == r.id) {
                let package = curse_packages.remove(idx);

                r.metadata.title = Some(package.name.clone());
                r.metadata.website_url = Some(package.website_url.clone());
                r.metadata.changelog_url = Some(format!("{}/files", package.website_url));
            }
        });
    }

    // Convert the remaining remote packages (those without a fingerprint
    // match, or all of them when no fingerprint info was provided).
    curse_repo_packages.extend(
        curse_packages
            .into_iter()
            .map(|package| {
                (
                    package.id.to_string(),
                    curse::metadata_from_curse_package(flavor, package),
                )
            })
            .filter_map(|(id, metadata)| {
                RepositoryPackage::from_repo_id(flavor, RepositoryKind::Curse, id)
                    .map(|r| r.with_metadata(metadata))
                    .ok()
            }),
    );

    Ok(curse_repo_packages)
}

pub(crate) async fn fetch_remote_packages_by_fingerprint(
fingerprints: &[u32],
) -> Result<FingerprintInfo, DownloadError> {
Expand Down
52 changes: 52 additions & 0 deletions crates/core/src/repository/backend/git.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,58 @@
use crate::config::Flavor;
use crate::error::{DownloadError, RepositoryError};
use crate::repository::RepositoryPackage;

use futures::future::join_all;
use isahc::http::Uri;

pub use github::Github;
pub use gitlab::Gitlab;

/// Resolves `RepositoryPackage`s for a batch of git (GitHub/GitLab) addon URLs.
///
/// Invalid URLs and packages whose metadata cannot be resolved are logged
/// and skipped rather than failing the whole batch. Returns an empty `Vec`
/// without doing any work when `git_urls` is empty.
pub(crate) async fn batch_fetch_repo_packages(
    flavor: Flavor,
    git_urls: &[String],
) -> Result<Vec<RepositoryPackage>, DownloadError> {
    if git_urls.is_empty() {
        return Ok(vec![]);
    }

    // Parse each URL and construct a bare `RepositoryPackage`, logging and
    // dropping any entry that fails either step.
    let pending: Vec<RepositoryPackage> = git_urls
        .iter()
        .map(|url| {
            let uri = url
                .parse::<Uri>()
                .map_err(|_| RepositoryError::GitInvalidUrl { url: url.clone() })?;

            RepositoryPackage::from_source_url(flavor, uri)
        })
        .filter_map(|result| match result {
            Ok(package) => Some(package),
            Err(e) => {
                log::error!("{}", e);
                None
            }
        })
        .collect();

    // Resolve metadata for all packages concurrently; a failed resolution is
    // logged and that package is omitted from the result set.
    let resolved = join_all(pending.into_iter().map(|mut package| async {
        match package.resolve_metadata().await {
            Ok(()) => Ok(package),
            Err(e) => {
                log::error!("{}", e);
                Err(e)
            }
        }
    }))
    .await;

    Ok(resolved.into_iter().filter_map(Result::ok).collect())
}

mod github {
use crate::config::Flavor;
use crate::error::RepositoryError;
Expand Down
Loading
点击 这是indexloc提供的php浏览器服务,不要输入任何密码和下载