[WIP] Implement parallel aur install
Signed-off-by: trivernis <trivernis@protonmail.com>
parent
362c2cf6ea
commit
bef4fbcb02
@ -0,0 +1,69 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::internal::{
|
||||
commands::ShellCommand,
|
||||
error::{AppError, AppResult},
|
||||
};
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct GitCloneBuilder {
|
||||
url: String,
|
||||
directory: PathBuf,
|
||||
}
|
||||
|
||||
impl GitCloneBuilder {
|
||||
pub fn url<S: ToString>(mut self, url: S) -> Self {
|
||||
self.url = url.to_string();
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub fn directory<P: AsRef<Path>>(mut self, path: P) -> Self {
|
||||
self.directory = path.as_ref().into();
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub async fn clone(self) -> AppResult<()> {
|
||||
let result = ShellCommand::git()
|
||||
.arg("clone")
|
||||
.arg(self.url)
|
||||
.arg(self.directory)
|
||||
.wait_with_output()
|
||||
.await?;
|
||||
|
||||
if result.status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(AppError::Other(result.stderr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct GitPullBuilder {
|
||||
directory: PathBuf,
|
||||
}
|
||||
|
||||
impl GitPullBuilder {
|
||||
pub fn directory<P: AsRef<Path>>(mut self, path: P) -> Self {
|
||||
self.directory = path.as_ref().into();
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub async fn pull(self) -> AppResult<()> {
|
||||
let result = ShellCommand::git()
|
||||
.arg("-C")
|
||||
.arg(self.directory)
|
||||
.arg("pull")
|
||||
.wait_with_output()
|
||||
.await?;
|
||||
|
||||
if result.status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(AppError::Other(result.stderr))
|
||||
}
|
||||
}
|
||||
}
|
@ -1 +1,2 @@
|
||||
pub mod git;
|
||||
pub mod pacman;
|
||||
|
@ -0,0 +1,207 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use aur_rpc::PackageInfo;
|
||||
use futures::future;
|
||||
|
||||
use crate::builder::pacman::PacmanSearchBuilder;
|
||||
|
||||
use super::error::AppResult;
|
||||
use lazy_regex::regex;
|
||||
|
||||
/// Fully resolved dependency information for a single package: its runtime
/// dependencies and its build-time (make) dependencies, each grouped by
/// source.
#[derive(Clone, Debug)]
pub struct DependencyInformation {
    /// Runtime dependencies, recursively resolved.
    pub depends: DependencyCollection,
    /// Build-time (make) dependencies, recursively resolved.
    pub make_depends: DependencyCollection,
}
|
||||
|
||||
/// Resolved dependencies grouped by where they can be obtained from.
#[derive(Clone, Debug, Default)]
pub struct DependencyCollection {
    /// Packages that have to be built from the AUR.
    pub aur: Vec<PackageInfo>,
    /// Package names available from the pacman repositories.
    pub repo: Vec<String>,
    /// Names found neither in the repositories nor in the AUR.
    pub not_found: Vec<String>,
}
|
||||
|
||||
/// A single parsed dependency specifier: a package name plus an optional
/// version constraint (e.g. `libfoo>=1.2`).
#[derive(Clone, Debug)]
pub struct Dependency {
    /// Bare package name, without any version constraint.
    pub name: String,
    /// Comparison operator of the version constraint, if one was given.
    #[allow(unused)]
    pub condition: Option<Condition>,
    /// Version string of the constraint, if one was given.
    #[allow(unused)]
    pub version: Option<String>,
}
|
||||
|
||||
/// Version comparison operator used inside a dependency specifier
/// (e.g. the `>=` in `foo>=1.2`).
#[derive(Clone, Debug)]
pub enum Condition {
    Gt,
    Ge,
    Eq,
    Le,
    Lt,
}

impl Condition {
    /// Parses a comparison operator token.
    ///
    /// Returns `None` for anything that is not exactly one of
    /// `=`, `<=`, `>=`, `>` or `<`.
    pub fn try_from_str(s: &str) -> Option<Self> {
        let condition = match s {
            "<" => Self::Lt,
            "<=" => Self::Le,
            "=" => Self::Eq,
            ">=" => Self::Ge,
            ">" => Self::Gt,
            _ => return None,
        };

        Some(condition)
    }
}
|
||||
|
||||
impl DependencyInformation {
    /// Resolves all dependency information for a given package
    #[tracing::instrument(level = "trace")]
    pub async fn for_package(package: &PackageInfo) -> AppResult<Self> {
        // Make-dependencies and runtime dependencies are resolved through two
        // separate walks over the dependency graph.
        let make_depends = Self::resolve_make_depends(package).await?;
        let depends = Self::resolve_depends(package).await?;

        Ok(Self {
            depends,
            make_depends,
        })
    }

    /// Resolves all make dependencies for a package
    ///
    /// Breadth-first walk: each round first moves names that the pacman repos
    /// provide into `dependencies.repo`, looks the remainder up in the AUR,
    /// records leftovers as `not_found`, then queues the not-yet-searched
    /// make-dependencies of the AUR packages found in this round.
    ///
    /// NOTE(review): near-duplicate of `resolve_depends` below — they differ
    /// only in which field (`make_depends` vs `depends`) is followed;
    /// consider factoring the shared loop into one helper.
    #[tracing::instrument(level = "trace")]
    async fn resolve_make_depends(package: &PackageInfo) -> AppResult<DependencyCollection> {
        let mut packages_to_resolve: HashSet<String> = package
            .make_depends
            .iter()
            .filter_map(Self::map_dep_to_name)
            .collect();
        // Names already queried once — guards against dependency cycles and
        // repeated lookups.
        let mut already_searched = HashSet::new();
        let mut dependencies = DependencyCollection::default();

        while !packages_to_resolve.is_empty() {
            already_searched.extend(packages_to_resolve.iter().cloned());
            // Strip out everything a pacman repo already provides.
            Self::extend_by_repo_packages(&mut packages_to_resolve, &mut dependencies).await?;

            // Whatever is left must come from the AUR.
            // NOTE(review): presumably aur_rpc::info tolerates an empty name
            // set after the repo filter — confirm.
            let mut aur_packages = aur_rpc::info(&packages_to_resolve).await?;
            aur_packages.iter().for_each(|p| {
                packages_to_resolve.remove(&p.metadata.name);
            });
            // Anything neither the repos nor the AUR answered for.
            let not_found = std::mem::take(&mut packages_to_resolve);

            dependencies
                .not_found
                .append(&mut not_found.into_iter().collect());

            // Queue the next round: transitive make-dependencies not seen yet.
            packages_to_resolve = Self::get_filtered_make_depends(&aur_packages, &already_searched);
            dependencies.aur.append(&mut aur_packages);
        }

        Ok(dependencies)
    }

    /// Resolves all dependencies for a package
    ///
    /// Same breadth-first walk as `resolve_make_depends`, but following the
    /// runtime `depends` edges instead of `make_depends`.
    #[tracing::instrument(level = "trace")]
    async fn resolve_depends(package: &PackageInfo) -> AppResult<DependencyCollection> {
        let mut packages_to_resolve: HashSet<String> = package
            .depends
            .iter()
            .filter_map(Self::map_dep_to_name)
            .collect();
        let mut already_searched = HashSet::new();
        let mut dependencies = DependencyCollection::default();

        while !packages_to_resolve.is_empty() {
            already_searched.extend(packages_to_resolve.iter().cloned());
            Self::extend_by_repo_packages(&mut packages_to_resolve, &mut dependencies).await?;

            let mut aur_packages = aur_rpc::info(&packages_to_resolve).await?;
            aur_packages.iter().for_each(|p| {
                packages_to_resolve.remove(&p.metadata.name);
            });
            let not_found = std::mem::take(&mut packages_to_resolve);

            dependencies
                .not_found
                .append(&mut not_found.into_iter().collect());

            packages_to_resolve = Self::get_filtered_depends(&aur_packages, &already_searched);
            dependencies.aur.append(&mut aur_packages);
        }

        Ok(dependencies)
    }

    /// Moves every name in `to_resolve` that a pacman search can find into
    /// `dependencies.repo`, leaving only names still needing AUR resolution.
    async fn extend_by_repo_packages(
        to_resolve: &mut HashSet<String>,
        dependencies: &mut DependencyCollection,
    ) -> AppResult<()> {
        let repo_deps = Self::find_repo_packages(to_resolve.clone()).await?;
        to_resolve.retain(|p| !repo_deps.contains(p));
        dependencies
            .repo
            .append(&mut repo_deps.into_iter().collect());

        Ok(())
    }

    /// Collects the make-dependency names of `aur_packages` that have not
    /// been searched yet.
    fn get_filtered_make_depends(
        aur_packages: &Vec<PackageInfo>,
        searched: &HashSet<String>,
    ) -> HashSet<String> {
        aur_packages
            .iter()
            .flat_map(|p| p.make_depends.iter().filter_map(Self::map_dep_to_name))
            .filter(|d| !searched.contains(d))
            .collect()
    }

    /// Collects the runtime-dependency names of `aur_packages` that have not
    /// been searched yet.
    fn get_filtered_depends(
        aur_packages: &Vec<PackageInfo>,
        searched: &HashSet<String>,
    ) -> HashSet<String> {
        aur_packages
            .iter()
            .flat_map(|p| p.depends.iter().filter_map(Self::map_dep_to_name))
            .filter(|d| !searched.contains(d))
            .collect()
    }

    /// Strips an optional version constraint (e.g. `foo>=1.2`) from a
    /// dependency specifier, keeping only the bare package name.
    fn map_dep_to_name(dep: &String) -> Option<String> {
        Dependency::try_from_str(dep).map(|d| d.name)
    }

    /// Runs one pacman search per name concurrently and returns the subset
    /// of `pkg_names` that produced a hit.
    ///
    /// NOTE(review): pacman searches can match substrings/descriptions —
    /// confirm `PacmanSearchBuilder::search` reports an exact package match,
    /// otherwise repo membership can be misclassified here.
    #[tracing::instrument(level = "trace")]
    async fn find_repo_packages(pkg_names: HashSet<String>) -> AppResult<HashSet<String>> {
        let repo_searches = pkg_names.iter().cloned().map(|p| async {
            let search_result = PacmanSearchBuilder::default().query(&p).search().await?;
            AppResult::Ok((p, search_result))
        });
        let repo_deps = future::try_join_all(repo_searches).await?;
        let repo_deps: HashSet<String> = repo_deps
            .into_iter()
            .filter_map(|(p, found)| if found { Some(p) } else { None })
            .collect();

        Ok(repo_deps)
    }
}
|
||||
|
||||
impl Dependency {
|
||||
#[tracing::instrument(level = "trace")]
|
||||
pub fn try_from_str(s: &str) -> Option<Self> {
|
||||
let r =
|
||||
regex!(r#"^(?P<name>[\w\-]+)((?P<condition><=|=|>=|>|<)(?P<version>\d+(\.\d+)*))?$"#);
|
||||
let caps = r.captures(s)?;
|
||||
let name = caps["name"].to_string();
|
||||
let condition = caps
|
||||
.name("condition")
|
||||
.map(|c| c.as_str())
|
||||
.and_then(Condition::try_from_str);
|
||||
let version = caps.name("version").map(|v| v.as_str().into());
|
||||
tracing::debug!("Parsed dependency to {name} {condition:?} {version:?}");
|
||||
|
||||
Some(Dependency {
|
||||
name,
|
||||
condition,
|
||||
version,
|
||||
})
|
||||
}
|
||||
}
|
@ -1 +0,0 @@
|
||||
|
@ -1,173 +1,130 @@
|
||||
use async_recursion::async_recursion;
|
||||
use aur_rpc::PackageInfo;
|
||||
use crossterm::style::Stylize;
|
||||
use futures::future;
|
||||
use indicatif::ProgressBar;
|
||||
use std::env;
|
||||
use std::env::set_current_dir;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::fs;
|
||||
|
||||
use crate::builder::git::{GitCloneBuilder, GitPullBuilder};
|
||||
use crate::internal::commands::ShellCommand;
|
||||
use crate::internal::error::SilentUnwrap;
|
||||
use crate::internal::dependencies::DependencyInformation;
|
||||
use crate::internal::error::{AppError, AppResult, SilentUnwrap};
|
||||
use crate::internal::exit_code::AppExitCode;
|
||||
use crate::internal::rpc::rpcinfo;
|
||||
use crate::internal::rpc::{self, rpcinfo};
|
||||
use crate::internal::utils::get_cache_dir;
|
||||
use crate::logging::get_logger;
|
||||
use crate::{crash, internal::fs_utils::rmdir_recursive, prompt, Options};
|
||||
|
||||
// NOTE(review): this region is a rendered VCS diff of `aur_install` with the
// OLD and NEW implementations interleaved and no +/- markers preserved. It is
// not valid Rust as it stands (a `tracing::debug!` call is truncated
// mid-expression below, and a statement near the end lost its `;` when the
// old closing brace was dropped). Reconstruct from version control before
// editing; the comments below mark the visible interleave points.
/// Installs a given list of packages from the aur
#[tracing::instrument(level = "trace")]
#[async_recursion]
pub async fn aur_install(packages: Vec<String>, options: Options) {
    let url = crate::internal::rpc::URL;
    let cachedir = format!("{}/.cache/ame/", env::var("HOME").unwrap());
    let noconfirm = options.noconfirm;

    tracing::debug!("Installing from AUR: {:?}", &packages);

    tracing::info!("Installing packages {} from the AUR", packages.join(", "));

    // OLD implementation: sequential per-package loop.
    for package_name in packages {
        let rpcres = rpcinfo(&package_name)
            .await
            .silent_unwrap(AppExitCode::RpcError);

        if rpcres.is_none() {
            break;
        }

        let package = rpcres.unwrap();
        let pkg_name = package.metadata.name;

        tracing::debug!("Cloning {} into cachedir", pkg_name);

        tracing::info!("Cloning package source");

        set_current_dir(Path::new(&cachedir)).unwrap();
        ShellCommand::git()
            .arg("clone")
            .arg(format!("{}/{}", url, pkg_name))
            .wait()
            .await
            .silent_unwrap(AppExitCode::GitError);

        tracing::debug!(
            "Cloned {} into cachedir, moving on to resolving dependencies",
            pkg_name
        );
        tracing::debug!(
            "Raw dependencies for package {} are:\n{:?}",
            pkg_name,
            package.depends,
        );
        tracing::debug!(
            "Raw makedepends for package {} are:\n{:?}",
            pkg_name,
            package.make_depends.join(", "),
    // NOTE(review): the old debug call above is cut off here by the diff —
    // its closing `);` is missing. NEW implementation resumes below.
    let pb = get_logger().new_progress_spinner();
    pb.set_message("Fetching package information");

    let package_info = aur_rpc::info(&packages)
        .await
        .map_err(AppError::from)
        .silent_unwrap(AppExitCode::RpcError);

    tracing::debug!("package info = {package_info:?}");
    tokio::time::sleep(Duration::from_secs(1)).await;

    if package_info.len() != packages.len() {
        let mut not_found = packages.clone();
        package_info
            .iter()
            .for_each(|pkg| not_found.retain(|p| pkg.metadata.name != *p));
        crash!(
            AppExitCode::MissingDeps,
            "Could not find the package: {}",
            not_found.join(",").italic(),
        );
    }

    // NOTE(review): OLD implementation continues from here (dep sorting,
    // PKGBUILD review prompt, makepkg build) down to the `rmdir_recursive`
    // call marked below.
    // dep sorting
    tracing::debug!("Sorting dependencies");
    let sorted = crate::internal::sort(&package.depends, options).await;
    tracing::debug!("Sorting make dependencies");
    let md_sorted = crate::internal::sort(&package.make_depends, options).await;

    tracing::debug!("Sorted dependencies for {} are:\n{:?}", pkg_name, &sorted);
    tracing::debug!("Sorted makedepends for {} are:\n{:?}", pkg_name, &md_sorted);

    let newopts = Options {
        noconfirm,
        asdeps: true,
    };

    if !sorted.nf.is_empty() || !md_sorted.nf.is_empty() {
        crash!(
            AppExitCode::MissingDeps,
            "Could not find dependencies {} for package {}, aborting",
            sorted.nf.join(", "),
            pkg_name,
        );
    }

    if !noconfirm {
        let p1 = prompt!(default no,
            "Would you like to review {}'s PKGBUILD (and any .install files if present)?",
            pkg_name,
        );
        let editor: &str = &env::var("PAGER").unwrap_or_else(|_| "less".parse().unwrap());

        if p1 {
            Command::new(editor)
                .arg(format!("{}/PKGBUILD", pkg_name))
                .spawn()
                .unwrap()
                .wait()
                .unwrap();

            let status = ShellCommand::bash()
                .arg("-c")
                .arg(format!("ls {}/*.install &> /dev/null", pkg_name))
                .wait()
                .await
                .silent_unwrap(AppExitCode::Other);

            if status.success() {
                ShellCommand::bash()
                    .arg("-c")
                    .arg(format!("{} {}/*.install", editor, pkg_name))
                    .wait()
                    .await
                    .silent_unwrap(AppExitCode::Other);
            }

            let p2 = prompt!(default yes, "Would you still like to install {}?", pkg_name);
            if !p2 {
                fs::remove_dir_all(format!("{}/{}", cachedir, pkg_name))
                    .await
                    .unwrap();
                crash!(AppExitCode::UserCancellation, "Not proceeding");
            }
        }
    }

    // dep installing
    tracing::info!("Moving on to install dependencies");

    if !sorted.repo.is_empty() {
        crate::operations::install(sorted.repo, newopts).await;
        crate::operations::install(md_sorted.repo, newopts).await;
    }
    if !sorted.aur.is_empty() {
        crate::operations::aur_install(sorted.aur, newopts).await;
        crate::operations::aur_install(md_sorted.aur, newopts).await;
    }

    let mut makepkg_args = vec!["-rsci", "--skippgp"];
    if options.asdeps {
        makepkg_args.push("--asdeps")
    }
    if options.noconfirm {
        makepkg_args.push("--noconfirm")
    }

    // package building and installing
    tracing::info!("Building time!");
    set_current_dir(format!("{}/{}", cachedir, pkg_name)).unwrap();
    let status = ShellCommand::makepkg()
        .args(makepkg_args)
        .wait()
        .await
        .silent_unwrap(AppExitCode::MakePkgError);

    if !status.success() {
        fs::remove_dir_all(format!("{}/{}", cachedir, pkg_name))
            .await
            .unwrap();
        crash!(
            AppExitCode::PacmanError,
            "Error encountered while installing {}, aborting",
            pkg_name,
        );
    }

    set_current_dir(&cachedir).unwrap();
    let package_cache = PathBuf::from(format!("{cachedir}/{pkg_name}"));
    rmdir_recursive(&package_cache).await.unwrap()
    // NOTE(review): end of the OLD implementation — the statement above lost
    // its trailing `;`/closing brace in the diff. NEW implementation resumes
    // below: parallel source download and dependency resolution.
    pb.finish_with_message("Found all packages in the aur");

    get_logger().new_multi_progress();

    future::try_join_all(package_info.iter().map(download_aur_source))
        .await
        .unwrap();
    tokio::time::sleep(Duration::from_secs(1)).await;

    let dependencies = future::try_join_all(package_info.iter().map(|pkg| async {
        get_logger()
            .new_progress_spinner()
            .set_message(format!("{}: Fetching dependencies", pkg.metadata.name));
        DependencyInformation::for_package(pkg).await
    }))
    .await
    .silent_unwrap(AppExitCode::RpcError);
    tokio::time::sleep(Duration::from_secs(1)).await;

    let aur_build_dependencies: Vec<PackageInfo> = dependencies
        .iter()
        .flat_map(|d| d.make_depends.aur.clone())
        .collect();

    // NOTE(review): `aur_dependencies` is computed but never used below —
    // presumably the [WIP] install step for runtime AUR deps is still missing.
    let aur_dependencies: Vec<PackageInfo> = dependencies
        .iter()
        .flat_map(|d| d.depends.aur.clone())
        .collect();

    get_logger().reset_output_type();
    tracing::info!(
        "Installing {} build dependencies",
        aur_build_dependencies.len()
    );
    get_logger().new_multi_progress();

    future::try_join_all(aur_build_dependencies.iter().map(download_aur_source))
        .await
        .unwrap();
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip_all)]
|
||||
async fn download_aur_source(info: &PackageInfo) -> AppResult<PathBuf> {
|
||||
let pb = get_logger().new_progress_spinner();
|
||||
let pkg_name = &info.metadata.name;
|
||||
pb.set_message(format!("{pkg_name}: Downloading sources"));
|
||||
|
||||
let cache_dir = get_cache_dir();
|
||||
let pkg_dir = cache_dir.join(&pkg_name);
|
||||
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||
|
||||
if pkg_dir.exists() {
|
||||
pb.set_message(format!("{pkg_name}: Pulling latest changes {pkg_name}"));
|
||||
GitPullBuilder::default().directory(&pkg_dir).pull().await?;
|
||||
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||
} else {
|
||||
let aur_url = rpc::URL;
|
||||
let repository_url = format!("{aur_url}/{pkg_name}");
|
||||
pb.set_message(format!("{pkg_name}: Cloning aur repository"));
|
||||
|
||||
GitCloneBuilder::default()
|
||||
.url(repository_url)
|
||||
.directory(&pkg_dir)
|
||||
.clone()
|
||||
.await?;
|
||||
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||
|
||||
pb.set_message(format!("{pkg_name}: Downloading and extracting files"));
|
||||
}
|
||||
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||
pb.finish_with_message(format!("{pkg_name} is ready to build"));
|
||||
|
||||
Ok(pkg_dir)
|
||||
}
|
||||
|
Loading…
Reference in New Issue