Add archive extraction after download

feature/lookup-installed
trivernis 2 years ago
parent 5238fb57e0
commit a39df138c9
Signed by: Trivernis
GPG Key ID: DFFFCC2C7A02DB45

@@ -19,13 +19,16 @@ dirs = "4.0.0"
futures-util = "0.3.25"
indicatif = "0.17.3"
lazy_static = "1.4.0"
libflate = "1.2.0"
miette = "5.5.0"
reqwest = { version = "0.11.14", features = ["json", "stream"] }
semver = { version = "1.0.16", features = ["std", "serde"] }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = "1.0.91"
tar = "0.4.38"
thiserror = "1.0.38"
tokio = { version = "1.24.2", features = ["rt", "macros", "tracing", "net", "fs", "time"] }
toml = "0.5.11"
tracing = "0.1.37"
tracing-subscriber = "0.3.16"
zip = "0.6.3"

@@ -13,8 +13,9 @@ lazy_static! {
pub static ref CACHE_DIR: PathBuf = dirs::cache_dir()
.unwrap_or_else(|| PathBuf::from(".cache"))
.join(PathBuf::from("nenv"));
pub static ref BIN_DIR: PathBuf = DATA_DIR.join(PathBuf::from("bin"));
pub static ref NODE_VERSIONS_DIR: PathBuf = DATA_DIR.join(PathBuf::from("versions"));
pub static ref CFG_FILE_PATH: PathBuf = CFG_DIR.join("config.toml");
pub static ref BIN_DIR: PathBuf = DATA_DIR.join("bin");
pub static ref NODE_VERSIONS_DIR: PathBuf = DATA_DIR.join("versions");
pub static ref NODE_ARCHIVE_SUFFIX: String = format!("-{OS}-{ARCH}.{ARCHIVE_TYPE}");
}

@@ -3,7 +3,10 @@ use std::io;
use miette::Diagnostic;
use thiserror::Error;
use crate::web_api::error::ApiError;
use crate::{
repository::{config::ConfigError, extract::ExtractError},
web_api::error::ApiError,
};
pub(crate) type LibResult<T> = Result<T>;
pub(crate) type LibError = Error;
@@ -19,6 +22,21 @@ pub enum Error {
#[diagnostic_source]
ApiError,
),
#[error("Failed to extract archive: {0}")]
Extract(
#[from]
#[source]
#[diagnostic_source]
ExtractError,
),
#[error("Failed to load config file: {0}")]
Config(
#[from]
#[source]
#[diagnostic_source]
ConfigError,
),
#[error("IO Error: {0}")]
Io(#[from] io::Error),

@@ -3,6 +3,7 @@ use repository::{config::Config, NodeVersion, Repository};
mod consts;
pub mod error;
pub mod repository;
mod utils;
mod web_api;
use error::Result;
@@ -11,5 +12,5 @@ pub async fn install_version(version: NodeVersion) -> Result<()> {
}
async fn get_repository() -> Result<Repository> {
Repository::init(Config::default()).await
Repository::init(Config::load().await?).await
}

@@ -1,13 +1,64 @@
use std::io;
use miette::Diagnostic;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::fs;
use crate::consts::{CFG_FILE_PATH, NODE_DIST_URL};
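/// Persistent user configuration, stored as TOML at CFG_FILE_PATH.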
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Config {
pub dist_base_url: String,
pub default_version: String,
}
pub type ConfigResult<T> = Result<T, ConfigError>;
#[derive(Error, Diagnostic, Debug)]
pub enum ConfigError {
#[error("IO Error: {0}")]
Io(
#[from]
#[source]
io::Error,
),
#[error("Failed to parse config file: {0}")]
Parse(
#[from]
#[source]
toml::de::Error,
),
#[error("Failed to serialize config file: {0}")]
Serialize(
#[from]
#[source]
toml::ser::Error,
),
}
impl Default for Config {
fn default() -> Self {
Self {
dist_base_url: String::from("https://nodejs.org/dist"),
dist_base_url: String::from(NODE_DIST_URL),
default_version: String::from("latest"),
}
}
}
impl Config {
/// Loads the config file from the default config path
pub async fn load() -> ConfigResult<Self> {
if !CFG_FILE_PATH.exists() {
let cfg = Config::default();
fs::write(&*CFG_FILE_PATH, toml::to_string_pretty(&cfg)?).await?;
Ok(cfg)
} else {
let cfg_string = fs::read_to_string(&*CFG_FILE_PATH).await?;
let cfg = toml::from_str(&cfg_string)?;
Ok(cfg)
}
}
}

@@ -0,0 +1,89 @@
use std::{
fs::{self, File},
io::{self, BufReader},
path::Path,
};
use libflate::gzip::Decoder;
use miette::Diagnostic;
use tar::Archive;
use thiserror::Error;
use zip::ZipArchive;
use crate::utils::{progress_bar, progress_spinner};
type ExtractResult<T> = Result<T, ExtractError>;
#[derive(Error, Debug, Diagnostic)]
pub enum ExtractError {
#[error("IO error when extracting: {0}")]
Io(
#[from]
#[source]
io::Error,
),
#[error("Failed to extract zip: {0}")]
Zip(
#[from]
#[source]
zip::result::ZipError,
),
}
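/// Extracts the downloaded archive at `src` into `dst`, picking the format by
/// target OS: zip on Windows, tar.gz everywhere else.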
pub fn extract_file(src: &Path, dst: &Path) -> ExtractResult<()> {
#[cfg(target_os = "windows")]
extract_zip(src, dst)?;
#[cfg(not(target_os = "windows"))]
extract_tar_gz(src, dst)?;
Ok(())
}
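/// Unpacks a gzip-compressed tarball into `dst` in a single pass, showing a
/// spinner while the extraction runs.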
fn extract_tar_gz(src: &Path, dst: &Path) -> ExtractResult<()> {
let reader = BufReader::new(File::open(src)?);
let decoder = Decoder::new(reader)?;
let mut archive = Archive::new(decoder);
let pb = progress_spinner();
pb.set_message("Extracting tar.gz archive");
archive.unpack(dst)?;
pb.finish_with_message("Archive extracted.");
Ok(())
}
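/// Unpacks a zip archive entry by entry: directory entries are created, entries
/// with unsafe names are skipped with an error log, missing parent directories
/// are created, and the progress bar ticks once per entry.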
fn extract_zip(src: &Path, dst: &Path) -> ExtractResult<()> {
let mut archive = ZipArchive::new(File::open(src)?)?;
let pb = progress_bar(archive.len() as u64);
pb.set_message("Extracting zip archive");
for i in 0..archive.len() {
let mut file = archive.by_index(i)?;
let Some(path) = file.enclosed_name() else {
tracing::error!(
"Cannot extract {:?} because it has an invalid name",
file.name()
);
continue;
};
let output_path = dst.join(path);
if (*file.name()).ends_with('/') {
tracing::debug!("Creating directory {output_path:?}");
fs::create_dir_all(output_path)?;
} else {
if let Some(parent) = output_path.parent() {
if !parent.exists() {
tracing::debug!("Creating parent directory {parent:?}");
fs::create_dir_all(parent)?;
}
}
let mut file_output = File::create(&output_path)?;
tracing::debug!("Extracting to {output_path:?}");
io::copy(&mut file, &mut file_output)?;
}
pb.tick()
}
pb.finish_with_message("Archive extracted.");
Ok(())
}

@@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use semver::{Version, VersionReq};
use tokio::{
@@ -15,6 +15,7 @@ use crate::{
use self::{config::Config, versions::Versions};
pub mod config;
pub(crate) mod extract;
pub mod versions;
pub enum NodeVersion {
@@ -61,15 +62,21 @@ impl Repository {
Ok(())
}
/// Installs a specified node version
pub async fn install_version(&self, version_req: NodeVersion) -> LibResult<()> {
let info = self.parse_req(version_req);
let archive_path = self.download_version(&info.version).await?;
self.extract_archive(info, &archive_path)?;
todo!()
Ok(())
}
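/// Downloads the archive for the given version into the cache directory and
/// returns its path, reusing an already downloaded archive if one exists.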
async fn download_version(&self, version: &Version) -> LibResult<PathBuf> {
let download_path = CACHE_DIR.join(format!("node-v{}{}", version, *NODE_ARCHIVE_SUFFIX));
if download_path.exists() {
return Ok(download_path);
}
let mut download_writer = BufWriter::new(File::create(&download_path).await?);
self.web_api
.download_version(version.to_string(), &mut download_writer)
@@ -78,6 +85,13 @@ impl Repository {
Ok(download_path)
}
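/// Extracts a downloaded archive into the versions directory, using the version
/// number as the target folder name.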
fn extract_archive(&self, info: &VersionInfo, archive_path: &Path) -> LibResult<()> {
let dst_path = NODE_VERSIONS_DIR.join(info.version.to_string());
extract::extract_file(archive_path, &dst_path)?;
Ok(())
}
fn parse_req(&self, version_req: NodeVersion) -> &VersionInfo {
match version_req {
NodeVersion::Latest => self.versions.latest(),

@@ -0,0 +1,27 @@
use std::time::Duration;
use indicatif::{ProgressBar, ProgressStyle};
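/// Creates a byte-oriented progress bar with a steady tick, used for downloads
/// and zip extraction.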
pub fn progress_bar(total: u64) -> ProgressBar {
let pb = ProgressBar::new(total);
pb.set_style(
ProgressStyle::default_bar()
.template(
"{msg} {spinner}\n[{wide_bar}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})",
)
.unwrap(),
);
pb.enable_steady_tick(Duration::from_millis(50));
pb
}
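/// Creates an indeterminate spinner with a steady tick for work whose length is
/// not known in advance.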
pub fn progress_spinner() -> ProgressBar {
let pb = ProgressBar::new_spinner();
pb.set_style(
ProgressStyle::default_spinner()
.template("{msg} {spinner}")
.unwrap(),
);
pb.enable_steady_tick(Duration::from_millis(50));
pb
}

@@ -4,7 +4,7 @@ use std::{
time::Duration,
};
use crate::consts::NODE_ARCHIVE_SUFFIX;
use crate::{consts::NODE_ARCHIVE_SUFFIX, utils::progress_bar};
use self::error::{ApiError, ApiResult};
use indicatif::{ProgressBar, ProgressStyle};
@@ -73,16 +73,8 @@ impl WebApi {
let total_size = res
.content_length()
.ok_or_else(|| ApiError::other("Missing content length"))?;
let pb = ProgressBar::new(total_size);
let pb = progress_bar(total_size);
pb.set_message(format!("Downloading node v{version}"));
pb.set_style(
ProgressStyle::default_bar()
.template(
"{msg} {spinner}\n[{wide_bar}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})",
)
.unwrap(),
);
pb.enable_steady_tick(Duration::from_millis(50));
let mut stream = res.bytes_stream();
let mut total_downloaded = 0;

@@ -1,17 +1,17 @@
use tokio::io::sink;
use super::NodejsAccess;
use super::WebApi;
#[tokio::test]
async fn it_fetches_all_versions() {
let versions = NodejsAccess::default().get_versions().await.unwrap();
let versions = WebApi::default().get_versions().await.unwrap();
assert!(!versions.is_empty());
}
#[tokio::test]
async fn it_downloads_a_specific_version() {
let mut writer = sink();
let bytes_written = NodejsAccess::default()
let bytes_written = WebApi::default()
.download_version("15.0.0", &mut writer)
.await
.unwrap();
