diff --git a/src/api/builder.rs b/src/api/builder.rs
index d3a39d0..b7d3c4b 100644
--- a/src/api/builder.rs
+++ b/src/api/builder.rs
@@ -2,7 +2,7 @@ use std::{env, error::Error, path::PathBuf};
 
 use home::home_dir;
 
-use crate::{cache::ProtofetchGitCache, Protofetch};
+use crate::{git::cache::ProtofetchGitCache, Protofetch};
 
 #[derive(Default)]
 pub struct ProtofetchBuilder {
diff --git a/src/api/mod.rs b/src/api/mod.rs
index dbf755f..1fbd4f4 100644
--- a/src/api/mod.rs
+++ b/src/api/mod.rs
@@ -4,8 +4,8 @@ use std::{
 };
 
 use crate::{
-    cache::ProtofetchGitCache,
-    cli::command_handlers::{do_clean, do_clear_cache, do_fetch, do_init, do_lock, do_migrate},
+    cli::command_handlers::{do_clean, do_fetch, do_init, do_lock, do_migrate},
+    git::cache::ProtofetchGitCache,
 };
 
 mod builder;
@@ -89,6 +89,7 @@ impl Protofetch {
     }
 
     pub fn clear_cache(&self) -> Result<(), Box<dyn Error>> {
-        do_clear_cache(&self.cache)
+        self.cache.clear()?;
+        Ok(())
     }
 }
diff --git a/src/cache/git.rs b/src/cache/git.rs
new file mode 100644
index 0000000..8954121
--- /dev/null
+++ b/src/cache/git.rs
@@ -0,0 +1,33 @@
+use std::path::PathBuf;
+
+use crate::{
+    git::cache::ProtofetchGitCache,
+    model::protofetch::{Coordinate, DependencyName, RevisionSpecification},
+};
+
+use super::RepositoryCache;
+
+impl RepositoryCache for ProtofetchGitCache {
+    fn fetch(
+        &self,
+        coordinate: &Coordinate,
+        specification: &RevisionSpecification,
+        commit_hash: &str,
+    ) -> anyhow::Result<()> {
+        let repository = self.repository(coordinate)?;
+        repository.fetch_commit(specification, commit_hash)?;
+        Ok(())
+    }
+
+    fn create_worktree(
+        &self,
+        coordinate: &Coordinate,
+        commit_hash: &str,
+        name: &DependencyName,
+    ) -> anyhow::Result<PathBuf> {
+        let path = self
+            .repository(coordinate)?
+            .create_worktree(name, commit_hash)?;
+        Ok(path)
+    }
+}
diff --git a/src/cache/mod.rs b/src/cache/mod.rs
new file mode 100644
index 0000000..9a613ca
--- /dev/null
+++ b/src/cache/mod.rs
@@ -0,0 +1,21 @@
+mod git;
+
+use std::path::PathBuf;
+
+use crate::model::protofetch::{Coordinate, DependencyName, RevisionSpecification};
+
+pub trait RepositoryCache {
+    fn fetch(
+        &self,
+        coordinate: &Coordinate,
+        specification: &RevisionSpecification,
+        commit_hash: &str,
+    ) -> anyhow::Result<()>;
+
+    fn create_worktree(
+        &self,
+        coordinate: &Coordinate,
+        commit_hash: &str,
+        name: &DependencyName,
+    ) -> anyhow::Result<PathBuf>;
+}
diff --git a/src/cli/command_handlers.rs b/src/cli/command_handlers.rs
index 7701ad0..4ccb6fc 100644
--- a/src/cli/command_handlers.rs
+++ b/src/cli/command_handlers.rs
@@ -2,8 +2,8 @@ use log::{debug, info};
 
 use crate::{
     api::LockMode,
-    cache::ProtofetchGitCache,
     fetch,
+    git::cache::ProtofetchGitCache,
     model::{
         protodep::ProtodepDescriptor,
         protofetch::{lock::LockFile, Descriptor},
@@ -17,7 +17,6 @@ use std::{
 };
 
 const DEFAULT_OUTPUT_DIRECTORY_NAME: &str = "proto_src";
-const CACHE_WORKSPACES_DIRECTORY_NAME: &str = "dependencies";
 
 /// Handler to fetch command
 pub fn do_fetch(
@@ -32,18 +31,13 @@
     let lockfile = do_lock(lock_mode, cache, root, module_file_name, lock_file_name)?;
 
-    let cache_dependencies_directory_path = cache.location.join(CACHE_WORKSPACES_DIRECTORY_NAME);
     let output_directory_name = output_directory_name
         .or_else(|| module_descriptor.proto_out_dir.as_ref().map(Path::new))
         .unwrap_or(Path::new(DEFAULT_OUTPUT_DIRECTORY_NAME));
 
-    fetch::fetch_sources(cache, &lockfile, &cache_dependencies_directory_path)?;
+    fetch::fetch_sources(cache, &lockfile)?;
 
     //Copy proto_out files to actual target
-    proto::copy_proto_files(
-        &root.join(output_directory_name),
-        &cache_dependencies_directory_path,
-        &lockfile,
-    )?;
+    proto::copy_proto_files(cache, &lockfile, &root.join(output_directory_name))?;
 
     Ok(())
 }
@@ -177,19 +171,6 @@ pub fn do_clean(
     Ok(())
 }
 
-pub fn do_clear_cache(cache: &ProtofetchGitCache) -> Result<(), Box<dyn Error>> {
-    if cache.location.exists() {
-        info!(
-            "Clearing protofetch repository cache {}.",
-            &cache.location.display()
-        );
-        std::fs::remove_dir_all(&cache.location)?;
-        Ok(())
-    } else {
-        Ok(())
-    }
-}
-
 fn load_module_descriptor(
     root: &Path,
     module_file_name: &Path,
diff --git a/src/fetch.rs b/src/fetch.rs
index a262508..d11facc 100644
--- a/src/fetch.rs
+++ b/src/fetch.rs
@@ -1,35 +1,28 @@
-use std::{
-    collections::BTreeMap,
-    path::{Path, PathBuf},
-    str::Utf8Error,
-};
+use std::{collections::BTreeMap, str::Utf8Error};
 
 use crate::{
-    cache::{CacheError, RepositoryCache},
+    cache::RepositoryCache,
     model::protofetch::{
         lock::{LockFile, LockedDependency},
         Dependency, DependencyName, Descriptor,
     },
-    proto_repository::ProtoRepository,
     resolver::ModuleResolver,
 };
-use log::{debug, error, info};
+use log::{error, info};
 use thiserror::Error;
 
 #[derive(Error, Debug)]
 pub enum FetchError {
     #[error("Error while fetching repo from cache: {0}")]
-    Cache(#[from] CacheError),
+    Cache(anyhow::Error),
     #[error("Git error: {0}")]
     GitError(#[from] git2::Error),
     #[error("Error while decoding utf8 bytes from blob: {0}")]
     BlobRead(#[from] Utf8Error),
     #[error("Error while parsing descriptor")]
     Parsing(#[from] crate::model::ParseError),
-    #[error("Bad output dir {0}")]
-    BadOutputDir(String),
     #[error("Error while processing protobuf repository: {0}")]
-    ProtoRepoError(#[from] crate::proto_repository::ProtoRepoError),
+    ProtoRepoError(#[from] crate::git::repository::ProtoRepoError),
     #[error("IO error: {0}")]
     IO(#[from] std::io::Error),
     #[error(transparent)]
@@ -114,47 +107,15 @@ pub fn lock(
     })
 }
 
-pub fn fetch_sources<Cache: RepositoryCache>(
-    cache: &Cache,
-    lockfile: &LockFile,
-    cache_src_dir: &Path,
-) -> Result<(), FetchError> {
+pub fn fetch_sources(cache: &impl RepositoryCache, lockfile: &LockFile) -> Result<(), FetchError> {
     info!("Fetching dependencies source files...");
-
-    if !cache_src_dir.exists() {
-        std::fs::create_dir_all(cache_src_dir)?;
+    for dep in &lockfile.dependencies {
+        cache
+            .fetch(&dep.coordinate, &dep.specification, &dep.commit_hash)
+            .map_err(FetchError::Cache)?;
     }
-
-    if cache_src_dir.is_dir() {
-        for dep in &lockfile.dependencies {
-            //If the dependency is already in the cache, we don't need to fetch it again
-            if cache_src_dir
-                .join(&dep.name.value)
-                .join(PathBuf::from(&dep.commit_hash))
-                .exists()
-            {
-                debug!("Skipping fetching {:?}. Already in cache", dep.name);
-                continue;
-            }
-            let repo = cache.clone_or_update(&dep.coordinate)?;
-            let work_tree_res = repo.create_worktrees(
-                &lockfile.module_name,
-                &dep.name,
-                &dep.commit_hash,
-                cache_src_dir,
-            );
-            if let Err(err) = work_tree_res {
-                error!("Error while trying to create worktrees {err}. \
-                Most likely the worktree sources have been deleted but the worktree metadata has not. \
-                Please delete the cache and run protofetch fetch again.")
-            }
-        }
-        Ok(())
-    } else {
-        Err(FetchError::BadOutputDir(
-            cache_src_dir.to_str().unwrap_or("").to_string(),
-        ))
-    }
+    Ok(())
 }
 
 #[cfg(test)]
diff --git a/src/cache.rs b/src/git/cache.rs
similarity index 58%
rename from src/cache.rs
rename to src/git/cache.rs
index 7699c7c..2b02bfa 100644
--- a/src/cache.rs
+++ b/src/git/cache.rs
@@ -1,22 +1,18 @@
 use std::path::{Path, PathBuf};
 
-use git2::Config;
-use git2::{build::RepoBuilder, Cred, CredentialType, FetchOptions, RemoteCallbacks, Repository};
-use log::trace;
+use git2::{
+    build::RepoBuilder, Config, Cred, CredentialType, FetchOptions, RemoteCallbacks, Repository,
+};
+use log::{info, trace};
 use thiserror::Error;
 
-use crate::{model::protofetch::Coordinate, proto_repository::ProtoGitRepository};
+use crate::{git::repository::ProtoGitRepository, model::protofetch::Coordinate};
 
-use crate::proto_repository::ProtoRepository;
-
-pub trait RepositoryCache {
-    type Repository: ProtoRepository;
-
-    fn clone_or_update(&self, entry: &Coordinate) -> Result<Self::Repository, CacheError>;
-}
+const WORKTREES_DIR: &str = "dependencies";
 
 pub struct ProtofetchGitCache {
-    pub location: PathBuf,
+    location: PathBuf,
+    worktrees: PathBuf,
     git_config: Config,
 }
@@ -30,43 +26,48 @@ pub enum CacheError {
     IO(#[from] std::io::Error),
 }
 
-impl RepositoryCache for ProtofetchGitCache {
-    type Repository = ProtoGitRepository;
+impl ProtofetchGitCache {
+    pub fn new(location: PathBuf, git_config: Config) -> Result<ProtofetchGitCache, CacheError> {
+        if location.exists() {
+            if !location.is_dir() {
+                return Err(CacheError::BadLocation {
+                    location: location.to_str().unwrap_or("").to_string(),
+                });
+            }
+        } else {
+            std::fs::create_dir_all(&location)?;
+        }
 
-    fn clone_or_update(&self, entry: &Coordinate) -> Result<Self::Repository, CacheError> {
-        let repo = match self.get_entry(entry) {
-            None => self.clone_repo(entry)?,
-            Some(path) => {
-                let mut repo = self.open_entry(&path)?;
+        let worktrees = location.join(WORKTREES_DIR);
+        Ok(ProtofetchGitCache {
+            location,
+            worktrees,
+            git_config,
+        })
+    }
 
-                self.fetch(&mut repo)?;
+    pub fn clear(&self) -> anyhow::Result<()> {
+        if self.location.exists() {
+            info!(
+                "Clearing protofetch repository cache {}.",
+                &self.location.display()
+            );
+            std::fs::remove_dir_all(&self.location)?;
+        }
+        Ok(())
+    }
 
-                repo
-            }
+    pub fn repository(&self, entry: &Coordinate) -> Result<ProtoGitRepository, CacheError> {
+        let repo = match self.get_entry(entry) {
+            None => self.clone_repo(entry)?,
+            Some(path) => self.open_entry(&path)?,
         };
 
-        Ok(ProtoGitRepository::new(repo))
+        Ok(ProtoGitRepository::new(self, repo))
     }
-}
 
-impl ProtofetchGitCache {
-    pub fn new(location: PathBuf, git_config: Config) -> Result<ProtofetchGitCache, CacheError> {
-        if location.exists() && location.is_dir() {
-            Ok(ProtofetchGitCache {
-                location,
-                git_config,
-            })
-        } else if !location.exists() {
-            std::fs::create_dir_all(&location)?;
-            Ok(ProtofetchGitCache {
-                location,
-                git_config,
-            })
-        } else {
-            Err(CacheError::BadLocation {
-                location: location.to_str().unwrap_or("").to_string(),
-            })
-        }
+    pub fn worktrees_path(&self) -> &Path {
+        &self.worktrees
     }
 
     fn get_entry(&self, entry: &Coordinate) -> Option<PathBuf> {
@@ -86,7 +87,7 @@ impl ProtofetchGitCache {
     fn clone_repo(&self, entry: &Coordinate) -> Result<Repository, CacheError> {
         let mut repo_builder = RepoBuilder::new();
-        let options = ProtofetchGitCache::fetch_options(&self.git_config)?;
+        let options = self.fetch_options()?;
         repo_builder.bare(true).fetch_options(options);
 
         let url = entry.url();
@@ -96,19 +97,7 @@ impl ProtofetchGitCache {
             .map_err(|e| e.into())
     }
 
-    fn fetch(&self, repo: &mut Repository) -> Result<(), CacheError> {
-        let mut remote = repo.find_remote("origin")?;
-        let refspecs: Vec<String> = remote
-            .refspecs()
-            .filter_map(|refspec| refspec.str().map(|s| s.to_string()))
-            .collect();
-        let options = &mut ProtofetchGitCache::fetch_options(&self.git_config)?;
-        remote.fetch(&refspecs, Some(options), None)?;
-
-        Ok(())
-    }
-
-    fn fetch_options(config: &Config) -> Result<FetchOptions, CacheError> {
+    pub(super) fn fetch_options(&self) -> Result<FetchOptions, CacheError> {
        let mut callbacks = RemoteCallbacks::new();
         // Consider using https://crates.io/crates/git2_credentials that supports
         // more authentication options
@@ -129,7 +118,7 @@ impl ProtofetchGitCache {
             }
             // HTTP auth
             if allowed_types.contains(CredentialType::USER_PASS_PLAINTEXT) {
-                return Cred::credential_helper(config, url, username);
+                return Cred::credential_helper(&self.git_config, url, username);
             }
             Err(git2::Error::from_str("no valid authentication available"))
         });
diff --git a/src/git/mod.rs b/src/git/mod.rs
new file mode 100644
index 0000000..06f0bc5
--- /dev/null
+++ b/src/git/mod.rs
@@ -0,0 +1,2 @@
+pub mod cache;
+pub mod repository;
diff --git a/src/proto_repository.rs b/src/git/repository.rs
similarity index 76%
rename from src/proto_repository.rs
rename to src/git/repository.rs
index 47e0029..617c36d 100644
--- a/src/proto_repository.rs
+++ b/src/git/repository.rs
@@ -1,13 +1,12 @@
-use std::{
-    path::{Path, PathBuf},
-    str::Utf8Error,
-};
+use std::{path::PathBuf, str::Utf8Error};
 
 use crate::model::protofetch::{DependencyName, Descriptor, Revision, RevisionSpecification};
 use git2::{Oid, Repository, ResetType};
-use log::debug;
+use log::{debug, warn};
 use thiserror::Error;
 
+use super::cache::ProtofetchGitCache;
+
 #[derive(Error, Debug)]
 pub enum ProtoRepoError {
     #[error("Error while performing revparse in dep {0} for commit {1}: {2}")]
@@ -38,23 +37,49 @@ pub enum ProtoRepoError {
     IO(#[from] std::io::Error),
 }
 
-pub struct ProtoGitRepository {
+pub struct ProtoGitRepository<'a> {
+    cache: &'a ProtofetchGitCache,
     git_repo: Repository,
 }
 
-pub trait ProtoRepository {
-    fn create_worktrees(
+impl<'a> ProtoGitRepository<'a> {
+    pub fn new(cache: &'a ProtofetchGitCache, git_repo: Repository) -> ProtoGitRepository {
+        ProtoGitRepository { cache, git_repo }
+    }
+
+    pub fn fetch(&self, _specification: &RevisionSpecification) -> anyhow::Result<()> {
+        let mut remote = self.git_repo.find_remote("origin")?;
+        // TODO: we only need to fetch refspecs from RevisionSpecification
+        let refspecs: Vec<String> = remote
+            .refspecs()
+            .filter_map(|refspec| refspec.str().map(|s| s.to_string()))
+            .collect();
+        remote.fetch(&refspecs, Some(&mut self.cache.fetch_options()?), None)?;
+        Ok(())
+    }
+
+    pub fn fetch_commit(
         &self,
-        module_name: &str,
-        dep_name: &DependencyName,
+        specification: &RevisionSpecification,
         commit_hash: &str,
-        out_dir: &Path,
-    ) -> Result<(), ProtoRepoError>;
-}
+    ) -> anyhow::Result<()> {
+        let oid = Oid::from_str(commit_hash)?;
+        if self.git_repo.find_commit(oid).is_ok() {
+            return Ok(());
+        }
+        let mut remote = self.git_repo.find_remote("origin")?;
+
+        if let Err(error) =
+            remote.fetch(&[commit_hash], Some(&mut self.cache.fetch_options()?), None)
+        {
+            warn!(
+                "Failed to fetch a single commit {}, falling back to a full fetch: {}",
+                commit_hash, error
+            );
+            self.fetch(specification)?;
+        }
 
-impl ProtoGitRepository {
-    pub fn new(git_repo: Repository) -> ProtoGitRepository {
-        ProtoGitRepository { git_repo }
+        Ok(())
     }
 
     pub fn extract_descriptor(
@@ -133,37 +158,21 @@
         Ok(oid.to_string())
     }
 
-    fn commit_hash_for_obj_str(&self, str: &str) -> Result<Oid, ProtoRepoError> {
-        Ok(self.git_repo.revparse_single(str)?.peel_to_commit()?.id())
-    }
-
-    // Check if `a` is an ancestor of `b`
-    fn is_ancestor(&self, a: Oid, b: Oid) -> Result<bool, ProtoRepoError> {
-        Ok(self.git_repo.merge_base(a, b)? == a)
-    }
-}
-
-impl ProtoRepository for ProtoGitRepository {
-    fn create_worktrees(
+    pub fn create_worktree(
         &self,
-        module_name: &str,
-        dep_name: &DependencyName,
+        name: &DependencyName,
         commit_hash: &str,
-        out_dir: &Path,
-    ) -> Result<(), ProtoRepoError> {
-        let base_path = out_dir.join(PathBuf::from(dep_name.value.as_str()));
+    ) -> Result<PathBuf, ProtoRepoError> {
+        let base_path = self.cache.worktrees_path().join(&name.value);
 
         if !base_path.exists() {
-            std::fs::create_dir(&base_path)?;
+            std::fs::create_dir_all(&base_path)?;
         }
 
         let worktree_path = base_path.join(PathBuf::from(commit_hash));
         let worktree_name = commit_hash;
 
-        debug!(
-            "Module[{}] Finding worktree {} for dep {:?}.",
-            module_name, worktree_name, dep_name
-        );
+        debug!("Finding worktree {} for {}.", worktree_name, name.value);
 
         match self.git_repo.find_worktree(worktree_name) {
             Ok(worktree) => {
@@ -190,18 +199,16 @@
                     });
                 } else {
                     log::info!(
-                        "Module[{}] Found existing worktree for dep {:?} at {}.",
-                        module_name,
-                        dep_name,
+                        "Found existing worktree for {} at {}.",
+                        name.value,
                         canonical_wanted_path.to_string_lossy()
                     );
                 }
             }
             Err(_) => {
                 log::info!(
-                    "Module[{}] Creating new worktree for dep {:?} at {}.",
-                    module_name,
-                    dep_name,
+                    "Creating new worktree for {} at {}.",
+                    name.value,
                     worktree_path.to_string_lossy()
                 );
@@ -210,11 +217,20 @@
             }
         };
 
-        let worktree_repo = Repository::open(worktree_path)?;
+        let worktree_repo = Repository::open(&worktree_path)?;
         let worktree_head_object = worktree_repo.revparse_single(commit_hash)?;
 
         worktree_repo.reset(&worktree_head_object, ResetType::Hard, None)?;
 
-        Ok(())
+        Ok(worktree_path)
+    }
+
+    fn commit_hash_for_obj_str(&self, str: &str) -> Result<Oid, ProtoRepoError> {
+        Ok(self.git_repo.revparse_single(str)?.peel_to_commit()?.id())
+    }
+
+    // Check if `a` is an ancestor of `b`
+    fn is_ancestor(&self, a: Oid, b: Oid) -> Result<bool, ProtoRepoError> {
+        Ok(self.git_repo.merge_base(a, b)? == a)
     }
 }
diff --git a/src/lib.rs b/src/lib.rs
index 49e2cc9..1c23358 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,9 +2,9 @@ mod api;
 mod cache;
 mod cli;
 mod fetch;
+mod git;
 mod model;
 mod proto;
-mod proto_repository;
 mod resolver;
 
 pub use api::{LockMode, Protofetch, ProtofetchBuilder};
diff --git a/src/proto.rs b/src/proto.rs
index 48555f3..84cdf48 100644
--- a/src/proto.rs
+++ b/src/proto.rs
@@ -1,6 +1,9 @@
-use crate::model::protofetch::{
-    lock::{LockFile, LockedDependency},
-    AllowPolicies, DenyPolicies, DependencyName,
+use crate::{
+    cache::RepositoryCache,
+    model::protofetch::{
+        lock::{LockFile, LockedDependency},
+        AllowPolicies, DenyPolicies, DependencyName,
+    },
 };
 use derive_new::new;
 use log::{debug, info, trace};
@@ -19,6 +22,8 @@ pub enum ProtoError {
     BadPath(String),
     #[error("IO error: {0}")]
     IO(#[from] std::io::Error),
+    #[error(transparent)]
+    Cache(anyhow::Error),
 }
 
 /// Represents a mapping for a proto file between the source repo directory and the desired target.
@@ -41,9 +46,9 @@ struct ProtoFileCanonicalMapping {
 /// cache_src_dir: Base path to the directory where the dependencies sources are cached
 /// lockfile: The lockfile that contains the dependencies to be copied
 pub fn copy_proto_files(
-    proto_dir: &Path,
-    cache_src_dir: &Path,
+    cache: &impl RepositoryCache,
     lockfile: &LockFile,
+    proto_dir: &Path,
 ) -> Result<(), ProtoError> {
     info!(
         "Copying proto files from {} descriptor...",
@@ -56,11 +61,13 @@
     let deps = collect_all_root_dependencies(lockfile);
 
     for dep in &deps {
-        let dep_cache_dir = cache_src_dir.join(&dep.name.value).join(&dep.commit_hash);
+        let dep_cache_dir = cache
+            .create_worktree(&dep.coordinate, &dep.commit_hash, &dep.name)
+            .map_err(ProtoError::Cache)?;
         let sources_to_copy: HashSet<ProtoFileCanonicalMapping> = if !dep.rules.prune {
             copy_all_proto_files_for_dep(&dep_cache_dir, dep)?
         } else {
-            pruned_transitive_dependencies(cache_src_dir, dep, lockfile)?
+            pruned_transitive_dependencies(cache, dep, lockfile)?
         };
         let without_denied_files = sources_to_copy
             .into_iter()
@@ -101,13 +108,13 @@ fn copy_all_proto_files_for_dep(
 /// iterates all the dependencies of `dep` and its transitive dependencies based on imports
 /// until no new dependencies are found.
 fn pruned_transitive_dependencies(
-    cache_src_dir: &Path,
+    cache: &impl RepositoryCache,
     dep: &LockedDependency,
     lockfile: &LockFile,
 ) -> Result<HashSet<ProtoFileCanonicalMapping>, ProtoError> {
     fn process_mapping_file(
+        cache: &impl RepositoryCache,
         mapping: ProtoFileCanonicalMapping,
-        cache_src_dir: &Path,
         dep: &LockedDependency,
         lockfile: &LockFile,
         visited: &mut HashSet<PathBuf>,
@@ -117,8 +124,7 @@
         let file_deps = extract_proto_dependencies_from_file(mapping.full_path.as_path())?;
         let mut dependencies = collect_transitive_dependencies(dep, lockfile);
         dependencies.push(dep.clone());
-        let mut new_mappings =
-            canonical_mapping_for_proto_files(&file_deps, cache_src_dir, &dependencies)?;
+        let mut new_mappings = canonical_mapping_for_proto_files(cache, &file_deps, &dependencies)?;
         trace!("Adding {:?}.", &new_mappings);
         new_mappings.push(mapping);
         deps.extend(new_mappings.clone());
@@ -128,13 +134,15 @@
     /// Recursively loop through all the file dependencies based on imports
     /// Looks in own repository and in transitive dependencies.
     fn inner_loop(
-        cache_src_dir: &Path,
+        cache: &impl RepositoryCache,
         dep: &LockedDependency,
         lockfile: &LockFile,
         visited: &mut HashSet<PathBuf>,
         found_proto_deps: &mut HashSet<ProtoFileCanonicalMapping>,
     ) -> Result<(), ProtoError> {
-        let dep_dir = cache_src_dir.join(&dep.name.value).join(&dep.commit_hash);
+        let dep_dir = cache
+            .create_worktree(&dep.coordinate, &dep.commit_hash, &dep.name)
+            .map_err(ProtoError::Cache)?;
         for dir in dep_dir.read_dir()? {
             let proto_files = find_proto_files(&dir?.path())?;
             let filtered_mapping = filtered_proto_files(proto_files, &dep_dir, dep, false)
@@ -150,15 +158,8 @@
                 .filter(|p| !visited.contains(&p.package_path))
                 .collect();
             for mapping in file_dependencies_not_visited {
-                process_mapping_file(
-                    mapping,
-                    cache_src_dir,
-                    dep,
-                    lockfile,
-                    visited,
-                    found_proto_deps,
-                )?;
-                inner_loop(cache_src_dir, dep, lockfile, visited, found_proto_deps)?;
+                process_mapping_file(cache, mapping, dep, lockfile, visited, found_proto_deps)?;
+                inner_loop(cache, dep, lockfile, visited, found_proto_deps)?;
             }
         }
         Ok(())
     }
@@ -169,27 +170,23 @@
     let mut visited_dep: HashSet<DependencyName> = HashSet::new();
 
     debug!("Extracting proto files for {}", &dep.name.value);
-    let dep_dir = cache_src_dir.join(&dep.name.value).join(&dep.commit_hash);
+    let dep_dir = cache
+        .create_worktree(&dep.coordinate, &dep.commit_hash, &dep.name)
+        .map_err(ProtoError::Cache)?;
     for dir in dep_dir.read_dir()? {
         let proto_files = find_proto_files(&dir?.path())?;
         let filtered_mapping = filtered_proto_files(proto_files, &dep_dir, dep, true);
         trace!("Filtered size {:?}.", &filtered_mapping.len());
         for mapping in filtered_mapping {
             process_mapping_file(
+                cache,
                 mapping,
-                cache_src_dir,
-                dep,
-                lockfile,
-                &mut visited,
-                &mut found_proto_deps,
-            )?;
-            inner_loop(
-                cache_src_dir,
                 dep,
                 lockfile,
                 &mut visited,
                 &mut found_proto_deps,
             )?;
+            inner_loop(cache, dep, lockfile, &mut visited, &mut found_proto_deps)?;
         }
     }
@@ -202,13 +199,7 @@
             &dep.name.value
         );
         visited_dep.insert(t_dep.name.clone());
-        inner_loop(
-            cache_src_dir,
-            &t_dep,
-            lockfile,
-            &mut visited,
-            &mut found_proto_deps,
-        )?;
+        inner_loop(cache, &t_dep, lockfile, &mut visited, &mut found_proto_deps)?;
     }
     debug!(
         "Found {:?} proto files for dependency {}",
@@ -352,14 +343,14 @@ fn filtered_proto_files(
 /// and builds the full proto file paths from the package path returning a ProtoFileCanonicalMapping.
 /// This is used to be able to later on copy the files from the source directory to the user defined output directory.
 fn canonical_mapping_for_proto_files(
+    cache: &impl RepositoryCache,
     proto_files: &[PathBuf],
-    cache_src_dir: &Path,
     deps: &[LockedDependency],
 ) -> Result<Vec<ProtoFileCanonicalMapping>, ProtoError> {
     let r: Result<Vec<ProtoFileCanonicalMapping>, ProtoError> = proto_files
         .iter()
         .map(|p| {
-            let zoom_out = zoom_out_content_root(cache_src_dir, deps, p)?;
+            let zoom_out = zoom_out_content_root(cache, deps, p)?;
             Ok(ProtoFileCanonicalMapping::new(zoom_out, p.to_path_buf()))
         })
         .collect::<Result<Vec<_>, _>>();
@@ -391,13 +382,15 @@ fn zoom_in_content_root(
 }
 
 fn zoom_out_content_root(
-    cache_src_dir: &Path,
+    cache: &impl RepositoryCache,
     deps: &[LockedDependency],
     proto_file_source: &Path,
 ) -> Result<PathBuf, ProtoError> {
     let mut proto_src = proto_file_source.to_path_buf();
     for dep in deps {
-        let dep_dir = cache_src_dir.join(&dep.name.value).join(&dep.commit_hash);
+        let dep_dir = cache
+            .create_worktree(&dep.coordinate, &dep.commit_hash, &dep.name)
+            .map_err(ProtoError::Cache)?;
         for dir in dep_dir.read_dir()? {
             let proto_files = find_proto_files(&dir?.path())?;
             if let Some(path) = proto_files
@@ -440,6 +433,25 @@ mod tests {
 
     use pretty_assertions::assert_eq;
 
+    struct FakeCache {
+        root: PathBuf,
+    }
+
+    impl RepositoryCache for FakeCache {
+        fn fetch(&self, _: &Coordinate, _: &RevisionSpecification, _: &str) -> anyhow::Result<()> {
+            Ok(())
+        }
+
+        fn create_worktree(
+            &self,
+            _: &Coordinate,
+            commit_hash: &str,
+            name: &DependencyName,
+        ) -> anyhow::Result<PathBuf> {
+            Ok(self.root.join(&name.value).join(commit_hash))
+        }
+    }
+
     #[test]
     fn content_root_dependencies() {
         let cache_dir = project_root::get_project_root()
@@ -479,7 +491,7 @@
     fn pruned_dependencies() {
         let cache_dir = project_root::get_project_root()
             .unwrap()
-            .join(Path::new("resources/cache"));
+            .join("resources/cache");
         let lock_file = LockFile {
             module_name: "test".to_string(),
             dependencies: vec![
@@ -525,7 +537,7 @@
             .collect();
 
         let pruned1: HashSet<PathBuf> = pruned_transitive_dependencies(
-            &cache_dir,
+            &FakeCache { root: cache_dir },
             lock_file.dependencies.first().unwrap(),
             &lock_file,
         )
diff --git a/src/resolver/git.rs b/src/resolver/git.rs
index f51a7d7..edcf511 100644
--- a/src/resolver/git.rs
+++ b/src/resolver/git.rs
@@ -1,6 +1,6 @@
 use crate::{
-    cache::{ProtofetchGitCache, RepositoryCache},
-    model::protofetch::{Coordinate, DependencyName, Revision, RevisionSpecification},
+    git::cache::ProtofetchGitCache,
+    model::protofetch::{Coordinate, DependencyName, RevisionSpecification},
 };
 
 use super::{ModuleResolver, ResolvedModule};
@@ -13,14 +13,12 @@ impl ModuleResolver for ProtofetchGitCache {
         commit_hash: Option<&str>,
         name: &DependencyName,
     ) -> anyhow::Result<ResolvedModule> {
-        let repository = self.clone_or_update(coordinate)?;
-        let commit_hash = if specification.revision == Revision::Arbitrary {
-            if let Some(commit_hash) = commit_hash {
-                commit_hash.to_owned()
-            } else {
-                repository.resolve_commit_hash(specification)?
-            }
+        let repository = self.repository(coordinate)?;
+        let commit_hash = if let Some(commit_hash) = commit_hash {
+            repository.fetch_commit(specification, commit_hash)?;
+            commit_hash.to_owned()
         } else {
+            repository.fetch(specification)?;
             repository.resolve_commit_hash(specification)?
         };
         let descriptor = repository.extract_descriptor(name, &commit_hash)?;