Skip to content

Commit

Permalink
feat: persistent cache expose error to compilation.diagnostic
Browse files (browse the repository at this point in the history)
Branch information:
jerrykingxyz committed Dec 18, 2024
1 parent 8aeb507 commit ba3a34e
Show file tree
Hide file tree
Showing 12 changed files with 167 additions and 132 deletions.
5 changes: 4 additions & 1 deletion crates/rspack_core/src/cache/disable.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use rspack_error::Result;

use super::Cache;
use crate::make::MakeArtifact;

Expand All @@ -9,7 +11,8 @@ pub struct DisableCache;

#[async_trait::async_trait]
impl Cache for DisableCache {
async fn before_make(&self, make_artifact: &mut MakeArtifact) {
async fn before_make(&self, make_artifact: &mut MakeArtifact) -> Result<()> {
*make_artifact = Default::default();
Ok(())
}
}
39 changes: 20 additions & 19 deletions crates/rspack_core/src/cache/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ pub mod persistent;

use std::{fmt::Debug, sync::Arc};

use rspack_error::Result;
use rspack_fs::{FileSystem, IntermediateFileSystem};

pub use self::{disable::DisableCache, memory::MemoryCache, persistent::PersistentCache};
Expand All @@ -23,11 +24,19 @@ use crate::{make::MakeArtifact, Compilation, CompilerOptions, ExperimentCacheOpt
/// We can consider change to Hook when we need to open the API to js side.
#[async_trait::async_trait]
pub trait Cache: Debug + Send + Sync {
async fn before_compile(&self, _compilation: &mut Compilation) {}
fn after_compile(&self, _compilation: &Compilation) {}
async fn before_compile(&self, _compilation: &mut Compilation) -> Result<()> {
Ok(())
}
async fn after_compile(&self, _compilation: &Compilation) -> Result<()> {
Ok(())
}

async fn before_make(&self, _make_artifact: &mut MakeArtifact) {}
fn after_make(&self, _make_artifact: &MakeArtifact) {}
async fn before_make(&self, _make_artifact: &mut MakeArtifact) -> Result<()> {
Ok(())
}
async fn after_make(&self, _make_artifact: &MakeArtifact) -> Result<()> {
Ok(())
}
}

pub fn new_cache(
Expand All @@ -39,20 +48,12 @@ pub fn new_cache(
match &compiler_option.experiments.cache {
ExperimentCacheOptions::Disabled => Arc::new(DisableCache),
ExperimentCacheOptions::Memory => Arc::new(MemoryCache),
ExperimentCacheOptions::Persistent(option) => {
match PersistentCache::new(
compiler_path,
option,
compiler_option.clone(),
input_filesystem,
intermediate_filesystem,
) {
Ok(cache) => Arc::new(cache),
Err(e) => {
tracing::warn!("create persistent cache failed {e:?}");
Arc::new(MemoryCache)
}
}
}
ExperimentCacheOptions::Persistent(option) => Arc::new(PersistentCache::new(
compiler_path,
option,
compiler_option.clone(),
input_filesystem,
intermediate_filesystem,
)),
}
}
36 changes: 19 additions & 17 deletions crates/rspack_core/src/cache/persistent/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ use std::{path::PathBuf, sync::Arc};

pub use cacheable_context::{CacheableContext, FromContext};
use occasion::MakeOccasion;
use rspack_fs::{FileSystem, IntermediateFileSystem, Result};
use rspack_error::Result;
use rspack_fs::{FileSystem, IntermediateFileSystem};
use rspack_macros::rspack_version;
use rspack_paths::ArcPath;
use rustc_hash::FxHashSet as HashSet;
Expand Down Expand Up @@ -42,39 +43,40 @@ impl PersistentCache {
compiler_options: Arc<CompilerOptions>,
input_filesystem: Arc<dyn FileSystem>,
intermediate_filesystem: Arc<dyn IntermediateFileSystem>,
) -> Result<Self> {
) -> Self {
let version = version::get_version(
input_filesystem.clone(),
&option.build_dependencies,
vec![compiler_path, &option.version, rspack_version!()],
)?;
);
let storage = create_storage(option.storage.clone(), version, intermediate_filesystem);
let context = Arc::new(CacheableContext {
options: compiler_options,
input_filesystem: input_filesystem.clone(),
});
let make_occasion = MakeOccasion::new(storage.clone(), context);
Ok(Self {
Self {
snapshot: Snapshot::new(option.snapshot.clone(), input_filesystem, storage.clone()),
storage,
make_occasion,
})
}
}
}

#[async_trait::async_trait]
impl Cache for PersistentCache {
async fn before_compile(&self, compilation: &mut Compilation) {
async fn before_compile(&self, compilation: &mut Compilation) -> Result<()> {
if compilation.modified_files.is_empty() && compilation.removed_files.is_empty() {
// inject modified_files and removed_files
let (modified_paths, removed_paths) = self.snapshot.calc_modified_paths().await;
let (modified_paths, removed_paths) = self.snapshot.calc_modified_paths().await?;
tracing::info!("cache::snapshot recovery {modified_paths:?} {removed_paths:?}",);
compilation.modified_files = modified_paths;
compilation.removed_files = removed_paths;
}
Ok(())
}

fn after_compile(&self, compilation: &Compilation) {
async fn after_compile(&self, compilation: &Compilation) -> Result<()> {
// TODO add a all_dependencies to collect dependencies
let (_, file_added, file_removed) = compilation.file_dependencies();
let (_, context_added, context_removed) = compilation.context_dependencies();
Expand Down Expand Up @@ -103,24 +105,24 @@ impl Cache for PersistentCache {
.remove(removed_paths.iter().map(|item| item.as_ref()));
self
.snapshot
.add(modified_paths.iter().map(|item| item.as_ref()));
.add(modified_paths.iter().map(|item| item.as_ref()))
.await;

// TODO listen for storage finish in build mode
let _ = self.storage.trigger_save();

Ok(())
}

async fn before_make(&self, make_artifact: &mut MakeArtifact) {
async fn before_make(&self, make_artifact: &mut MakeArtifact) -> Result<()> {
if !make_artifact.initialized {
match self.make_occasion.recovery().await {
Ok(artifact) => *make_artifact = artifact,
Err(err) => {
tracing::warn!("recovery error with {err:?}")
}
}
*make_artifact = self.make_occasion.recovery().await?;
}
Ok(())
}

fn after_make(&self, make_artifact: &MakeArtifact) {
async fn after_make(&self, make_artifact: &MakeArtifact) -> Result<()> {
self.make_occasion.save(make_artifact);
Ok(())
}
}
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
use std::sync::{Arc, Mutex};

use rayon::prelude::*;
use rspack_cacheable::{
cacheable, from_bytes, to_bytes, with::Inline, DeserializeError, SerializeError,
};
use rspack_cacheable::{cacheable, from_bytes, to_bytes, with::Inline};
use rspack_error::Result;
use rspack_paths::ArcPath;
use rustc_hash::FxHashMap as HashMap;

Expand Down Expand Up @@ -45,7 +44,7 @@ pub fn save_dependencies_info(
missing_dependencies: &FileCounter,
build_dependencies: &FileCounter,
storage: &Arc<dyn Storage>,
) -> Result<(), SerializeError> {
) {
let f = file_dependencies
.updated_files_count_info()
.map(|(path, count)| {
Expand Down Expand Up @@ -98,37 +97,35 @@ pub fn save_dependencies_info(
.chain(m)
.chain(b)
.par_bridge()
.try_for_each(|(dep_ref, count)| {
let dep_ref = to_bytes(&dep_ref, &())?;
.for_each(|(dep_ref, count)| {
let dep_ref = to_bytes(&dep_ref, &()).expect("should to bytes success");
if count == 0 {
storage.remove(SCOPE, &dep_ref);
} else {
storage.set(SCOPE, dep_ref, count.to_ne_bytes().to_vec());
}
Ok(())
})
});
}

pub async fn recovery_dependencies_info(
storage: &Arc<dyn Storage>,
) -> Result<(FileCounter, FileCounter, FileCounter, FileCounter), DeserializeError> {
) -> Result<(FileCounter, FileCounter, FileCounter, FileCounter)> {
let file_dep = Mutex::new(HashMap::default());
let context_dep = Mutex::new(HashMap::default());
let missing_dep = Mutex::new(HashMap::default());
let build_dep = Mutex::new(HashMap::default());
storage
.load(SCOPE)
.await
.unwrap_or_default()
.await?
.into_par_iter()
.try_for_each(|(k, v)| {
.for_each(|(k, v)| {
let count = usize::from_ne_bytes(
v.as_ref()
.clone()
.try_into()
.map_err(|_| DeserializeError::MessageError("deserialize count failed"))?,
.expect("should parse count success"),
);
let Dependency { r#type, path } = from_bytes(&k, &())?;
let Dependency { r#type, path } = from_bytes(&k, &()).expect("should from bytes success");
match r#type {
DepType::File => file_dep
.lock()
Expand All @@ -147,8 +144,7 @@ pub async fn recovery_dependencies_info(
.expect("should get build dep")
.insert(path, count),
};
Ok(())
})?;
});

Ok((
FileCounter::new(file_dep.into_inner().expect("into_inner should be success")),
Expand Down
22 changes: 12 additions & 10 deletions crates/rspack_core/src/cache/persistent/occasion/make/meta.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
use std::sync::{atomic::Ordering::Relaxed, Arc};

use rspack_cacheable::{
cacheable, from_bytes, to_bytes, with::Inline, DeserializeError, SerializeError,
};
use rspack_cacheable::{cacheable, from_bytes, to_bytes, with::Inline};
use rspack_collections::IdentifierSet;
use rspack_error::Result;
use rustc_hash::FxHashSet as HashSet;

use super::Storage;
Expand Down Expand Up @@ -34,23 +33,26 @@ pub fn save_meta(
make_failed_dependencies: &HashSet<BuildDependency>,
make_failed_module: &IdentifierSet,
storage: &Arc<dyn Storage>,
) -> Result<(), SerializeError> {
) {
let meta = MetaRef {
make_failed_dependencies,
make_failed_module,
next_dependencies_id: DEPENDENCY_ID.load(Relaxed),
};
storage.set(SCOPE, "default".as_bytes().to_vec(), to_bytes(&meta, &())?);
Ok(())
storage.set(
SCOPE,
"default".as_bytes().to_vec(),
to_bytes(&meta, &()).expect("should to bytes success"),
);
}

pub async fn recovery_meta(
storage: &Arc<dyn Storage>,
) -> Result<(HashSet<BuildDependency>, IdentifierSet), DeserializeError> {
let Some((_, value)) = storage.load(SCOPE).await.unwrap_or_default().pop() else {
return Err(DeserializeError::MessageError("can not get meta data"));
) -> Result<(HashSet<BuildDependency>, IdentifierSet)> {
let Some((_, value)) = storage.load(SCOPE).await?.pop() else {
return Ok(Default::default());
};
let meta: Meta = from_bytes(&value, &())?;
let meta: Meta = from_bytes(&value, &()).expect("should from bytes success");
// TODO make dependency id to string like module id
if DEPENDENCY_ID.load(Relaxed) < meta.next_dependencies_id {
DEPENDENCY_ID.store(meta.next_dependencies_id, Relaxed);
Expand Down
12 changes: 6 additions & 6 deletions crates/rspack_core/src/cache/persistent/occasion/make/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ mod module_graph;

use std::sync::Arc;

use rspack_cacheable::DeserializeError;
use rspack_error::Result;

use super::super::{cacheable_context::CacheableContext, Storage};
use crate::make::MakeArtifact;
Expand Down Expand Up @@ -48,8 +48,7 @@ impl MakeOccasion {
missing_dependencies,
build_dependencies,
&self.storage,
)
.expect("should save dependencies success");
);

module_graph::save_module_graph(
module_graph_partial,
Expand All @@ -59,14 +58,15 @@ impl MakeOccasion {
&self.context,
);

meta::save_meta(make_failed_dependencies, make_failed_module, &self.storage)
.expect("should save make meta");
meta::save_meta(make_failed_dependencies, make_failed_module, &self.storage);
}

#[tracing::instrument(name = "MakeOccasion::recovery", skip_all)]
pub async fn recovery(&self) -> Result<MakeArtifact, DeserializeError> {
pub async fn recovery(&self) -> Result<MakeArtifact> {
let mut artifact = MakeArtifact::default();

// TODO can call recovery with multi thread
// TODO return DeserializeError not panic
let (file_dependencies, context_dependencies, missing_dependencies, build_dependencies) =
dependencies::recovery_dependencies_info(&self.storage).await?;
artifact.file_dependencies = file_dependencies;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ use rayon::prelude::*;
use rspack_cacheable::{
cacheable, from_bytes, to_bytes,
with::{AsOption, AsTuple2, AsVec, Inline},
DeserializeError, SerializeError,
SerializeError,
};
use rspack_collections::IdentifierSet;
use rspack_error::Result;
use rustc_hash::FxHashSet as HashSet;

use super::Storage;
Expand Down Expand Up @@ -125,11 +126,11 @@ pub fn save_module_graph(
pub async fn recovery_module_graph(
storage: &Arc<dyn Storage>,
context: &CacheableContext,
) -> Result<(ModuleGraphPartial, HashSet<BuildDependency>), DeserializeError> {
) -> Result<(ModuleGraphPartial, HashSet<BuildDependency>)> {
let mut need_check_dep = vec![];
let mut partial = ModuleGraphPartial::default();
let mut mg = ModuleGraph::new(vec![], Some(&mut partial));
for (_, v) in storage.load(SCOPE).await.unwrap_or_default() {
for (_, v) in storage.load(SCOPE).await? {
let mut node: Node =
from_bytes(&v, context).expect("unexpected module graph deserialize failed");
for (dep, parent_block) in node.dependencies {
Expand Down
Loading

0 comments on commit ba3a34e

Please sign in to comment.