
Revise based on comments
cslinwang committed Jan 9, 2024
1 parent eb21188 commit eae2b55
Showing 8 changed files with 215 additions and 407 deletions.
17 changes: 7 additions & 10 deletions builder/src/chunkdict_generator.rs
@@ -52,7 +52,7 @@ impl Generator {
         blob_mgr: &mut BlobManager,
         chunkdict_origin: Vec<ChunkdictChunkInfo>,
     ) -> Result<BuildOutput> {
-        // validate and remove chunks which bloned blob size is smaller than block.
+        // Validate and remove chunks whose belonged blob sizes are smaller than a block.
         let mut chunkdict = chunkdict_origin.to_vec();
         Self::validate_and_remove_chunks(ctx, &mut chunkdict);
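The revised comment describes dropping chunks whose owning blob is smaller than one block. The body of `validate_and_remove_chunks` is not part of this diff, so the following Rust sketch only illustrates that idea, with a hypothetical `BLOCK_SIZE` constant and a simplified chunk record standing in for `ChunkdictChunkInfo`:

```rust
use std::collections::HashMap;

/// Illustrative chunk record; the real ChunkdictChunkInfo carries more fields.
struct Chunk {
    chunk_blob_id: String,
    chunk_compressed_size: u32,
}

/// Hypothetical block size; the real builder would take this from its RAFS configuration.
const BLOCK_SIZE: u64 = 4096;

/// Remove chunks whose owning blob totals less than one block, as the comment describes.
fn validate_and_remove_chunks(chunkdict: &mut Vec<Chunk>) {
    // Sum compressed sizes per blob id.
    let mut blob_sizes: HashMap<String, u64> = HashMap::new();
    for c in chunkdict.iter() {
        *blob_sizes.entry(c.chunk_blob_id.clone()).or_insert(0) += c.chunk_compressed_size as u64;
    }
    // Keep only chunks that belong to blobs at least one block large.
    chunkdict.retain(|c| blob_sizes.get(&c.chunk_blob_id).copied().unwrap_or(0) >= BLOCK_SIZE);
}

fn main() {
    let mut dict = vec![
        Chunk { chunk_blob_id: "blob-a".into(), chunk_compressed_size: 8192 },
        Chunk { chunk_blob_id: "blob-b".into(), chunk_compressed_size: 512 },
    ];
    validate_and_remove_chunks(&mut dict);
    // Only the chunk from blob-a (8192 bytes >= 4096) survives.
    assert_eq!(dict.len(), 1);
}
```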

@@ -136,8 +136,8 @@ impl Generator {
         inode.set_blocks(256);
         let node_info = NodeInfo {
             explicit_uidgid: true,
-            src_dev: 66305,
-            src_ino: 24772610,
+            src_dev: 0,
+            src_ino: 0,
             rdev: 0,
             source: PathBuf::from("/"),
             path: PathBuf::from("/"),
@@ -171,8 +171,8 @@ impl Generator {
         inode.set_blocks(256);
         let node_info = NodeInfo {
             explicit_uidgid: true,
-            src_dev: 66305,
-            src_ino: 24775126,
+            src_dev: 0,
+            src_ino: 1,
             rdev: 0,
             source: PathBuf::from("/"),
             path: PathBuf::from("/chunkdict"),
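The two hunks above replace host-specific `src_dev`/`src_ino` values, copied from whatever machine happened to run the builder, with stable placeholders for the generated root and `/chunkdict` nodes. A minimal standalone sketch of the same idea follows; the struct is illustrative only, since the real `NodeInfo` in the nydus builder has more fields:

```rust
use std::path::PathBuf;

/// Illustrative stand-in for the builder's NodeInfo; field names follow the diff,
/// but the real type in the nydus codebase carries more state.
#[derive(Debug)]
struct FakeNodeInfo {
    explicit_uidgid: bool,
    src_dev: u64,
    src_ino: u64,
    rdev: u64,
    source: PathBuf,
    path: PathBuf,
}

/// Build a synthetic node: the chunkdict bootstrap is not backed by a real
/// filesystem, so device and inode numbers are placeholders rather than values
/// taken from the host that runs the builder.
fn synthetic_node(path: &str, ino: u64) -> FakeNodeInfo {
    FakeNodeInfo {
        explicit_uidgid: true,
        src_dev: 0,   // no real backing device
        src_ino: ino, // 0 for "/", 1 for "/chunkdict", mirroring the diff
        rdev: 0,
        source: PathBuf::from("/"),
        path: PathBuf::from(path),
    }
}

fn main() {
    println!("{:?}", synthetic_node("/", 0));
    println!("{:?}", synthetic_node("/chunkdict", 1));
}
```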
@@ -211,17 +211,14 @@ impl Generator {
         node: &mut Node,
         chunkdict: &[ChunkdictChunkInfo],
     ) -> Result<()> {
-        for chunk_info in chunkdict.iter() {
+        for (i, chunk_info) in chunkdict.iter().enumerate() {
             let chunk_size: u32 = chunk_info.chunk_compressed_size;
-            let file_offset = 1 as u64 * chunk_size as u64;
+            let file_offset = i as u64 * chunk_size as u64;
             let mut chunk = ChunkWrapper::new(ctx.fs_version);
 
             // update blob context
             let (blob_index, blob_ctx) =
                 blob_mgr.get_or_cerate_blob_for_chunkdict(ctx, &chunk_info.chunk_blob_id)?;
-            if blob_ctx.blob_id.is_empty() {
-                blob_ctx.blob_id = chunk_info.chunk_blob_id.clone();
-            }
             let chunk_uncompressed_size = chunk_info.chunk_uncompressed_size;
             let pre_d_offset = blob_ctx.current_uncompressed_offset;
             blob_ctx.uncompressed_blob_size = pre_d_offset + chunk_uncompressed_size as u64;
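The last hunk gives each chunk its own file offset: the old `1 as u64 * chunk_size as u64` assigned the same offset to every chunk, while enumerating the dictionary spaces entries out by index. It also drops the manual `blob_id` fix-up, leaving blob initialization to `get_or_cerate_blob_for_chunkdict`. A minimal sketch of the new offset arithmetic, using a simplified chunk type and equal-sized chunks for clarity:

```rust
/// Minimal stand-in for the chunk metadata used in the diff; the real
/// ChunkdictChunkInfo in the nydus builder has more fields.
struct ChunkInfo {
    chunk_compressed_size: u32,
}

/// Assign a distinct file offset to each chunk, mirroring the change from
/// `1 as u64 * chunk_size as u64` (same offset for every chunk) to
/// `i as u64 * chunk_size as u64` (one slot per chunk index).
fn file_offsets(chunkdict: &[ChunkInfo]) -> Vec<u64> {
    chunkdict
        .iter()
        .enumerate()
        .map(|(i, info)| i as u64 * info.chunk_compressed_size as u64)
        .collect()
}

fn main() {
    let chunkdict = vec![
        ChunkInfo { chunk_compressed_size: 4096 },
        ChunkInfo { chunk_compressed_size: 4096 },
        ChunkInfo { chunk_compressed_size: 4096 },
    ];
    // With the old formula all three chunks would land at offset 4096;
    // with the new one they land at 0, 4096 and 8192.
    assert_eq!(file_offsets(&chunkdict), vec![0u64, 4096, 8192]);
}
```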
257 changes: 0 additions & 257 deletions builder/src/generate.rs

This file was deleted.

14 changes: 0 additions & 14 deletions contrib/nydusify/cmd/nydusify.go
@@ -694,11 +694,6 @@ func main() {
             Usage:   "Json configuration file for storage backend",
             EnvVars: []string{"BACKEND_CONFIG_FILE"},
         },
-        &cli.StringFlag{
-            Name:  "push-chunk-size",
-            Value: "0MB",
-            Usage: "Chunk size for pushing a blob layer in chunked",
-        },
 
         &cli.StringFlag{
             Name: "work-dir",
@@ -731,13 +726,6 @@ func main() {
         if err != nil {
             return err
         }
-        pushChunkSize, err := humanize.ParseBytes(c.String("push-chunk-size"))
-        if err != nil {
-            return errors.Wrap(err, "invalid --push-chunk-size option")
-        }
-        if pushChunkSize > 0 {
-            logrus.Infof("will copy layer with chunk size %s", c.String("push-chunk-size"))
-        }
 
         _, arch, err := provider.ExtractOsArch(c.String("platform"))
         if err != nil {
@@ -759,8 +747,6 @@ func main() {
             ExpectedArch: arch,
             AllPlatforms: c.Bool("all-platforms"),
             Platforms:    c.String("platform"),
-
-            PushChunkSize: int64(pushChunkSize),
         })
         if err != nil {
             return err
