Merge pull request #5216 from schomatis/fix/dag-modifier/preserve-fsnode
unixfs: fix `dagTruncate` to preserve node type
whyrusleeping committed Jul 18, 2018
2 parents 454a170 + 70d0f13 commit e8cc529
Showing 5 changed files with 97 additions and 7 deletions.
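
For context, the bug was that `dagTruncate` rebuilt the truncated root from a fresh raw node (`ft.NewFSNode(ft.TRaw)`), so a truncated file lost its original UnixFS type; the `unixfs/mod/dagmodifier.go` hunk below instead re-parses the original node with `ft.FSNodeFromBytes` and only resets its child block sizes. A minimal, self-contained sketch of that idea, using hypothetical stand-in types rather than the real `FSNode` API:

package main

import "fmt"

// Hypothetical stand-ins for the UnixFS node data; not the real ft.FSNode API.
type nodeType int

const (
	tRaw nodeType = iota
	tFile
)

type fsNode struct {
	typ        nodeType
	blockSizes []uint64
}

// truncateOld mirrors the old behaviour: the rebuilt root is always a raw
// node, so the original node type is lost.
func truncateOld(orig fsNode) fsNode {
	return fsNode{typ: tRaw}
}

// truncateNew mirrors the fixed behaviour: start from the original node and
// only clear its child block sizes, preserving the type.
func truncateNew(orig fsNode) fsNode {
	nd := orig
	nd.blockSizes = nil
	return nd
}

func main() {
	file := fsNode{typ: tFile, blockSizes: []uint64{512, 512}}
	fmt.Println(truncateOld(file).typ == file.typ) // false: type was lost
	fmt.Println(truncateNew(file).typ == file.typ) // true: type preserved
}

The real fix then re-adds the sizes of the truncated children via `AddBlockSize`, as the hunk shows.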
40 changes: 40 additions & 0 deletions mfs/mfs_test.go
@@ -1136,3 +1136,43 @@ func TestTruncateAtSize(t *testing.T) {
}
fd.Truncate(4)
}

func TestTruncateAndWrite(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
ds, rt := setupRoot(ctx, t)

dir := rt.GetDirectory()

nd := dag.NodeWithData(ft.FilePBData(nil, 0))
fi, err := NewFile("test", nd, dir, ds)
if err != nil {
t.Fatal(err)
}

fd, err := fi.Open(OpenReadWrite, true)
defer fd.Close()
if err != nil {
t.Fatal(err)
}
for i := 0; i < 200; i++ {
err = fd.Truncate(0)
if err != nil {
t.Fatal(err)
}
l, err := fd.Write([]byte("test"))
if err != nil {
t.Fatal(err)
}
if l != len("test") {
t.Fatal("incorrect write length")
}
data, err := ioutil.ReadAll(fd)
if err != nil {
t.Fatal(err)
}
if string(data) != "test" {
t.Errorf("read error at read %d, read: %v", i, data)
}
}
}
8 changes: 4 additions & 4 deletions test/sharness/t0250-files-api.sh
@@ -613,7 +613,7 @@ tests_for_files_api() {
ROOT_HASH=QmcwKfTMCT7AaeiD92hWjnZn9b6eh9NxnhfSzN5x2vnDpt
CATS_HASH=Qma88m8ErTGkZHbBWGqy1C7VmEmX8wwNDWNpGyCaNmEgwC
FILE_HASH=QmQdQt9qooenjeaNhiKHF3hBvmNteB4MQBtgu3jxgf9c7i
-TRUNC_HASH=QmdaQZbLwK5ykweGdCVovNnvBom7QhikovDUVqTPHQG4L8
+TRUNC_HASH=QmPVnT9gocPbqzN4G6SMp8vAPyzcjDbUJrNdKgzQquuDg4
test_files_api "($EXTRA)"

test_expect_success "can create some files for testing with raw-leaves ($EXTRA)" '
@@ -629,13 +629,13 @@ tests_for_files_api() {
ROOT_HASH=QmW3dMSU6VNd1mEdpk9S3ZYRuR1YwwoXjGaZhkyK6ru9YU
CATS_HASH=QmPqWDEg7NoWRX8Y4vvYjZtmdg5umbfsTQ9zwNr12JoLmt
FILE_HASH=QmRCgHeoKxCqK2Es6M6nPUDVWz19yNQPnsXGsXeuTkSKpN
-TRUNC_HASH=QmRFJEKWF5A5FyFYZgNhusLw2UziW9zBKYr4huyHjzcB6o
+TRUNC_HASH=QmckstrVxJuecVD1FHUiURJiU9aPURZWJieeBVHJPACj8L
test_files_api "($EXTRA, raw-leaves)" '' --raw-leaves

ROOT_HASH=QmageRWxC7wWjPv5p36NeAgBAiFdBHaNfxAehBSwzNech2
CATS_HASH=zdj7WkEzPLNAr5TYJSQC8CFcBjLvWFfGdx6kaBrJXnBguwWeX
FILE_HASH=zdj7WYHvf5sBRgSBjYnq64QFr449CCbgupXfBvoYL3aHC1DzJ
-TRUNC_HASH=zdj7WYLYbka6Ydg8gZUJRLKnFBVehCADhQKBsFbNiMxZSB5Gj
+TRUNC_HASH=zdj7Wjr8GHZonPFVCWvz2SLLo9H6MmqBxyeB34ArHfyCbmdJG
if [ "$EXTRA" = "offline" ]; then
test_files_api "($EXTRA, cidv1)" --cid-version=1
fi
@@ -660,7 +660,7 @@ tests_for_files_api() {
ROOT_HASH=zDMZof1kxEsAwSgCZsGQRVcHCMtHLjkUQoiZUbZ87erpPQJGUeW8
CATS_HASH=zDMZof1kuAhr3zBkxq48V7o9HJZCTVyu1Wd9wnZtVcPJLW8xnGft
FILE_HASH=zDMZof1kxbB9CvxgRioBzESbGnZUxtSCsZ18H1EUkxDdWt1DYEkK
-TRUNC_HASH=zDMZof1kxXqKdVsVo231qVdN3hCTF5a34UuQZpzmm5K7CbRJ4u2S
+TRUNC_HASH=zDMZof1kpH1vxK3k2TeYc8w59atCbzMzrhZonsztMWSptVro2zQa
test_files_api "($EXTRA, blake2b-256 root)"
fi

12 changes: 9 additions & 3 deletions unixfs/mod/dagmodifier.go
@@ -529,7 +529,13 @@ func dagTruncate(ctx context.Context, n ipld.Node, size uint64, ds ipld.DAGServi
var cur uint64
end := 0
var modified ipld.Node
-ndata := ft.NewFSNode(ft.TRaw)
+ndata, err := ft.FSNodeFromBytes(nd.Data())
+if err != nil {
+return nil, err
+}
+// Reset the block sizes of the node to adjust them
+// with the new values of the truncated children.
+ndata.RemoveAllBlockSizes()
for i, lnk := range nd.Links() {
child, err := lnk.GetNode(ctx, ds)
if err != nil {
@@ -558,7 +564,7 @@ func dagTruncate(ctx context.Context, n ipld.Node, size uint64, ds ipld.DAGServi
ndata.AddBlockSize(childsize)
}

-err := ds.Add(ctx, modified)
+err = ds.Add(ctx, modified)
if err != nil {
return nil, err
}
@@ -573,7 +579,7 @@ func dagTruncate(ctx context.Context, n ipld.Node, size uint64, ds ipld.DAGServi
if err != nil {
return nil, err
}
-
+// Save the new block sizes to the original node.
nd.SetData(d)

// invalidate cache and recompute serialized data
38 changes: 38 additions & 0 deletions unixfs/mod/dagmodifier_test.go
@@ -406,6 +406,44 @@ func testDagTruncate(t *testing.T, opts testu.NodeOpts) {
}
}

// TestDagTruncateSameSize tests that a DAG truncated
// to the same size (i.e., doing nothing) doesn't modify
// the DAG (its hash).
func TestDagTruncateSameSize(t *testing.T) {
runAllSubtests(t, testDagTruncateSameSize)
}
func testDagTruncateSameSize(t *testing.T, opts testu.NodeOpts) {
dserv := testu.GetDAGServ()
_, n := testu.GetRandomNode(t, dserv, 50000, opts)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

dagmod, err := NewDagModifier(ctx, n, dserv, testu.SizeSplitterGen(512))
if err != nil {
t.Fatal(err)
}
// Copied from `TestDagTruncate`.

size, err := dagmod.Size()
if err != nil {
t.Fatal(err)
}

err = dagmod.Truncate(size)
if err != nil {
t.Fatal(err)
}

modifiedNode, err := dagmod.GetNode()
if err != nil {
t.Fatal(err)
}

if modifiedNode.Cid().Equals(n.Cid()) == false {
t.Fatal("the node has been modified!")
}
}

func TestSparseWrite(t *testing.T) {
runAllSubtests(t, testSparseWrite)
}
6 changes: 6 additions & 0 deletions unixfs/unixfs.go
@@ -201,6 +201,12 @@ func (n *FSNode) BlockSize(i int) uint64 {
return n.format.Blocksizes[i]
}

// RemoveAllBlockSizes removes all the child block sizes of this node.
func (n *FSNode) RemoveAllBlockSizes() {
n.format.Blocksizes = []uint64{}
n.format.Filesize = proto.Uint64(uint64(len(n.Data())))
}

// GetBytes marshals this node as a protobuf message.
func (n *FSNode) GetBytes() ([]byte, error) {
return proto.Marshal(&n.format)
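
The `RemoveAllBlockSizes` helper added above is what lets `dagTruncate` keep the original node: the node's type and data survive while the recorded child block sizes are dropped and then rebuilt from the truncated children. A rough usage sketch, assuming the in-repo import path `github.com/ipfs/go-ipfs/unixfs` at the time of this commit and only the `FSNode` methods visible in this diff (`NewFSNode`, `AddBlockSize`, `RemoveAllBlockSizes`, `GetBytes`):

package main

import (
	"fmt"

	ft "github.com/ipfs/go-ipfs/unixfs" // assumed in-repo import path at this commit
)

func main() {
	// A file node tracking two child blocks.
	n := ft.NewFSNode(ft.TFile)
	n.AddBlockSize(512)
	n.AddBlockSize(256)

	// Drop the recorded child block sizes, as dagTruncate now does before
	// re-adding the sizes of the truncated children; the node stays a file node.
	n.RemoveAllBlockSizes()

	// The node still serializes as a UnixFS file, now with no block sizes.
	if _, err := n.GetBytes(); err != nil {
		fmt.Println("marshal error:", err)
	}
}

Note that, per the diff, `RemoveAllBlockSizes` also resets `Filesize` to the length of the node's inline data.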
