Skip to content

Commit

Permalink
core/commands/unixfs/ls: Hash-map for Objects
Browse files Browse the repository at this point in the history
Discussion with Juan on IRC ([1] through [2]) led to this adjusted
JSON output.  Benefits over the old output include:

* deduplication (we only check the children of a given Merkle node
  once, even if multiple arguments resolve to that hash)

* alphabetized output (like POSIX's ls).  As a side-effect of this
  change, I'm also matching GNU Coreutils' ls output (maybe in POSIX?)
  by printing an alphabetized list of non-directories (one per line)
  first, with alphabetized directory lists afterwards.

[1]: https://botbot.me/freenode/ipfs/2015-06-12/?msg=41725570&page=5
[2]: https://botbot.me/freenode/ipfs/2015-06-12/?msg=41726547&page=5

License: MIT
Signed-off-by: W. Trevor King <wking@tremily.us>
  • Loading branch information
wking committed Jun 15, 2015
1 parent 180ed0a commit 0e49177
Show file tree
Hide file tree
Showing 2 changed files with 141 additions and 35 deletions.
90 changes: 65 additions & 25 deletions core/commands/unixfs/ls.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import (
"bytes"
"fmt"
"io"
"sort"
"text/tabwriter"
"time"

Expand All @@ -23,12 +24,12 @@ type LsLink struct {
}

type LsObject struct {
Argument string
Links []LsLink
Links []LsLink
}

type LsOutput struct {
Objects []*LsObject
Arguments map[string]string
Objects map[string]*LsObject
}

var LsCmd = &cmds.Command{
Expand Down Expand Up @@ -57,22 +58,40 @@ directories, the child size is the IPFS link size.

paths := req.Arguments()

output := make([]*LsObject, len(paths))
for i, fpath := range paths {
output := LsOutput{
Arguments: map[string]string{},
Objects: map[string]*LsObject{},
}

for _, fpath := range paths {
ctx := req.Context().Context
merkleNode, err := core.Resolve(ctx, node, path.Path(fpath))
if err != nil {
res.SetError(err, cmds.ErrNormal)
return
}

unixFSNode, err := unixfs.FromBytes(merkleNode.Data)
key, err := merkleNode.Key()
if err != nil {
res.SetError(err, cmds.ErrNormal)
return
}

output[i] = &LsObject{Argument: fpath}
hash := key.B58String()
output.Arguments[fpath] = hash

if _, ok := output.Objects[hash]; ok {
// duplicate argument for an already-listed node
continue
}

output.Objects[hash] = &LsObject{}

unixFSNode, err := unixfs.FromBytes(merkleNode.Data)
if err != nil {
res.SetError(err, cmds.ErrNormal)
return
}

t := unixFSNode.GetType()
switch t {
Expand All @@ -85,15 +104,16 @@ directories, the child size is the IPFS link size.
res.SetError(err, cmds.ErrNormal)
return
}
output[i].Links = []LsLink{LsLink{
output.Objects[hash].Links = []LsLink{LsLink{
Name: fpath,
Hash: key.String(),
Type: t.String(),
Size: unixFSNode.GetFilesize(),
}}
case unixfspb.Data_Directory:
output[i].Links = make([]LsLink, len(merkleNode.Links))
for j, link := range merkleNode.Links {
links := make([]LsLink, len(merkleNode.Links))
output.Objects[hash].Links = links
for i, link := range merkleNode.Links {
getCtx, cancel := context.WithTimeout(ctx, time.Minute)
defer cancel()
link.Node, err = link.GetNode(getCtx, node.DAG)
Expand All @@ -117,34 +137,54 @@ directories, the child size is the IPFS link size.
} else {
lsLink.Size = link.Size
}
output[i].Links[j] = lsLink
links[i] = lsLink
}
}
}

res.SetOutput(&LsOutput{Objects: output})
res.SetOutput(&output)
},
Marshalers: cmds.MarshalerMap{
cmds.Text: func(res cmds.Response) (io.Reader, error) {

output := res.Output().(*LsOutput)
buf := new(bytes.Buffer)
w := tabwriter.NewWriter(buf, 1, 2, 1, ' ', 0)
lastObjectDirHeader := false
for i, object := range output.Objects {
singleObject := (len(object.Links) == 1 &&
object.Links[0].Name == object.Argument)
if len(output.Objects) > 1 && !singleObject {
if i > 0 {
fmt.Fprintln(w)
}
fmt.Fprintf(w, "%s:\n", object.Argument)
lastObjectDirHeader = true

nonDirectories := []string{}
directories := []string{}
for argument := range output.Arguments {
hash := output.Arguments[argument]
object := output.Objects[hash]
if len(object.Links) == 1 && object.Links[0].Hash == hash {
nonDirectories = append(nonDirectories, argument)
} else {
if lastObjectDirHeader {
fmt.Fprintln(w)
directories = append(directories, argument)
}
}
sort.Strings(nonDirectories)
sort.Strings(directories)

for _, argument := range nonDirectories {
fmt.Fprintf(w, "%s\n", argument)
}

seen := map[string]bool{}
for i, argument := range directories {
hash := output.Arguments[argument]
if _, ok := seen[hash]; ok {
continue
}
seen[hash] = true

object := output.Objects[hash]
if i > 0 || len(nonDirectories) > 0 {
fmt.Fprintln(w)
}
for _, arg := range directories[i:] {
if output.Arguments[arg] == hash {
fmt.Fprintf(w, "%s:\n", arg)
}
lastObjectDirHeader = false
}
for _, link := range object.Links {
fmt.Fprintf(w, "%s\n", link.Name)
Expand Down
86 changes: 76 additions & 10 deletions test/sharness/t0200-unixfs-ls.sh
Original file line number Diff line number Diff line change
Expand Up @@ -44,19 +44,19 @@ test_ls_cmd() {

test_expect_success "'ipfs file ls <three dir hashes>' output looks good" '
cat <<-\EOF >expected_ls &&
QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj:
d1
d2
f1
f2
QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy:
1024
a
QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss:
128
a
QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj:
d1
d2
f1
f2
EOF
test_cmp expected_ls actual_ls
'
Expand All @@ -73,16 +73,35 @@ test_ls_cmd() {
test_cmp expected_ls_file actual_ls_file
'

test_expect_success "'ipfs file ls <duplicates>' succeeds" '
ipfs file ls /ipfs/QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj/d1 /ipfs/QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss /ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024 /ipfs/QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd >actual_ls_duplicates_file
'

test_expect_success "'ipfs file ls <duplicates>' output looks good" '
cat <<-\EOF >expected_ls_duplicates_file &&
/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024
/ipfs/QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd
/ipfs/QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss:
/ipfs/QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj/d1:
128
a
EOF
test_cmp expected_ls_duplicates_file actual_ls_duplicates_file
'

test_expect_success "'ipfs --encoding=json file ls <file hashes>' succeeds" '
ipfs --encoding=json file ls /ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024 >actual_json_ls_file
'

test_expect_success "'ipfs --encoding=json file ls <file hashes>' output looks good" '
cat <<-\EOF >expected_json_ls_file_trailing_newline &&
{
"Objects": [
{
"Argument": "/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024",
"Arguments": {
"/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd"
},
"Objects": {
"QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd": {
"Links": [
{
"Name": "/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024",
Expand All @@ -92,12 +111,59 @@ test_ls_cmd() {
}
]
}
]
}
}
EOF
printf %s "$(cat expected_json_ls_file_trailing_newline)" >expected_json_ls_file
test_cmp expected_json_ls_file actual_json_ls_file
'

test_expect_success "'ipfs --encoding=json file ls <duplicates>' succeeds" '
ipfs --encoding=json file ls /ipfs/QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj/d1 /ipfs/QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss /ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024 /ipfs/QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd >actual_json_ls_duplicates_file
'

test_expect_success "'ipfs --encoding=json file ls <duplicates>' output looks good" '
cat <<-\EOF >expected_json_ls_duplicates_file_trailing_newline &&
{
"Arguments": {
"/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd",
"/ipfs/QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss": "QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss",
"/ipfs/QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd",
"/ipfs/QmfNy183bXiRVyrhyWtq3TwHn79yHEkiAGFr18P7YNzESj/d1": "QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss"
},
"Objects": {
"QmSix55yz8CzWXf5ZVM9vgEvijnEeeXiTSarVtsqiiCJss": {
"Links": [
{
"Name": "128",
"Hash": "QmQNd6ubRXaNG6Prov8o6vk3bn6eWsj9FxLGrAVDUAGkGe",
"Size": 128,
"Type": "File"
},
{
"Name": "a",
"Hash": "QmZULkCELmmk5XNfCgTnCyFgAVxBRBXyDHGGMVoLFLiXEN",
"Size": 6,
"Type": "File"
}
]
},
"QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd": {
"Links": [
{
"Name": "/ipfs/QmR3jhV4XpxxPjPT3Y8vNnWvWNvakdcT3H6vqpRBsX1MLy/1024",
"Hash": "QmbQBUSRL9raZtNXfpTDeaxQapibJEG6qEY8WqAN22aUzd",
"Size": 1024,
"Type": "File"
}
]
}
}
}
EOF
printf %s "$(cat expected_json_ls_duplicates_file_trailing_newline)" >expected_json_ls_duplicates_file
test_cmp expected_json_ls_duplicates_file actual_json_ls_duplicates_file
'
}


Expand Down

0 comments on commit 0e49177

Please sign in to comment.