diff --git a/test/cli/pins_test.go b/test/cli/pins_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..14a3dc2385cbcfe99792ecba4180b13ab0aa74c7
--- /dev/null
+++ b/test/cli/pins_test.go
@@ -0,0 +1,214 @@
+package cli
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+
+	"github.com/ipfs/go-cid"
+	"github.com/ipfs/kubo/test/cli/harness"
+	. "github.com/ipfs/kubo/test/cli/testutils"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+type testPinsArgs struct {
+	runDaemon bool
+	pinArg    string
+	lsArg     string
+	baseArg   string
+}
+
+func testPins(t *testing.T, args testPinsArgs) {
+	t.Run(fmt.Sprintf("test pins with args=%+v", args), func(t *testing.T) {
+		t.Parallel()
+		node := harness.NewT(t).NewNode().Init()
+		if args.runDaemon {
+			node.StartDaemon("--offline")
+		}
+
+		strs := []string{"a", "b", "c", "d", "e", "f", "g"}
+		dataToCid := map[string]string{}
+		cids := []string{}
+
+		ipfsAdd := func(t *testing.T, content string) string {
+			cidStr := node.IPFSAddStr(content, StrCat(args.baseArg, "--pin=false")...)
+
+			_, err := cid.Decode(cidStr)
+			require.NoError(t, err)
+			dataToCid[content] = cidStr
+			cids = append(cids, cidStr)
+			return cidStr
+		}
+
+		ipfsPinAdd := func(cids []string) []string {
+			input := strings.Join(cids, "\n")
+			return node.PipeStrToIPFS(input, StrCat("pin", "add", args.pinArg, args.baseArg)...).Stdout.Lines()
+		}
+
+		ipfsPinLS := func() string {
+			return node.IPFS(StrCat("pin", "ls", args.lsArg, args.baseArg)...).Stdout.Trimmed()
+		}
+
+		for _, s := range strs {
+			ipfsAdd(t, s)
+		}
+
+		// these subtests run sequentially since they depend on state
+
+		t.Run("check output of pin command", func(t *testing.T) {
+			resLines := ipfsPinAdd(cids)
+
+			for i, s := range resLines {
+				assert.Equal(t,
+					fmt.Sprintf("pinned %s recursively", cids[i]),
+					s,
+				)
+			}
+		})
+
+		t.Run("pin verify should succeed", func(t *testing.T) {
+			node.IPFS("pin", "verify")
+		})
+
+		t.Run("'pin verify --verbose' should include all the cids", func(t *testing.T) {
+			verboseVerifyOut := node.IPFS(StrCat("pin", "verify", "--verbose", args.baseArg)...).Stdout.String()
+			for _, cid := range cids {
+				assert.Contains(t, verboseVerifyOut, fmt.Sprintf("%s ok", cid))
+			}
+		})
+
+		t.Run("ls output should contain the cids", func(t *testing.T) {
+			lsOut := ipfsPinLS()
+			for _, cid := range cids {
+				assert.Contains(t, lsOut, cid)
+			}
+		})
+
+		t.Run("check 'pin ls hash' output", func(t *testing.T) {
+			lsHashOut := node.IPFS(StrCat("pin", "ls", args.lsArg, args.baseArg, dataToCid["b"])...)
+			lsHashOutStr := lsHashOut.Stdout.String()
+			assert.Equal(t, fmt.Sprintf("%s recursive\n", dataToCid["b"]), lsHashOutStr)
+		})
+
+		t.Run("unpinning works", func(t *testing.T) {
+			node.PipeStrToIPFS(strings.Join(cids, "\n"), "pin", "rm")
+		})
+
+		t.Run("test pin update", func(t *testing.T) {
+			cidA := dataToCid["a"]
+			cidB := dataToCid["b"]
+
+			ipfsPinAdd([]string{cidA})
+			beforeUpdate := ipfsPinLS()
+
+			assert.Contains(t, beforeUpdate, cidA)
+			assert.NotContains(t, beforeUpdate, cidB)
+
+			node.IPFS("pin", "update", "--unpin=true", cidA, cidB)
+			afterUpdate := ipfsPinLS()
+
+			assert.NotContains(t, afterUpdate, cidA)
+			assert.Contains(t, afterUpdate, cidB)
+
+			node.IPFS("pin", "update", "--unpin=true", cidB, cidB)
+			afterIdempotentUpdate := ipfsPinLS()
+
+			assert.Contains(t, afterIdempotentUpdate, cidB)
+
+			node.IPFS("pin", "rm", cidB)
+		})
+	})
+}
+
+func testPinsErrorReporting(t *testing.T, args testPinsArgs) {
+	t.Run(fmt.Sprintf("test pins error reporting with args=%+v", args), func(t *testing.T) {
+		t.Parallel()
+		node := harness.NewT(t).NewNode().Init()
+		if args.runDaemon {
+			node.StartDaemon("--offline")
+		}
+		randomCID := "Qme8uX5n9hn15pw9p6WcVKoziyyC9LXv4LEgvsmKMULjnV"
+		res := node.RunIPFS(StrCat("pin", "add", args.pinArg, randomCID)...)
+		assert.NotEqual(t, 0, res.ExitErr.ExitCode())
+		assert.Contains(t, res.Stderr.String(), "ipld: could not find")
+	})
+}
+
+func testPinDAG(t *testing.T, args testPinsArgs) {
+	t.Run(fmt.Sprintf("test pin DAG with args=%+v", args), func(t *testing.T) {
+		t.Parallel()
+		h := harness.NewT(t)
+		node := h.NewNode().Init()
+		if args.runDaemon {
+			node.StartDaemon("--offline")
+		}
+		bytes := RandomBytes(1 << 20) // 1 MiB
+		tmpFile := h.WriteToTemp(string(bytes))
+		cid := node.IPFS(StrCat("add", args.pinArg, "--pin=false", "-q", tmpFile)...).Stdout.Trimmed()
+
+		node.IPFS("pin", "add", "--recursive=true", cid)
+		node.IPFS("pin", "rm", cid)
+
+		// remove part of the DAG
+		part := node.IPFS("refs", cid).Stdout.Lines()[0]
+		node.IPFS("block", "rm", part)
+
+		res := node.RunIPFS("pin", "add", "--recursive=true", cid)
+		assert.NotEqual(t, 0, res.ExitErr.ExitCode())
+		assert.Contains(t, res.Stderr.String(), "ipld: could not find")
+	})
+}
+
+func testPinProgress(t *testing.T, args testPinsArgs) {
+	t.Run(fmt.Sprintf("test pin progress with args=%+v", args), func(t *testing.T) {
+		t.Parallel()
+		h := harness.NewT(t)
+		node := h.NewNode().Init()
+
+		if args.runDaemon {
+			node.StartDaemon("--offline")
+		}
+
+		bytes := RandomBytes(1 << 20) // 1 MiB
+		tmpFile := h.WriteToTemp(string(bytes))
+		cid := node.IPFS(StrCat("add", args.pinArg, "--pin=false", "-q", tmpFile)...).Stdout.Trimmed()
+
+		res := node.RunIPFS("pin", "add", "--progress", cid)
+		node.Runner.AssertNoError(res)
+
+		assert.Contains(t, res.Stderr.String(), " 5 nodes") // 1 MiB at the default 256 KiB chunk size => 4 leaves + 1 root
+	})
+}
+
+func TestPins(t *testing.T) {
+	t.Parallel()
+	t.Run("test pinning without daemon running", func(t *testing.T) {
+		t.Parallel()
+		testPinsErrorReporting(t, testPinsArgs{})
+		testPinsErrorReporting(t, testPinsArgs{pinArg: "--progress"})
+		testPinDAG(t, testPinsArgs{})
+		testPinDAG(t, testPinsArgs{pinArg: "--raw-leaves"})
+		testPinProgress(t, testPinsArgs{})
+		testPins(t, testPinsArgs{})
+		testPins(t, testPinsArgs{pinArg: "--progress"})
+		testPins(t, testPinsArgs{pinArg: "--progress", lsArg: "--stream"})
+		testPins(t, testPinsArgs{baseArg: "--cid-base=base32"})
+		testPins(t, testPinsArgs{lsArg: "--stream", baseArg: "--cid-base=base32"})
+
+	})
+
+	t.Run("test pinning with daemon running without network", func(t *testing.T) {
+		t.Parallel()
+		testPinsErrorReporting(t, testPinsArgs{runDaemon: true})
+		testPinsErrorReporting(t, testPinsArgs{runDaemon: true, pinArg: "--progress"})
+		testPinDAG(t, testPinsArgs{runDaemon: true})
+		testPinDAG(t, testPinsArgs{runDaemon: true, pinArg: "--raw-leaves"})
+		testPinProgress(t, testPinsArgs{runDaemon: true})
+		testPins(t, testPinsArgs{runDaemon: true})
+		testPins(t, testPinsArgs{runDaemon: true, pinArg: "--progress"})
+		testPins(t, testPinsArgs{runDaemon: true, pinArg: "--progress", lsArg: "--stream"})
+		testPins(t, testPinsArgs{runDaemon: true, baseArg: "--cid-base=base32"})
+		testPins(t, testPinsArgs{runDaemon: true, lsArg: "--stream", baseArg: "--cid-base=base32"})
+	})
+}
diff --git a/test/cli/testutils/files.go b/test/cli/testutils/files.go
new file mode 100644
index 0000000000000000000000000000000000000000..e17c98adf79f48bd39e497c9987a4d8afab07fcb
--- /dev/null
+++ b/test/cli/testutils/files.go
@@ -0,0 +1,37 @@
+package testutils
+
+import (
+	"log"
+	"os"
+	"path/filepath"
+)
+
+func MustOpen(name string) *os.File {
+	f, err := os.Open(name)
+	if err != nil {
+		log.Panicf("opening %s: %s", name, err)
+	}
+	return f
+}
+
+// FindUp searches for a file, starting in dir and then walking up through the parent directories.
+// If the file is not found, an empty string is returned.
+func FindUp(name, dir string) string {
+	curDir := dir
+	for {
+		entries, err := os.ReadDir(curDir)
+		if err != nil {
+			panic(err)
+		}
+		for _, e := range entries {
+			if name == e.Name() {
+				return filepath.Join(curDir, name)
+			}
+		}
+		newDir := filepath.Dir(curDir)
+		if newDir == curDir {
+			return ""
+		}
+		curDir = newDir
+	}
+}
diff --git a/test/cli/testutils/json.go b/test/cli/testutils/json.go
new file mode 100644
index 0000000000000000000000000000000000000000..bc3093f135594497facf7b99b4deb57e5f1ed64d
--- /dev/null
+++ b/test/cli/testutils/json.go
@@ -0,0 +1,13 @@
+package testutils
+
+import "encoding/json"
+
+type JSONObj map[string]interface{}
+
+func ToJSONStr(m JSONObj) string {
+	b, err := json.Marshal(m)
+	if err != nil {
+		panic(err)
+	}
+	return string(b)
+}
diff --git a/test/cli/testutils/random.go b/test/cli/testutils/random.go
new file mode 100644
index 0000000000000000000000000000000000000000..00bb9de494c5e50e773d76e188d06f6e4b6cfce3
--- /dev/null
+++ b/test/cli/testutils/random.go
@@ -0,0 +1,12 @@
+package testutils
+
+import "crypto/rand"
+
+func RandomBytes(n int) []byte {
+	bytes := make([]byte, n)
+	_, err := rand.Read(bytes)
+	if err != nil {
+		panic(err)
+	}
+	return bytes
+}
diff --git a/test/cli/testutils/util.go b/test/cli/testutils/strings.go
similarity index 59%
rename from test/cli/testutils/util.go
rename to test/cli/testutils/strings.go
index 2c013f5b9e9ec270e24006a1e4bac20b3f9a4d83..529948d3feb32932b39ee4423930e35e2c600cf4 100644
--- a/test/cli/testutils/util.go
+++ b/test/cli/testutils/strings.go
@@ -2,31 +2,10 @@ package testutils
 
 import (
 	"bufio"
-	"encoding/json"
 	"fmt"
-	"log"
-	"os"
-	"path/filepath"
 	"strings"
 )
 
-func SplitLines(s string) []string {
-	var lines []string
-	scanner := bufio.NewScanner(strings.NewReader(s))
-	for scanner.Scan() {
-		lines = append(lines, scanner.Text())
-	}
-	return lines
-}
-
-func MustOpen(name string) *os.File {
-	f, err := os.Open(name)
-	if err != nil {
-		log.Panicf("opening %s: %s", name, err)
-	}
-	return f
-}
-
 // StrCat takes a bunch of strings or string slices
 // and concats them all together into one string slice.
 // If an arg is not one of those types, this panics.
@@ -64,34 +43,11 @@ func PreviewStr(s string) string { return s[0:previewLength] + suffix } -type JSONObj map[string]interface{} - -func ToJSONStr(m JSONObj) string { - b, err := json.Marshal(m) - if err != nil { - panic(err) - } - return string(b) -} - -// Searches for a file in a dir, then the parent dir, etc. -// If the file is not found, an empty string is returned. -func FindUp(name, dir string) string { - curDir := dir - for { - entries, err := os.ReadDir(curDir) - if err != nil { - panic(err) - } - for _, e := range entries { - if name == e.Name() { - return filepath.Join(curDir, name) - } - } - newDir := filepath.Dir(curDir) - if newDir == curDir { - return "" - } - curDir = newDir +func SplitLines(s string) []string { + var lines []string + scanner := bufio.NewScanner(strings.NewReader(s)) + for scanner.Scan() { + lines = append(lines, scanner.Text()) } + return lines } diff --git a/test/sharness/t0085-pins.sh b/test/sharness/t0085-pins.sh deleted file mode 100755 index c83c513682bc8ffa10325427a4f2f71ada18b1c3..0000000000000000000000000000000000000000 --- a/test/sharness/t0085-pins.sh +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env bash -# -# Copyright (c) 2016 Jeromy Johnson -# MIT Licensed; see the LICENSE file in this repository. -# - -test_description="Test ipfs pinning operations" - -. lib/test-lib.sh - - -test_pins() { - PIN_ARGS="$1" - LS_ARGS="$2" - BASE=$3 - if [ -n "$BASE" ]; then - BASE_ARGS="--cid-base=$BASE" - fi - - test_expect_success "create some hashes $BASE" ' - HASH_A=$(echo "A" | ipfs add $BASE_ARGS -q --pin=false) && - HASH_B=$(echo "B" | ipfs add $BASE_ARGS -q --pin=false) && - HASH_C=$(echo "C" | ipfs add $BASE_ARGS -q --pin=false) && - HASH_D=$(echo "D" | ipfs add $BASE_ARGS -q --pin=false) && - HASH_E=$(echo "E" | ipfs add $BASE_ARGS -q --pin=false) && - HASH_F=$(echo "F" | ipfs add $BASE_ARGS -q --pin=false) && - HASH_G=$(echo "G" | ipfs add $BASE_ARGS -q --pin=false) - ' - - test_expect_success "put all those hashes in a file" ' - echo $HASH_A > hashes && - echo $HASH_B >> hashes && - echo $HASH_C >> hashes && - echo $HASH_D >> hashes && - echo $HASH_E >> hashes && - echo $HASH_F >> hashes && - echo $HASH_G >> hashes - ' - - if [ -n "$BASE" ]; then - test_expect_success "make sure hashes are in $BASE" ' - cat hashes | xargs cid-fmt %b | sort -u > actual - echo base32 > expected - test_cmp expected actual - ' - fi - - test_expect_success "'ipfs pin add $PIN_ARGS' via stdin" ' - cat hashes | ipfs pin add $PIN_ARGS $BASE_ARGS | tee actual - ' - - test_expect_success "'ipfs pin add $PIN_ARGS' output looks good" ' - sed -e "s/^/pinned /; s/$/ recursively/" hashes > expected && - test_cmp expected actual - ' - - test_expect_success "see if verify works" ' - ipfs pin verify - ' - - test_expect_success "see if verify --verbose $BASE_ARGS works" ' - ipfs pin verify --verbose $BASE_ARGS > verify_out && - test $(cat verify_out | wc -l) -ge 7 && - test_should_contain "$HASH_A ok" verify_out && - test_should_contain "$HASH_B ok" verify_out && - test_should_contain "$HASH_C ok" verify_out && - test_should_contain "$HASH_D ok" verify_out && - test_should_contain "$HASH_E ok" verify_out && - test_should_contain "$HASH_F ok" verify_out && - test_should_contain "$HASH_G ok" verify_out - ' - - test_expect_success "ipfs pin ls $LS_ARGS $BASE_ARGS works" ' - ipfs pin ls $LS_ARGS $BASE_ARGS > ls_out && - test_should_contain "$HASH_A" ls_out && - test_should_contain "$HASH_B" ls_out && - test_should_contain "$HASH_C" ls_out && - test_should_contain "$HASH_D" ls_out && - 
test_should_contain "$HASH_E" ls_out && - test_should_contain "$HASH_F" ls_out && - test_should_contain "$HASH_G" ls_out - ' - - test_expect_success "test pin ls $LS_ARGS $BASE_ARGS hash" ' - echo $HASH_B | test_must_fail grep /ipfs && # just to be sure - ipfs pin ls $LS_ARGS $BASE_ARGS $HASH_B > ls_hash_out && - echo "$HASH_B recursive" > ls_hash_exp && - test_cmp ls_hash_exp ls_hash_out - ' - - test_expect_success "unpin those hashes" ' - cat hashes | ipfs pin rm - ' - - test_expect_success "test pin update" ' - ipfs pin add "$HASH_A" && - ipfs pin ls $LS_ARGS $BASE_ARGS | tee before_update && - test_should_contain "$HASH_A" before_update && - test_must_fail grep -q "$HASH_B" before_update && - ipfs pin update --unpin=true "$HASH_A" "$HASH_B" && - ipfs pin ls $LS_ARGS $BASE_ARGS > after_update && - test_must_fail grep -q "$HASH_A" after_update && - test_should_contain "$HASH_B" after_update && - ipfs pin update --unpin=true "$HASH_B" "$HASH_B" && - ipfs pin ls $LS_ARGS $BASE_ARGS > after_idempotent_update && - test_should_contain "$HASH_B" after_idempotent_update && - ipfs pin rm "$HASH_B" - ' -} - -RANDOM_HASH=Qme8uX5n9hn15pw9p6WcVKoziyyC9LXv4LEgvsmKMULjnV - -test_pins_error_reporting() { - PIN_ARGS=$1 - - test_expect_success "'ipfs pin add $PIN_ARGS' on non-existent hash should fail" ' - test_must_fail ipfs pin add $PIN_ARGS $RANDOM_HASH 2> err && - grep -q "ipld: could not find" err - ' -} - -test_pin_dag_init() { - PIN_ARGS=$1 - - test_expect_success "'ipfs add $PIN_ARGS --pin=false' 1MB file" ' - random 1048576 56 > afile && - HASH=`ipfs add $PIN_ARGS --pin=false -q afile` - ' -} - -test_pin_dag() { - test_pin_dag_init $1 - - test_expect_success "'ipfs pin add --progress' file" ' - ipfs pin add --recursive=true $HASH - ' - - test_expect_success "'ipfs pin rm' file" ' - ipfs pin rm $HASH - ' - - test_expect_success "remove part of the dag" ' - PART=`ipfs refs $HASH | head -1` && - ipfs block rm $PART - ' - - test_expect_success "pin file, should fail" ' - test_must_fail ipfs pin add --recursive=true $HASH 2> err && - cat err && - grep -q "ipld: could not find" err - ' -} - -test_pin_progress() { - test_pin_dag_init - - test_expect_success "'ipfs pin add --progress' file" ' - ipfs pin add --progress $HASH 2> err - ' - - test_expect_success "pin progress reported correctly" ' - cat err - grep -q " 5 nodes" err - ' -} - -test_init_ipfs - -test_pins '' '' '' -test_pins --progress '' '' -test_pins --progress --stream '' -test_pins '' '' base32 -test_pins '' --stream base32 - -test_pins_error_reporting -test_pins_error_reporting --progress - -test_pin_dag -test_pin_dag --raw-leaves - -test_pin_progress - -test_launch_ipfs_daemon_without_network - -test_pins '' '' '' -test_pins --progress '' '' -test_pins --progress --stream '' -test_pins '' '' base32 -test_pins '' --stream base32 - -test_pins_error_reporting -test_pins_error_reporting --progress - -test_pin_dag -test_pin_dag --raw-leaves - -test_pin_progress - -test_kill_ipfs_daemon - -test_done