Merge pull request #22 from ucan-wg/container
add a token container with serialization as CARv1 file

5	go.mod
@@ -1,8 +1,6 @@
module github.com/ucan-wg/go-ucan

go 1.22

toolchain go1.22.4
go 1.23

require (
	github.com/ipfs/go-cid v0.4.1
@@ -31,6 +29,7 @@ require (
	golang.org/x/crypto v0.25.0 // indirect
	golang.org/x/sys v0.22.0 // indirect
	google.golang.org/protobuf v1.34.2 // indirect
	gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
	lukechampine.com/blake3 v1.3.0 // indirect
)

3	go.sum
@@ -80,8 +80,9 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=

86	pkg/container/Readme.md	Normal file
@@ -0,0 +1,86 @@
# Token container

## Why do I need this?

Several common situations call for packaging multiple tokens together:
- calling a service requires sending an invocation, alongside the matching delegations
- sending a series of revocations
- \<insert your application-specific scenario here>

The UCAN specification defines how a single token is serialized (an envelope with a signature, IPLD-encoded as DAG-CBOR), but it leaves entirely open how to package multiple tokens together. To be clear, this is the right call for a specification, as different, equally valid ways to solve that problem exist and can coexist. Any wire format holding a list of byte strings would do (CBOR, JSON, CSV ...).

**go-ucan**, however, provides an opinionated implementation, which may or may not work in your situation.

Some experimentation has been done to determine which formats are appropriate, and two have been selected:
- **DAG-CBOR** of a list of bytes, as a low-overhead option
- **CAR** file, as a somewhat common way to carry arbitrary blocks of data

Notably, **compression is not included**, even though it works reasonably well. This is because your transport medium might already do it, or should.

## Wire format considerations

Several possible formats have been explored:
- CAR files (binary or base64)
- DAG-CBOR (binary or base64)

Additionally, gzip and deflate compression have been experimented with.

Below are the results in terms of storage used, as percentage overhead (first table) and byte overhead (second table) over the raw tokens:

| Token count | car | carBase64 | carGzip | carGzipBase64 | cbor | cborBase64 | cborGzip | cborGzipBase64 | cborFlate | cborFlateBase64 |
|-------------|-----|-----------|---------|---------------|------|------------|----------|----------------|-----------|-----------------|
| 1 | 15 | 54 | 7 | 42 | 0 | 35 | -8 | 22 | -12 | 16 |
| 2 | 12 | 49 | -12 | 15 | 0 | 34 | -25 | 0 | -28 | -3 |
| 3 | 11 | 48 | -21 | 4 | 0 | 34 | -32 | -10 | -34 | -11 |
| 4 | 10 | 47 | -26 | -1 | 0 | 34 | -36 | -15 | -37 | -17 |
| 5 | 10 | 47 | -28 | -4 | 0 | 34 | -38 | -18 | -40 | -20 |
| 6 | 10 | 47 | -30 | -7 | 0 | 34 | -40 | -20 | -40 | -20 |
| 7 | 10 | 46 | -31 | -8 | 0 | 34 | -41 | -21 | -42 | -22 |
| 8 | 9 | 46 | -32 | -10 | 0 | 34 | -42 | -22 | -42 | -23 |
| 9 | 9 | 46 | -33 | -11 | 0 | 34 | -43 | -23 | -43 | -24 |
| 10 | 9 | 46 | -34 | -12 | 0 | 34 | -43 | -25 | -44 | -25 |



| Token count | car | carBase64 | carGzip | carGzipBase64 | cbor | cborBase64 | cborGzip | cborGzipBase64 | cborFlate | cborFlateBase64 |
|-------------|-----|-----------|---------|---------------|------|------------|----------|----------------|-----------|-----------------|
| 1 | 64 | 226 | 29 | 178 | 4 | 146 | -35 | 94 | -52 | 70 |
| 2 | 102 | 412 | -107 | 128 | 7 | 288 | -211 | 0 | -234 | -32 |
| 3 | 140 | 602 | -270 | 58 | 10 | 430 | -405 | -126 | -429 | -146 |
| 4 | 178 | 792 | -432 | -28 | 13 | 572 | -602 | -252 | -617 | -288 |
| 5 | 216 | 978 | -582 | -94 | 16 | 714 | -805 | -386 | -839 | -418 |
| 6 | 254 | 1168 | -759 | -176 | 19 | 856 | -1001 | -508 | -1018 | -520 |
| 7 | 292 | 1358 | -908 | -246 | 22 | 998 | -1204 | -634 | -1229 | -650 |
| 8 | 330 | 1544 | -1085 | -332 | 25 | 1140 | -1398 | -756 | -1423 | -792 |
| 9 | 368 | 1734 | -1257 | -414 | 28 | 1282 | -1614 | -894 | -1625 | -930 |
| 10 | 406 | 1924 | -1408 | -508 | 31 | 1424 | -1804 | -1040 | -1826 | -1060 |



The following covers the performance aspect, with CPU usage and memory allocations:

| | Write ns/op | Read ns/op | Write B/op | Read B/op | Write allocs/op | Read allocs/op |
|-----------------|-------------|------------|------------|-----------|-----------------|----------------|
| car | 8451 | 1474630 | 17928 | 149437 | 59 | 2631 |
| carBase64 | 16750 | 1437678 | 24232 | 151502 | 61 | 2633 |
| carGzip | 320253 | 1581412 | 823887 | 192272 | 76 | 2665 |
| carGzipBase64 | 343305 | 1486269 | 828782 | 198543 | 77 | 2669 |
| cbor | 6419 | 1301554 | 16368 | 138891 | 25 | 2534 |
| cborBase64 | 12860 | 1386728 | 20720 | 140962 | 26 | 2536 |
| cborGzip | 310106 | 1379146 | 822742 | 182003 | 42 | 2585 |
| cborGzipBase64 | 317001 | 1462548 | 827640 | 189283 | 43 | 2594 |
| cborFlate | 327112 | 1555007 | 822473 | 181537 | 40 | 2591 |
| cborFlateBase64 | 311276 | 1456562 | 826042 | 188665 | 41 | 2596 |

(BEWARE: logarithmic scale)




Conclusion:
- CAR files are heavy for this usage, notably because they carry the CIDs of the tokens
- compression works quite well and warrants its use even with a single token
- DAG-CBOR outperforms CAR files everywhere, and comes with a tiny overhead of ~3 bytes per token

**Formats besides DAG-CBOR and CAR, with or without base64, have been removed. They remain in the git history, though.**
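
To make this concrete, here is a minimal usage sketch of the container API added by this PR. It is only an illustration: error handling is reduced, and the sealed token CID/bytes are assumed to come from elsewhere (for example from `delegation.Token.ToSealed`).

```go
package example

import (
	"bytes"
	"fmt"

	"github.com/ipfs/go-cid"

	"github.com/ucan-wg/go-ucan/pkg/container"
)

// roundTrip packages one sealed token into a container, serializes it as a
// CARv1 file, then reads it back. sealedCid and sealedBytes are assumed to be
// produced beforehand, e.g. with delegation.Token.ToSealed.
func roundTrip(sealedCid cid.Cid, sealedBytes []byte) error {
	// write side
	w := container.NewWriter()
	w.AddSealed(sealedCid, sealedBytes)

	var buf bytes.Buffer
	if err := w.ToCar(&buf); err != nil { // or ToCbor / ToCarBase64 / ToCborBase64
		return err
	}

	// read side
	r, err := container.FromCar(bytes.NewReader(buf.Bytes()))
	if err != nil {
		return err
	}
	dlg, err := r.GetDelegation(sealedCid)
	if err != nil {
		return err
	}
	fmt.Println("issuer:", dlg.Issuer())
	return nil
}
```

Pick the `ToCbor`/`FromCbor` pair instead when the lower overhead measured above matters more than CAR compatibility.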

253	pkg/container/car.go	Normal file
@@ -0,0 +1,253 @@
package container

import (
	"bufio"
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
	"iter"

	"github.com/ipfs/go-cid"
	"github.com/ipld/go-ipld-prime"
	"github.com/ipld/go-ipld-prime/codec/dagcbor"
	"github.com/ipld/go-ipld-prime/datamodel"
	"github.com/ipld/go-ipld-prime/fluent/qp"
	cidlink "github.com/ipld/go-ipld-prime/linking/cid"
	"github.com/ipld/go-ipld-prime/node/basicnode"
)

/*
Note: below is essentially a re-implementation of the CARv1 file read and write.
This exists here for two reasons:
  - go-car's API forces going through an IPLD getter or through a blockstore API
  - generally, go-car is a very complex and large dependency
*/
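
// For reference, a CARv1 stream as read and written below is a sequence of
// length-delimited sections, every length being an unsigned varint:
//
//	[varint: header length][DAG-CBOR header {"roots": [...], "version": 1}]
//	[varint: section length][CID bytes][block data]   (repeated for each block)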

// EmptyCid is a "zero" Cid: zero-length "identity" multihash with "raw" codec.
// It can be used to have at least one root in a CARv1 file (making it legal), yet
// denote that it can be ignored.
var EmptyCid = cid.MustParse([]byte{0x01, 0x55, 0x00, 0x00})

type carBlock struct {
	c    cid.Cid
	data []byte
}

// writeCar writes a CARv1 file containing the blocks from the iterator.
// If no roots are provided, a single EmptyCid is used as root to make the file
// spec compliant.
func writeCar(w io.Writer, roots []cid.Cid, blocks iter.Seq[carBlock]) error {
	if len(roots) == 0 {
		roots = []cid.Cid{EmptyCid}
	}
	h := carHeader{
		Roots:   roots,
		Version: 1,
	}
	hb, err := h.Write()
	if err != nil {
		return err
	}
	err = ldWrite(w, hb)
	if err != nil {
		return err
	}

	for block := range blocks {
		err = ldWrite(w, block.c.Bytes(), block.data)
		if err != nil {
			return err
		}
	}
	return nil
}

// readCar reads a CARv1 file from the reader, and returns a block iterator.
// The roots are returned as-is, without further interpretation.
func readCar(r io.Reader) (roots []cid.Cid, blocks iter.Seq2[carBlock, error], err error) {
	br := bufio.NewReader(r)

	hb, err := ldRead(br)
	if err != nil {
		return nil, nil, err
	}
	h, err := readHeader(hb)
	if err != nil {
		return nil, nil, err
	}
	if h.Version != 1 {
		return nil, nil, fmt.Errorf("invalid car version: %d", h.Version)
	}

	return h.Roots, func(yield func(block carBlock, err error) bool) {
		for {
			block, err := readBlock(br)
			if err == io.EOF {
				return
			}
			if err != nil {
				// surface the error and stop: the stream cannot be trusted past this point
				yield(carBlock{}, err)
				return
			}
			if !yield(block, nil) {
				return
			}
		}
	}, nil
}

// readBlock reads a section from the reader and decodes a (cid+data) block.
func readBlock(r *bufio.Reader) (carBlock, error) {
	raw, err := ldRead(r)
	if err != nil {
		return carBlock{}, err
	}

	n, c, err := cid.CidFromReader(bytes.NewReader(raw))
	if err != nil {
		return carBlock{}, err
	}
	data := raw[n:]

	// integrity check
	hashed, err := c.Prefix().Sum(data)
	if err != nil {
		return carBlock{}, err
	}

	if !hashed.Equals(c) {
		return carBlock{}, fmt.Errorf("mismatch in content integrity, expected: %s, computed: %s", c, hashed)
	}

	return carBlock{c: c, data: data}, nil
}

// maxAllowedSectionSize dictates the maximum number of bytes that a CARv1 header
// or section is allowed to occupy without causing a decode to error.
// This cannot be supplied as an option, only adjusted as a global.
var maxAllowedSectionSize uint = 32 << 20 // 32MiB

// ldRead performs a length-delimited read of a section from the reader.
// A section is composed of a uvarint length followed by that many bytes of data.
func ldRead(r *bufio.Reader) ([]byte, error) {
	if _, err := r.Peek(1); err != nil { // no more blocks, likely clean io.EOF
		return nil, err
	}

	l, err := binary.ReadUvarint(r)
	if err != nil {
		if err == io.EOF {
			return nil, io.ErrUnexpectedEOF // don't silently pretend this is a clean EOF
		}
		return nil, err
	}

	if l > uint64(maxAllowedSectionSize) { // Don't OOM
		return nil, fmt.Errorf("malformed car; section is bigger than maxAllowedSectionSize")
	}

	buf := make([]byte, l)
	if _, err := io.ReadFull(r, buf); err != nil {
		return nil, err
	}

	return buf, nil
}

// ldWrite performs a length-delimited write of a section on the writer.
// A section is composed of a uvarint length followed by the data.
func ldWrite(w io.Writer, d ...[]byte) error {
	var sum uint64
	for _, s := range d {
		sum += uint64(len(s))
	}

	buf := make([]byte, 8)
	n := binary.PutUvarint(buf, sum)
	_, err := w.Write(buf[:n])
	if err != nil {
		return err
	}

	for _, s := range d {
		_, err = w.Write(s)
		if err != nil {
			return err
		}
	}

	return nil
}

type carHeader struct {
	Roots   []cid.Cid
	Version uint64
}

const rootsKey = "roots"
const versionKey = "version"

func readHeader(data []byte) (*carHeader, error) {
	var header carHeader

	nd, err := ipld.Decode(data, dagcbor.Decode)
	if err != nil {
		return nil, err
	}
	if nd.Length() != 2 {
		return nil, fmt.Errorf("malformed car header")
	}
	rootsNd, err := nd.LookupByString(rootsKey)
	if err != nil {
		return nil, fmt.Errorf("malformed car header")
	}
	it := rootsNd.ListIterator()
	if it == nil {
		return nil, fmt.Errorf("malformed car header")
	}
	header.Roots = make([]cid.Cid, 0, rootsNd.Length())
	for !it.Done() {
		_, nd, err := it.Next()
		if err != nil {
			return nil, err
		}
		lk, err := nd.AsLink()
		if err != nil {
			return nil, err
		}
		switch lk := lk.(type) {
		case cidlink.Link:
			header.Roots = append(header.Roots, lk.Cid)
		default:
			return nil, fmt.Errorf("malformed car header")
		}
	}
	versionNd, err := nd.LookupByString(versionKey)
	if err != nil {
		return nil, fmt.Errorf("malformed car header")
	}
	version, err := versionNd.AsInt()
	if err != nil {
		return nil, fmt.Errorf("malformed car header")
	}
	header.Version = uint64(version)
	return &header, nil
}

func (ch *carHeader) Write() ([]byte, error) {
	nd, err := qp.BuildMap(basicnode.Prototype.Any, 2, func(ma datamodel.MapAssembler) {
		qp.MapEntry(ma, rootsKey, qp.List(int64(len(ch.Roots)), func(la datamodel.ListAssembler) {
			for _, root := range ch.Roots {
				qp.ListEntry(la, qp.Link(cidlink.Link{Cid: root}))
			}
		}))
		qp.MapEntry(ma, versionKey, qp.Int(1))
	})
	if err != nil {
		return nil, err
	}
	return ipld.Encode(nd, dagcbor.Encode)
}

52	pkg/container/car_test.go	Normal file
@@ -0,0 +1,52 @@
package container

import (
	"bytes"
	"os"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestCarRoundTrip(t *testing.T) {
	// this car file is a complex and legal CARv1 file
	original, err := os.ReadFile("testdata/sample-v1.car")
	require.NoError(t, err)

	roots, it, err := readCar(bytes.NewReader(original))
	require.NoError(t, err)

	var blks []carBlock
	for blk, err := range it {
		require.NoError(t, err)
		blks = append(blks, blk)
	}

	require.Len(t, blks, 1049)

	buf := bytes.NewBuffer(nil)

	err = writeCar(buf, roots, func(yield func(carBlock) bool) {
		for _, blk := range blks {
			if !yield(blk) {
				return
			}
		}
	})
	require.NoError(t, err)

	// Bytes equal after the round-trip
	require.Equal(t, original, buf.Bytes())
}

func FuzzCarRead(f *testing.F) {
	example, err := os.ReadFile("testdata/sample-v1.car")
	require.NoError(f, err)

	f.Add(example)

	f.Fuzz(func(t *testing.T, data []byte) {
		_, _, _ = readCar(bytes.NewReader(data))
		// only looking for panics
	})
}

BIN	pkg/container/img/alloc_byte.png	Normal file (25 KiB, binary file not shown)
BIN	pkg/container/img/alloc_count.png	Normal file (23 KiB, binary file not shown)
BIN	pkg/container/img/cpu.png	Normal file (25 KiB, binary file not shown)
BIN	pkg/container/img/overhead_bytes.png	Normal file (31 KiB, binary file not shown)
BIN	pkg/container/img/overhead_percent.png	Normal file (29 KiB, binary file not shown)

122	pkg/container/reader.go	Normal file
@@ -0,0 +1,122 @@
package container

import (
	"encoding/base64"
	"fmt"
	"io"

	"github.com/ipfs/go-cid"
	"github.com/ipld/go-ipld-prime"
	"github.com/ipld/go-ipld-prime/codec/dagcbor"
	"github.com/ipld/go-ipld-prime/datamodel"

	"github.com/ucan-wg/go-ucan/token"
	"github.com/ucan-wg/go-ucan/token/delegation"
	"github.com/ucan-wg/go-ucan/token/invocation"
)

var ErrNotFound = fmt.Errorf("not found")

// Reader is a token container reader. It exposes the tokens conveniently decoded.
type Reader map[cid.Cid]token.Token

// GetToken returns an arbitrary decoded token, from its CID.
// If not found, ErrNotFound is returned.
func (ctn Reader) GetToken(cid cid.Cid) (token.Token, error) {
	tkn, ok := ctn[cid]
	if !ok {
		return nil, ErrNotFound
	}
	return tkn, nil
}

// GetDelegation is the same as GetToken but only returns a delegation.Token, with the right type.
func (ctn Reader) GetDelegation(cid cid.Cid) (*delegation.Token, error) {
	tkn, err := ctn.GetToken(cid)
	if err != nil {
		return nil, err
	}
	if tkn, ok := tkn.(*delegation.Token); ok {
		return tkn, nil
	}
	return nil, fmt.Errorf("not a delegation token")
}

// GetInvocation returns the first found invocation.Token.
// If none are found, ErrNotFound is returned.
func (ctn Reader) GetInvocation() (*invocation.Token, error) {
	for _, t := range ctn {
		if inv, ok := t.(*invocation.Token); ok {
			return inv, nil
		}
	}
	return nil, ErrNotFound
}

// FromCar decodes a container from its CARv1 serialized form.
func FromCar(r io.Reader) (Reader, error) {
	_, it, err := readCar(r)
	if err != nil {
		return nil, err
	}

	ctn := make(Reader)

	for block, err := range it {
		if err != nil {
			return nil, err
		}

		err = ctn.addToken(block.data)
		if err != nil {
			return nil, err
		}
	}

	return ctn, nil
}

// FromCarBase64 decodes a container from its base64-encoded CARv1 serialized form.
func FromCarBase64(r io.Reader) (Reader, error) {
	return FromCar(base64.NewDecoder(base64.StdEncoding, r))
}

// FromCbor decodes a container from its DAG-CBOR serialized form.
func FromCbor(r io.Reader) (Reader, error) {
	n, err := ipld.DecodeStreaming(r, dagcbor.Decode)
	if err != nil {
		return nil, err
	}
	if n.Kind() != datamodel.Kind_List {
		return nil, fmt.Errorf("not a list")
	}

	ctn := make(Reader, n.Length())

	it := n.ListIterator()
	for !it.Done() {
		_, val, err := it.Next()
		if err != nil {
			return nil, err
		}
		data, err := val.AsBytes()
		if err != nil {
			return nil, err
		}
		err = ctn.addToken(data)
		if err != nil {
			return nil, err
		}
	}
	return ctn, nil
}

// FromCborBase64 decodes a container from its base64-encoded DAG-CBOR serialized form.
func FromCborBase64(r io.Reader) (Reader, error) {
	return FromCbor(base64.NewDecoder(base64.StdEncoding, r))
}

func (ctn Reader) addToken(data []byte) error {
	tkn, c, err := token.FromSealed(data)
	if err != nil {
		return err
	}
	ctn[c] = tkn
	return nil
}
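
For the service-side scenario mentioned in the Readme (an invocation travelling together with its proof delegations), a rough sketch of the read side could look like the following. Only the container calls come from this package; the `proofCids` helper and the surrounding handler are hypothetical placeholders.

```go
package example

import (
	"io"

	"github.com/ipfs/go-cid"

	"github.com/ucan-wg/go-ucan/pkg/container"
	"github.com/ucan-wg/go-ucan/token/invocation"
)

// proofCids is a hypothetical placeholder: how the proof CIDs are obtained
// from an invocation is application-specific and outside the container package.
func proofCids(inv *invocation.Token) []cid.Cid { return nil }

// handleRequest sketches the read side of "one invocation plus its matching
// delegations in a single container".
func handleRequest(body io.Reader) error {
	ctn, err := container.FromCarBase64(body) // or FromCar / FromCbor / FromCborBase64
	if err != nil {
		return err
	}

	inv, err := ctn.GetInvocation()
	if err != nil {
		return err // no invocation found in the container
	}

	for _, c := range proofCids(inv) {
		dlg, err := ctn.GetDelegation(c)
		if err != nil {
			return err // a required proof is missing from the container
		}
		_ = dlg // delegation chain verification is out of scope here
	}
	return nil
}
```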

184	pkg/container/serial_test.go	Normal file
@@ -0,0 +1,184 @@
package container

import (
	"bytes"
	"crypto/rand"
	"fmt"
	"io"
	"strings"
	"testing"
	"time"

	"github.com/ipfs/go-cid"
	"github.com/libp2p/go-libp2p/core/crypto"
	"github.com/stretchr/testify/require"

	"github.com/ucan-wg/go-ucan/did"
	"github.com/ucan-wg/go-ucan/pkg/command"
	"github.com/ucan-wg/go-ucan/pkg/policy"
	"github.com/ucan-wg/go-ucan/pkg/policy/literal"
	"github.com/ucan-wg/go-ucan/pkg/policy/selector"
	"github.com/ucan-wg/go-ucan/token/delegation"
)

func TestContainerRoundTrip(t *testing.T) {
	for _, tc := range []struct {
		name   string
		writer func(ctn Writer, w io.Writer) error
		reader func(io.Reader) (Reader, error)
	}{
		{"car", Writer.ToCar, FromCar},
		{"carBase64", Writer.ToCarBase64, FromCarBase64},
		{"cbor", Writer.ToCbor, FromCbor},
		{"cborBase64", Writer.ToCborBase64, FromCborBase64},
	} {
		t.Run(tc.name, func(t *testing.T) {
			tokens := make(map[cid.Cid]*delegation.Token)
			var dataSize int

			writer := NewWriter()

			for i := 0; i < 10; i++ {
				dlg, c, data := randToken()
				writer.AddSealed(c, data)
				tokens[c] = dlg
				dataSize += len(data)
			}

			buf := bytes.NewBuffer(nil)

			err := tc.writer(writer, buf)
			require.NoError(t, err)

			t.Logf("data size %d", dataSize)
			t.Logf("container overhead: %d%%, %d bytes", int(float32(buf.Len()-dataSize)/float32(dataSize)*100.0), buf.Len()-dataSize)

			reader, err := tc.reader(bytes.NewReader(buf.Bytes()))
			require.NoError(t, err)

			for c, dlg := range tokens {
				tknRead, err := reader.GetToken(c)
				require.NoError(t, err)

				// require.Equal fails as time.Time holds a wall time that is going to be
				// different, even if it represents the same event.
				// We need to do the following instead.

				dlgRead := tknRead.(*delegation.Token)
				require.Equal(t, dlg.Issuer(), dlgRead.Issuer())
				require.Equal(t, dlg.Audience(), dlgRead.Audience())
				require.Equal(t, dlg.Subject(), dlgRead.Subject())
				require.Equal(t, dlg.Command(), dlgRead.Command())
				require.Equal(t, dlg.Policy(), dlgRead.Policy())
				require.Equal(t, dlg.Nonce(), dlgRead.Nonce())
				require.True(t, dlg.Meta().Equals(dlgRead.Meta()))
				if dlg.NotBefore() != nil {
					// within 1s as the original value gets truncated to seconds when serialized
					require.WithinDuration(t, *dlg.NotBefore(), *dlgRead.NotBefore(), time.Second)
				}
				if dlg.Expiration() != nil {
					// within 1s as the original value gets truncated to seconds when serialized
					require.WithinDuration(t, *dlg.Expiration(), *dlgRead.Expiration(), time.Second)
				}
			}
		})
	}
}

func BenchmarkContainerSerialisation(b *testing.B) {
	var duration strings.Builder
	var allocByte strings.Builder
	var allocCount strings.Builder

	for _, builder := range []*strings.Builder{&duration, &allocByte, &allocCount} {
		builder.WriteString("car\tcarBase64\tcarGzip\tcarGzipBase64\tcbor\tcborBase64\tcborGzip\tcborGzipBase64\tcborFlate\tcborFlateBase64\n")
	}

	for _, tc := range []struct {
		name   string
		writer func(ctn Writer, w io.Writer) error
		reader func(io.Reader) (Reader, error)
	}{
		{"car", Writer.ToCar, FromCar},
		{"carBase64", Writer.ToCarBase64, FromCarBase64},
		{"cbor", Writer.ToCbor, FromCbor},
		{"cborBase64", Writer.ToCborBase64, FromCborBase64},
	} {
		writer := NewWriter()

		for i := 0; i < 10; i++ {
			_, c, data := randToken()
			writer.AddSealed(c, data)
		}

		buf := bytes.NewBuffer(nil)
		_ = tc.writer(writer, buf)

		b.Run(tc.name+"_write", func(b *testing.B) {
			b.ReportAllocs()
			for i := 0; i < b.N; i++ {
				buf := bytes.NewBuffer(nil)
				_ = tc.writer(writer, buf)
			}
		})

		b.Run(tc.name+"_read", func(b *testing.B) {
			b.ReportAllocs()
			for i := 0; i < b.N; i++ {
				_, _ = tc.reader(bytes.NewReader(buf.Bytes()))
			}
		})
	}
}

func randBytes(n int) []byte {
	b := make([]byte, n)
	_, _ = rand.Read(b)
	return b
}

func randDID() (crypto.PrivKey, did.DID) {
	privKey, _, err := crypto.GenerateEd25519Key(rand.Reader)
	if err != nil {
		panic(err)
	}
	d, err := did.FromPrivKey(privKey)
	if err != nil {
		panic(err)
	}
	return privKey, d
}

func randomString(length int) string {
	b := make([]byte, length/2+1)
	_, _ = rand.Read(b)
	return fmt.Sprintf("%x", b)[0:length]
}

func randToken() (*delegation.Token, cid.Cid, []byte) {
	priv, iss := randDID()
	_, aud := randDID()
	cmd := command.New("foo", "bar")
	pol := policy.Policy{policy.All(
		selector.MustParse(".[]"),
		policy.GreaterThan(selector.MustParse(".value"), literal.Int(2)),
	)}

	opts := []delegation.Option{
		delegation.WithExpiration(time.Now().Add(time.Hour)),
		delegation.WithSubject(iss),
	}
	for i := 0; i < 3; i++ {
		opts = append(opts, delegation.WithMeta(randomString(8), randomString(10)))
	}

	t, err := delegation.New(priv, aud, cmd, pol, opts...)
	if err != nil {
		panic(err)
	}
	b, c, err := t.ToSealed(priv)
	if err != nil {
		panic(err)
	}
	return t, c, b
}

BIN	pkg/container/testdata/sample-v1.car	vendored	Normal file (binary file not shown)

61	pkg/container/writer.go	Normal file
@@ -0,0 +1,61 @@
package container

import (
	"encoding/base64"
	"io"

	"github.com/ipfs/go-cid"
	"github.com/ipld/go-ipld-prime"
	"github.com/ipld/go-ipld-prime/codec/dagcbor"
	"github.com/ipld/go-ipld-prime/datamodel"
	"github.com/ipld/go-ipld-prime/fluent/qp"
	"github.com/ipld/go-ipld-prime/node/basicnode"
)

// TODO: should we have a multibase to wrap the cbor? but there is no reader/writer in go-multibase :-(

// Writer is a token container writer. It provides a convenient way to aggregate and serialize tokens together.
type Writer map[cid.Cid][]byte

// NewWriter creates an empty container Writer.
func NewWriter() Writer {
	return make(Writer)
}

// AddSealed includes a "sealed" token (serialized with a ToSealed* function) in the container.
func (ctn Writer) AddSealed(cid cid.Cid, data []byte) {
	ctn[cid] = data
}

// ToCar serializes the container as a CARv1 file.
func (ctn Writer) ToCar(w io.Writer) error {
	return writeCar(w, nil, func(yield func(carBlock) bool) {
		for c, bytes := range ctn {
			if !yield(carBlock{c: c, data: bytes}) {
				return
			}
		}
	})
}

// ToCarBase64 serializes the container as a base64-encoded CARv1 file.
func (ctn Writer) ToCarBase64(w io.Writer) error {
	w2 := base64.NewEncoder(base64.StdEncoding, w)
	defer w2.Close()
	return ctn.ToCar(w2)
}

// ToCbor serializes the container as a DAG-CBOR list of byte strings.
func (ctn Writer) ToCbor(w io.Writer) error {
	node, err := qp.BuildList(basicnode.Prototype.Any, int64(len(ctn)), func(la datamodel.ListAssembler) {
		for _, bytes := range ctn {
			qp.ListEntry(la, qp.Bytes(bytes))
		}
	})
	if err != nil {
		return err
	}
	return ipld.EncodeStreaming(w, node, dagcbor.Encode)
}

// ToCborBase64 serializes the container as a base64-encoded DAG-CBOR list of byte strings.
func (ctn Writer) ToCborBase64(w io.Writer) error {
	w2 := base64.NewEncoder(base64.StdEncoding, w)
	defer w2.Close()
	return ctn.ToCbor(w2)
}

@@ -123,6 +123,22 @@ func (m *Meta) Add(key string, val any) error {
	return nil
}

// Equals tells if two Meta hold the same key/values.
func (m *Meta) Equals(other *Meta) bool {
	if len(m.Keys) != len(other.Keys) {
		return false
	}
	if len(m.Values) != len(other.Values) {
		return false
	}
	for _, key := range m.Keys {
		if !ipld.DeepEqual(m.Values[key], other.Values[key]) {
			return false
		}
	}
	return true
}

func fqtn(val any) string {
	var name string