Compare commits
105 Commits
| SHA1 |
|---|
| 701f9d5222 |
| eabda15156 |
| 766dbc86ae |
| 44b28418c8 |
| 989fa114c3 |
| 02ed95de81 |
| a0303a6f45 |
| 8c53981821 |
| 8b61179671 |
| 58a41f7df1 |
| a791fa9589 |
| c36c36f88b |
| 3bbbe822b0 |
| b85d20169d |
| 8170d966f8 |
| c442d70f87 |
| 7160a7347e |
| c1bc15b22e |
| 801f90a945 |
| 6e0e420356 |
| df5b7bc6ee |
| 0bd72a8c32 |
| 714f5c0130 |
| f067816be3 |
| 66b6f5f82a |
| c1e5d4e95b |
| f68598dd02 |
| daffaef0db |
| 95cb7074c4 |
| 2985033078 |
| e2260b5ff3 |
| 158c1deff1 |
| ee5c23343b |
| aa5d547a81 |
| c03399abc2 |
| f5fced06c2 |
| be9e91119a |
| 6519131ca4 |
| ef04c6a3db |
| b84fc17b77 |
| f279c85720 |
| d63641945d |
| 79803cc6b5 |
| 4819336788 |
| f25b77813c |
| be9178df09 |
| f7396abfab |
| fca1c65daf |
| 0a49bd57bb |
| 5d43951a20 |
| 4cd2fef284 |
| 916e8af3d6 |
| 007b57d388 |
| 3cdc462d3f |
| 5b7719f2f5 |
| c53ff45d4d |
| 8ab2e3688b |
| bb91b53e56 |
| 964f55ad40 |
| 3b3047873d |
| 2170058ef9 |
| ac3d23441b |
| ecd5d58562 |
| b46f1c99f0 |
| 5fb339e88a |
| caebba6233 |
| 83915a874d |
| 03643c33f5 |
| 2eb83a994b |
| 5547437445 |
| a0557075ec |
| 3ea5c212ef |
| dfd5076869 |
| 3f064509b3 |
| 23774f6467 |
| f61622a056 |
| 54686aabd4 |
| 307fbd560c |
| 36e6b228ae |
| e81eb6473b |
| e98e07ec91 |
| 8d1441db4b |
| 5e86107a34 |
| af68ad2dd7 |
| b3341b58aa |
| 81ab0b304e |
| 2de609b735 |
| 67ee8b7fc0 |
| d74f4f4a44 |
| 8cb3334d95 |
| 7e9a23df22 |
| 158b7d8e3c |
| a20e296ca7 |
| a723c120b9 |
| 4cedb5a79e |
| 6a1554bbad |
| 9a56b2b802 |
| 374a93370a |
| c264cdd0ae |
| e11767a490 |
| c4f49cd356 |
| 3b16e9048c |
| ee1176c739 |
| 8e28da0e2a |
| 3d3cb9341e |
.codecov.yml (new file, 1 line)
@@ -0,0 +1 @@
comment: off

.github/workflows/generated-pr.yml (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
name: Close Generated PRs

on:
  schedule:
    - cron: '0 0 * * *'
  workflow_dispatch:

permissions:
  issues: write
  pull-requests: write

jobs:
  stale:
    uses: ipdxco/unified-github-workflows/.github/workflows/reusable-generated-pr.yml@v1

.github/workflows/go-check.yml (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
name: Go Checks

on:
  pull_request:
  push:
    branches: ["master"]
  workflow_dispatch:

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }}
  cancel-in-progress: true

jobs:
  go-check:
    uses: ipdxco/unified-github-workflows/.github/workflows/go-check.yml@v1.0

.github/workflows/go-test.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
name: Go Test

on:
  pull_request:
  push:
    branches: ["master"]
  workflow_dispatch:

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }}
  cancel-in-progress: true

jobs:
  go-test:
    uses: ipdxco/unified-github-workflows/.github/workflows/go-test.yml@v1.0
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/release-check.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: Release Checker

on:
  pull_request_target:
    paths: [ 'version.json' ]
    types: [ opened, synchronize, reopened, labeled, unlabeled ]
  workflow_dispatch:

permissions:
  contents: write
  pull-requests: write

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  release-check:
    uses: ipdxco/unified-github-workflows/.github/workflows/release-check.yml@v1.0

.github/workflows/releaser.yml (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
name: Releaser

on:
  push:
    paths: [ 'version.json' ]
  workflow_dispatch:

permissions:
  contents: write

concurrency:
  group: ${{ github.workflow }}-${{ github.sha }}
  cancel-in-progress: true

jobs:
  releaser:
    uses: ipdxco/unified-github-workflows/.github/workflows/releaser.yml@v1.0

.github/workflows/stale.yml (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
name: Close Stale Issues

on:
  schedule:
    - cron: '0 0 * * *'
  workflow_dispatch:

permissions:
  issues: write
  pull-requests: write

jobs:
  stale:
    uses: ipdxco/unified-github-workflows/.github/workflows/reusable-stale-issue.yml@v1

.github/workflows/tagpush.yml (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
name: Tag Push Checker

on:
  push:
    tags:
      - v*

permissions:
  contents: read
  issues: write

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  releaser:
    uses: ipdxco/unified-github-workflows/.github/workflows/tagpush.yml@v1.0

.gitignore (vendored, 1 line changed)
@@ -1,2 +1,3 @@
*.swp

multibase-conv/multibase-conv

.gitmodules (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
[submodule "spec"]
	path = spec
	url = https://github.com/multiformats/multibase.git

(deleted file, 1 line)
@@ -1 +0,0 @@
0.2.1: QmUq3H9YpcPphbRj6ct6rBgBE377A8wANP8zPMRqe1WYbf

Makefile (new file, 7 lines)
@@ -0,0 +1,7 @@
test: deps
	go test -count=1 -race -v ./...

export IPFS_API ?= v04x.ipfs.io

deps:
	go get -t ./...
README.md (21 lines changed)
@@ -1,24 +1,23 @@
# go-multibase

[](http://ipn.io)
[](http://webchat.freenode.net/?channels=%23ipfs)
[](https://github.com/multiformats/multiformats)
[](https://webchat.freenode.net/?channels=%23ipfs)
[](https://github.com/RichardLitt/standard-readme)
[](https://travis-ci.org/multiformats/go-multibase)
[](https://codecov.io/github/multiformats/go-multibase?branch=master)

> Implementation of [multibase](https://github.com/multiformats/multibase) -self identifying base encodings- in Go.

> Go implementation of the [multibase](https://github.com/multiformats/multibase) specification.

## Install

`go-multibase` is a standard Go module which can be installed with:

```sh
go get github.com/multiformats/go-multibase
```

## Usage

TODO

## Maintainer

Captain: [@whyrusleeping](https://github.com/whyrusleeping).

## Contribute

Contributions welcome. Please check out [the issues](https://github.com/multiformats/go-multibase/issues).

@@ -29,4 +28,4 @@ Small note: If editing the README, please conform to the [standard-readme](https

## License

[MIT](LICENSE) © Protocol Labs Inc.
[MIT](LICENSE) © 2016 Protocol Labs Inc.
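The new README leaves the Usage section as TODO. A minimal usage sketch for the API added in this range (Encode/Decode from multibase.go further down; the printed values are illustrative, not taken from the diff):

```go
package main

import (
	"fmt"

	"github.com/multiformats/go-multibase"
)

func main() {
	// Encode prepends the single-character multibase prefix ('z' for base58btc).
	s, err := multibase.Encode(multibase.Base58BTC, []byte("hello"))
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // "z" followed by the base58btc payload

	// Decode inspects the prefix and reports which encoding was used.
	enc, data, err := multibase.Decode(s)
	if err != nil {
		panic(err)
	}
	fmt.Println(enc == multibase.Base58BTC, string(data)) // true hello
}
```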
base16.go (new file, 21 lines)
@@ -0,0 +1,21 @@
package multibase

func hexEncodeToStringUpper(src []byte) string {
	dst := make([]byte, len(src)*2)
	hexEncodeUpper(dst, src)
	return string(dst)
}

var hexTableUppers = [16]byte{
	'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
	'A', 'B', 'C', 'D', 'E', 'F',
}

func hexEncodeUpper(dst, src []byte) int {
	for i, v := range src {
		dst[i*2] = hexTableUppers[v>>4]
		dst[i*2+1] = hexTableUppers[v&0x0f]
	}

	return len(src) * 2
}
base2.go (new file, 52 lines)
@@ -0,0 +1,52 @@
package multibase

import (
	"fmt"
	"strconv"
	"strings"
)

// binaryEncodeToString takes an array of bytes and returns
// multibase binary representation
func binaryEncodeToString(src []byte) string {
	dst := make([]byte, len(src)*8)
	encodeBinary(dst, src)
	return string(dst)
}

// encodeBinary takes the src and dst bytes and converts each
// byte to their binary rep using power reduction method
func encodeBinary(dst []byte, src []byte) {
	for i, b := range src {
		for j := 0; j < 8; j++ {
			if b&(1<<uint(7-j)) == 0 {
				dst[i*8+j] = '0'
			} else {
				dst[i*8+j] = '1'
			}
		}
	}
}

// decodeBinaryString takes multibase binary representation
// and returns a byte array
func decodeBinaryString(s string) ([]byte, error) {
	if len(s)&7 != 0 {
		// prepend the padding
		s = strings.Repeat("0", 8-len(s)&7) + s
	}

	data := make([]byte, len(s)>>3)

	for i, dstIndex := 0, 0; i < len(s); i = i + 8 {
		value, err := strconv.ParseInt(s[i:i+8], 2, 0)
		if err != nil {
			return nil, fmt.Errorf("error while conversion: %s", err)
		}

		data[dstIndex] = byte(value)
		dstIndex++
	}

	return data, nil
}
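The "power reduction method" comment in encodeBinary is terse; the loop simply tests each bit of every byte from most- to least-significant and emits '0' or '1'. A standalone sketch of the same bit walk (it mirrors the unexported function above rather than calling it):

```go
package main

import "fmt"

// toBits mirrors the loop in encodeBinary: each byte becomes eight
// '0'/'1' characters, most-significant bit first.
func toBits(src []byte) string {
	dst := make([]byte, len(src)*8)
	for i, b := range src {
		for j := 0; j < 8; j++ {
			if b&(1<<uint(7-j)) == 0 {
				dst[i*8+j] = '0'
			} else {
				dst[i*8+j] = '1'
			}
		}
	}
	return string(dst)
}

func main() {
	fmt.Println(toBits([]byte{0x4d})) // 01001101
}
```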
base256emoji.go (new file, 95 lines)
@@ -0,0 +1,95 @@
package multibase

import (
	"strconv"
	"strings"
	"unicode/utf8"
)

var base256emojiTable = [256]rune{
	// Curated list, this is just a list of things that *somwhat* are related to our comunity
	'🚀', '🪐', '☄', '🛰', '🌌', // Space
	'🌑', '🌒', '🌓', '🌔', '🌕', '🌖', '🌗', '🌘', // Moon
	'🌍', '🌏', '🌎', // Our Home, for now (earth)
	'🐉', // Dragon!!!
	'☀', // Our Garden, for now (sol)
	'💻', '🖥', '💾', '💿', // Computer
	// The rest is completed from https://home.unicode.org/emoji/emoji-frequency/ at the time of creation (december 2021) (the data is from 2019), most used first until we reach 256.
	// We exclude modifier based emojies (such as flags) as they are bigger than one single codepoint.
	// Some other emojies were removed adhoc for various reasons.
	'😂', '❤', '😍', '🤣', '😊', '🙏', '💕', '😭', '😘', '👍',
	'😅', '👏', '😁', '🔥', '🥰', '💔', '💖', '💙', '😢', '🤔',
	'😆', '🙄', '💪', '😉', '☺', '👌', '🤗', '💜', '😔', '😎',
	'😇', '🌹', '🤦', '🎉', '💞', '✌', '✨', '🤷', '😱', '😌',
	'🌸', '🙌', '😋', '💗', '💚', '😏', '💛', '🙂', '💓', '🤩',
	'😄', '😀', '🖤', '😃', '💯', '🙈', '👇', '🎶', '😒', '🤭',
	'❣', '😜', '💋', '👀', '😪', '😑', '💥', '🙋', '😞', '😩',
	'😡', '🤪', '👊', '🥳', '😥', '🤤', '👉', '💃', '😳', '✋',
	'😚', '😝', '😴', '🌟', '😬', '🙃', '🍀', '🌷', '😻', '😓',
	'⭐', '✅', '🥺', '🌈', '😈', '🤘', '💦', '✔', '😣', '🏃',
	'💐', '☹', '🎊', '💘', '😠', '☝', '😕', '🌺', '🎂', '🌻',
	'😐', '🖕', '💝', '🙊', '😹', '🗣', '💫', '💀', '👑', '🎵',
	'🤞', '😛', '🔴', '😤', '🌼', '😫', '⚽', '🤙', '☕', '🏆',
	'🤫', '👈', '😮', '🙆', '🍻', '🍃', '🐶', '💁', '😲', '🌿',
	'🧡', '🎁', '⚡', '🌞', '🎈', '❌', '✊', '👋', '😰', '🤨',
	'😶', '🤝', '🚶', '💰', '🍓', '💢', '🤟', '🙁', '🚨', '💨',
	'🤬', '✈', '🎀', '🍺', '🤓', '😙', '💟', '🌱', '😖', '👶',
	'🥴', '▶', '➡', '❓', '💎', '💸', '⬇', '😨', '🌚', '🦋',
	'😷', '🕺', '⚠', '🙅', '😟', '😵', '👎', '🤲', '🤠', '🤧',
	'📌', '🔵', '💅', '🧐', '🐾', '🍒', '😗', '🤑', '🌊', '🤯',
	'🐷', '☎', '💧', '😯', '💆', '👆', '🎤', '🙇', '🍑', '❄',
	'🌴', '💣', '🐸', '💌', '📍', '🥀', '🤢', '👅', '💡', '💩',
	'👐', '📸', '👻', '🤐', '🤮', '🎼', '🥵', '🚩', '🍎', '🍊',
	'👼', '💍', '📣', '🥂',
}

var base256emojiReverseTable map[rune]byte

func init() {
	base256emojiReverseTable = make(map[rune]byte, len(base256emojiTable))
	for i, v := range base256emojiTable {
		base256emojiReverseTable[v] = byte(i)
	}
}

func base256emojiEncode(in []byte) string {
	var l int
	for _, v := range in {
		l += utf8.RuneLen(base256emojiTable[v])
	}
	var out strings.Builder
	out.Grow(l)
	for _, v := range in {
		out.WriteRune(base256emojiTable[v])
	}
	return out.String()
}

type base256emojiCorruptInputError struct {
	index int
	char  rune
}

func (e base256emojiCorruptInputError) Error() string {
	return "illegal base256emoji data at input byte " + strconv.FormatInt(int64(e.index), 10) + ", char: '" + string(e.char) + "'"
}

func (e base256emojiCorruptInputError) String() string {
	return e.Error()
}

func base256emojiDecode(in string) ([]byte, error) {
	out := make([]byte, utf8.RuneCountInString(in))
	var stri int
	for i := 0; len(in) > 0; i++ {
		r, n := utf8.DecodeRuneInString(in)
		in = in[n:]
		var ok bool
		out[i], ok = base256emojiReverseTable[r]
		if !ok {
			return nil, base256emojiCorruptInputError{stri, r}
		}
		stri += n
	}
	return out, nil
}
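base256emoji maps each payload byte to a single emoji rune and uses the reverse table to decode; because every symbol (including the 🚀 prefix) is a multi-byte rune, the codec works rune-by-rune rather than byte-by-byte. A minimal sketch through the public API added in this range (output shape described in comments, not asserted):

```go
package main

import (
	"fmt"

	"github.com/multiformats/go-multibase"
)

func main() {
	// Each payload byte becomes exactly one emoji rune; the leading 🚀 is
	// the multibase prefix for base256emoji.
	s, err := multibase.Encode(multibase.Base256Emoji, []byte{0x00, 0x01})
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // 🚀 prefix followed by one emoji per input byte

	enc, data, err := multibase.Decode(s)
	fmt.Println(enc == multibase.Base256Emoji, data, err) // true [0 1] <nil>
}
```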
base256emoji_test.go (new file, 26 lines)
@@ -0,0 +1,26 @@
package multibase

import "testing"

func TestBase256EmojiAlphabet(t *testing.T) {
	var c uint
	for _, v := range base256emojiTable {
		if v != rune(0) {
			c++
		}
	}
	if c != 256 {
		t.Errorf("Base256Emoji count is wrong, expected 256, got %d.", c)
	}
}

func TestBase256EmojiUniq(t *testing.T) {
	m := make(map[rune]struct{}, len(base256emojiTable))
	for i, v := range base256emojiTable {
		_, ok := m[v]
		if ok {
			t.Errorf("Base256Emoji duplicate %s at index %d.", string(v), i)
		}
		m[v] = struct{}{}
	}
}
base32.go (new file, 17 lines)
@@ -0,0 +1,17 @@
package multibase

import (
	b32 "github.com/multiformats/go-base32"
)

var base32StdLowerPad = b32.NewEncodingCI("abcdefghijklmnopqrstuvwxyz234567")
var base32StdLowerNoPad = base32StdLowerPad.WithPadding(b32.NoPadding)

var base32StdUpperPad = b32.NewEncodingCI("ABCDEFGHIJKLMNOPQRSTUVWXYZ234567")
var base32StdUpperNoPad = base32StdUpperPad.WithPadding(b32.NoPadding)

var base32HexLowerPad = b32.NewEncodingCI("0123456789abcdefghijklmnopqrstuv")
var base32HexLowerNoPad = base32HexLowerPad.WithPadding(b32.NoPadding)

var base32HexUpperPad = b32.NewEncodingCI("0123456789ABCDEFGHIJKLMNOPQRSTUV")
var base32HexUpperNoPad = base32HexUpperPad.WithPadding(b32.NoPadding)
encoder.go (new file, 65 lines)
@@ -0,0 +1,65 @@
package multibase

import (
	"fmt"
	"unicode/utf8"
)

// Encoder is a multibase encoding that is verified to be supported and
// supports an Encode method that does not return an error
type Encoder struct {
	enc Encoding
}

// NewEncoder create a new Encoder from an Encoding
func NewEncoder(base Encoding) (Encoder, error) {
	_, ok := EncodingToStr[base]
	if !ok {
		return Encoder{-1}, fmt.Errorf("unsupported multibase encoding: %d", base)
	}
	return Encoder{base}, nil
}

// MustNewEncoder is like NewEncoder but will panic if the encoding is
// invalid.
func MustNewEncoder(base Encoding) Encoder {
	_, ok := EncodingToStr[base]
	if !ok {
		panic("Unsupported multibase encoding")
	}
	return Encoder{base}
}

// EncoderByName creates an encoder from a string, the string can
// either be the multibase name or single character multibase prefix
func EncoderByName(str string) (Encoder, error) {
	var base Encoding
	var ok bool
	if len(str) == 0 {
		return Encoder{-1}, fmt.Errorf("empty multibase encoding")
	} else if utf8.RuneCountInString(str) == 1 {
		r, _ := utf8.DecodeRuneInString(str)
		base = Encoding(r)
		_, ok = EncodingToStr[base]
	} else {
		base, ok = Encodings[str]
	}
	if !ok {
		return Encoder{-1}, fmt.Errorf("unsupported multibase encoding: %s", str)
	}
	return Encoder{base}, nil
}

func (p Encoder) Encoding() Encoding {
	return p.enc
}

// Encode encodes the multibase using the given Encoder.
func (p Encoder) Encode(data []byte) string {
	str, err := Encode(p.enc, data)
	if err != nil {
		// should not happen
		panic(err)
	}
	return str
}
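Encoder wraps an Encoding that has already been validated, so its Encode method never returns an error, and EncoderByName accepts either the canonical name or the one-character prefix. A small sketch, assuming the names registered in EncodingToStr in multibase.go below (the example output is illustrative):

```go
package main

import (
	"fmt"

	"github.com/multiformats/go-multibase"
)

func main() {
	// Construct an encoder by name...
	enc, err := multibase.EncoderByName("base32")
	if err != nil {
		panic(err)
	}
	// ...or by the single-character prefix ('b' is base32's prefix).
	same, err := multibase.EncoderByName("b")
	if err != nil {
		panic(err)
	}

	// Once constructed, Encode cannot fail.
	fmt.Println(enc.Encode([]byte("hi")))            // e.g. "bnbuq"
	fmt.Println(same.Encoding() == multibase.Base32) // true
}
```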
encoder_test.go (new file, 53 lines)
@@ -0,0 +1,53 @@
package multibase

import (
	"testing"
	"unicode/utf8"
)

func TestInvalidCode(t *testing.T) {
	_, err := NewEncoder('q')
	if err == nil {
		t.Error("expected failure")
	}
}

func TestInvalidName(t *testing.T) {
	values := []string{"invalid", "", "q"}
	for _, val := range values {
		_, err := EncoderByName(val)
		if err == nil {
			t.Errorf("EncoderByName(%v) expected failure", val)
		}
	}
}

func TestEncoder(t *testing.T) {
	for name, code := range Encodings {
		encoder, err := NewEncoder(code)
		if err != nil {
			t.Fatal(err)
		}
		// Make sure the MustNewEncoder doesn't panic
		MustNewEncoder(code)
		str, err := Encode(code, sampleBytes)
		if err != nil {
			t.Fatal(err)
		}
		str2 := encoder.Encode(sampleBytes)
		if str != str2 {
			t.Errorf("encoded string mismatch: %s != %s", str, str2)
		}
		_, err = EncoderByName(name)
		if err != nil {
			t.Fatalf("EncoderByName(%s) failed: %v", name, err)
		}
		// Test that an encoder can be created from the single letter
		// prefix
		r, _ := utf8.DecodeRuneInString(str)
		_, err = EncoderByName(string(r))
		if err != nil {
			t.Fatalf("EncoderByName(%s) failed: %v", string(r), err)
		}
	}
}
go.mod (new file, 9 lines)
@@ -0,0 +1,9 @@
module github.com/multiformats/go-multibase

go 1.24

require (
	github.com/mr-tron/base58 v1.1.0
	github.com/multiformats/go-base32 v0.0.3
	github.com/multiformats/go-base36 v0.1.0
)

go.sum (new file, 6 lines)
@@ -0,0 +1,6 @@
github.com/mr-tron/base58 v1.1.0 h1:Y51FGVJ91WBqCEabAi5OPUz38eAx8DakuAm5svLcsfQ=
github.com/mr-tron/base58 v1.1.0/go.mod h1:xcD2VGqlgYjBdcBLw+TuYLr8afG+Hj8g2eTVqeSzSU8=
github.com/multiformats/go-base32 v0.0.3 h1:tw5+NhuwaOjJCC5Pp82QuXbrmLzWg7uxlMFp8Nq/kkI=
github.com/multiformats/go-base32 v0.0.3/go.mod h1:pLiuGC8y0QR3Ue4Zug5UzK9LjgbkL8NSQj0zQ5Nz/AA=
github.com/multiformats/go-base36 v0.1.0 h1:JR6TyF7JjGd3m6FbLU2cOxhC0Li8z8dLNGQ89tUg4F4=
github.com/multiformats/go-base36 v0.1.0/go.mod h1:kFGE83c6s80PklsHO9sRn2NCoffoRdUUOENyW/Vv6sM=
multibase-conv/main.go (new file, 41 lines)
@@ -0,0 +1,41 @@
package main

import (
	"fmt"
	"os"

	multibase "github.com/multiformats/go-multibase"
)

func main() {
	if len(os.Args) < 3 {
		fmt.Printf("usage: %s <new-base> <multibase-str>...\n", os.Args[0])
		os.Exit(1)
	}

	var newBase multibase.Encoding
	if baseParam := os.Args[1]; len(baseParam) != 0 {
		newBase = multibase.Encoding(baseParam[0])
	} else {
		fmt.Fprintln(os.Stderr, "<new-base> is empty")
		os.Exit(1)
	}

	input := os.Args[2:]

	for _, strmbase := range input {
		_, data, err := multibase.Decode(strmbase)
		if err != nil {
			fmt.Fprintf(os.Stderr, "error while decoding: %s\n", err)
			os.Exit(1)
		}

		newCid, err := multibase.Encode(newBase, data)
		if err != nil {
			fmt.Fprintf(os.Stderr, "error while encoding: %s\n", err)
			os.Exit(1)
		}
		fmt.Println(newCid)
	}

}
multibase.go (168 lines changed)
@@ -1,43 +1,193 @@
package multibase

import (
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"unicode/utf8"

	b58 "github.com/jbenet/go-base58"
	b58 "github.com/mr-tron/base58/base58"
	b32 "github.com/multiformats/go-base32"
	b36 "github.com/multiformats/go-base36"
)

// Encoding identifies the type of base-encoding that a multibase is carrying.
type Encoding int

// These are the encodings specified in the standard, not are all
// supported yet
const (
	Base1 = '1'
	Identity = 0x00
	Base2 = '0'
	Base8 = '7'
	Base10 = '9'
	Base16 = 'f'
	Base58Flickr = 'Z'
	Base16Upper = 'F'
	Base32 = 'b'
	Base32Upper = 'B'
	Base32pad = 'c'
	Base32padUpper = 'C'
	Base32hex = 'v'
	Base32hexUpper = 'V'
	Base32hexPad = 't'
	Base32hexPadUpper = 'T'
	Base36 = 'k'
	Base36Upper = 'K'
	Base58BTC = 'z'
	Base58Flickr = 'Z'
	Base64 = 'm'
	Base64url = 'u'
	Base64pad = 'M'
	Base64urlPad = 'U'
	Base256Emoji = '🚀'
)

// EncodingToStr is a map of the supported encoding, unsupported encoding
// specified in standard are left out
var EncodingToStr = map[Encoding]string{
	0x00: "identity",
	'0': "base2",
	'f': "base16",
	'F': "base16upper",
	'b': "base32",
	'B': "base32upper",
	'c': "base32pad",
	'C': "base32padupper",
	'v': "base32hex",
	'V': "base32hexupper",
	't': "base32hexpad",
	'T': "base32hexpadupper",
	'k': "base36",
	'K': "base36upper",
	'z': "base58btc",
	'Z': "base58flickr",
	'm': "base64",
	'u': "base64url",
	'M': "base64pad",
	'U': "base64urlpad",
	Base256Emoji: "base256emoji",
}

var Encodings = map[string]Encoding{}

func init() {
	for e, n := range EncodingToStr {
		Encodings[n] = e
	}
}

// ErrUnsupportedEncoding is returned when the selected encoding is not known or
// implemented.
var ErrUnsupportedEncoding = fmt.Errorf("selected encoding not supported")

func Encode(base int, data []byte) (string, error) {
// Encode encodes a given byte slice with the selected encoding and returns a
// multibase string (<encoding><base-encoded-string>). It will return
// an error if the selected base is not known.
func Encode(base Encoding, data []byte) (string, error) {
	switch base {
	case Base58BTC:
		return string(Base58BTC) + b58.EncodeAlphabet(data, b58.BTCAlphabet), nil
	case Identity:
		// 0x00 inside a string is OK in golang and causes no problems with the length calculation.
		return string(rune(Identity)) + string(data), nil
	case Base2:
		return string(Base2) + binaryEncodeToString(data), nil
	case Base16:
		return string(Base16) + hex.EncodeToString(data), nil
	case Base16Upper:
		return string(Base16Upper) + hexEncodeToStringUpper(data), nil
	case Base32:
		return string(Base32) + base32StdLowerNoPad.EncodeToString(data), nil
	case Base32Upper:
		return string(Base32Upper) + base32StdUpperNoPad.EncodeToString(data), nil
	case Base32hex:
		return string(Base32hex) + base32HexLowerNoPad.EncodeToString(data), nil
	case Base32hexUpper:
		return string(Base32hexUpper) + base32HexUpperNoPad.EncodeToString(data), nil
	case Base32pad:
		return string(Base32pad) + base32StdLowerPad.EncodeToString(data), nil
	case Base32padUpper:
		return string(Base32padUpper) + base32StdUpperPad.EncodeToString(data), nil
	case Base32hexPad:
		return string(Base32hexPad) + base32HexLowerPad.EncodeToString(data), nil
	case Base32hexPadUpper:
		return string(Base32hexPadUpper) + base32HexUpperPad.EncodeToString(data), nil
	case Base36:
		return string(Base36) + b36.EncodeToStringLc(data), nil
	case Base36Upper:
		return string(Base36Upper) + b36.EncodeToStringUc(data), nil
	case Base58BTC:
		return string(Base58BTC) + b58.EncodeAlphabet(data, b58.BTCAlphabet), nil
	case Base58Flickr:
		return string(Base58Flickr) + b58.EncodeAlphabet(data, b58.FlickrAlphabet), nil
	case Base64pad:
		return string(Base64pad) + base64.StdEncoding.EncodeToString(data), nil
	case Base64urlPad:
		return string(Base64urlPad) + base64.URLEncoding.EncodeToString(data), nil
	case Base64url:
		return string(Base64url) + base64.RawURLEncoding.EncodeToString(data), nil
	case Base64:
		return string(Base64) + base64.RawStdEncoding.EncodeToString(data), nil
	case Base256Emoji:
		return string(Base256Emoji) + base256emojiEncode(data), nil
	default:
		return "", ErrUnsupportedEncoding
	}
}

func Decode(data string) (int, []byte, error) {
// Decode takes a multibase string and decodes into a bytes buffer.
// It will return an error if the selected base is not known.
func Decode(data string) (Encoding, []byte, error) {
	if len(data) == 0 {
		return 0, nil, fmt.Errorf("cannot decode multibase for zero length string")
	}

	switch data[0] {
	r, _ := utf8.DecodeRuneInString(data)
	enc := Encoding(r)

	switch enc {
	case Identity:
		return Identity, []byte(data[1:]), nil
	case Base2:
		bytes, err := decodeBinaryString(data[1:])
		return enc, bytes, err
	case Base16, Base16Upper:
		bytes, err := hex.DecodeString(data[1:])
		return enc, bytes, err
	case Base32, Base32Upper:
		bytes, err := b32.RawStdEncoding.DecodeString(data[1:])
		return enc, bytes, err
	case Base32hex, Base32hexUpper:
		bytes, err := b32.RawHexEncoding.DecodeString(data[1:])
		return enc, bytes, err
	case Base32pad, Base32padUpper:
		bytes, err := b32.StdEncoding.DecodeString(data[1:])
		return enc, bytes, err
	case Base32hexPad, Base32hexPadUpper:
		bytes, err := b32.HexEncoding.DecodeString(data[1:])
		return enc, bytes, err
	case Base36, Base36Upper:
		bytes, err := b36.DecodeString(data[1:])
		return enc, bytes, err
	case Base58BTC:
		return Base58BTC, b58.DecodeAlphabet(data[1:], b58.BTCAlphabet), nil
		bytes, err := b58.DecodeAlphabet(data[1:], b58.BTCAlphabet)
		return Base58BTC, bytes, err
	case Base58Flickr:
		bytes, err := b58.DecodeAlphabet(data[1:], b58.FlickrAlphabet)
		return Base58Flickr, bytes, err
	case Base64pad:
		bytes, err := base64.StdEncoding.DecodeString(data[1:])
		return Base64pad, bytes, err
	case Base64urlPad:
		bytes, err := base64.URLEncoding.DecodeString(data[1:])
		return Base64urlPad, bytes, err
	case Base64:
		bytes, err := base64.RawStdEncoding.DecodeString(data[1:])
		return Base64, bytes, err
	case Base64url:
		bytes, err := base64.RawURLEncoding.DecodeString(data[1:])
		return Base64url, bytes, err
	case Base256Emoji:
		bytes, err := base256emojiDecode(data[4:])
		return Base256Emoji, bytes, err
	default:
		return -1, nil, ErrUnsupportedEncoding
	}
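Encode always prepends the one-rune prefix and Decode dispatches on that rune, so the same payload round-trips through any supported base. A short sketch over two of the encodings listed in EncodingToStr (assumed import path from go.mod; output shown only in comments):

```go
package main

import (
	"fmt"

	"github.com/multiformats/go-multibase"
)

func main() {
	payload := []byte{0xde, 0xad, 0xbe, 0xef}

	for _, base := range []multibase.Encoding{multibase.Base16, multibase.Base64url} {
		s, err := multibase.Encode(base, payload)
		if err != nil {
			panic(err)
		}
		enc, out, err := multibase.Decode(s)
		if err != nil {
			panic(err)
		}
		// The first rune of s is the prefix; Decode reports it back as enc.
		fmt.Printf("%s -> %c %x\n", s, enc, out) // e.g. "fdeadbeef -> f deadbeef"
	}
}
```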
multibase_test.go (modified)
@@ -2,15 +2,130 @@ package multibase

import (
	"bytes"
	"math/rand"
	"crypto/rand"
	"sort"
	"testing"
)

func TestBase58RoundTrip(t *testing.T) {
	buf := make([]byte, 16)
	rand.Read(buf)
func TestMap(t *testing.T) {
	for s, e := range Encodings {
		s2 := EncodingToStr[e]
		if s != s2 {
			t.Errorf("round trip failed on encoding map: %s != %s", s, s2)
		}
	}
	for e, s := range EncodingToStr {
		e2 := Encodings[s]
		if e != e2 {
			t.Errorf("round trip failed on encoding map: '%c' != '%c'", e, e2)
		}
	}
}

	enc, err := Encode(Base58BTC, buf)
var sampleBytes = []byte("Decentralize everything!!!")
var encodedSamples = map[Encoding]string{
	Identity: string(rune(0x00)) + "Decentralize everything!!!",
	Base2: "00100010001100101011000110110010101101110011101000111001001100001011011000110100101111010011001010010000001100101011101100110010101110010011110010111010001101000011010010110111001100111001000010010000100100001",
	Base16: "f446563656e7472616c697a652065766572797468696e67212121",
	Base16Upper: "F446563656E7472616C697A652065766572797468696E67212121",
	Base32: "birswgzloorzgc3djpjssazlwmvzhs5dinfxgoijbee",
	Base32Upper: "BIRSWGZLOORZGC3DJPJSSAZLWMVZHS5DINFXGOIJBEE",
	Base32pad: "cirswgzloorzgc3djpjssazlwmvzhs5dinfxgoijbee======",
	Base32padUpper: "CIRSWGZLOORZGC3DJPJSSAZLWMVZHS5DINFXGOIJBEE======",
	Base32hex: "v8him6pbeehp62r39f9ii0pbmclp7it38d5n6e89144",
	Base32hexUpper: "V8HIM6PBEEHP62R39F9II0PBMCLP7IT38D5N6E89144",
	Base32hexPad: "t8him6pbeehp62r39f9ii0pbmclp7it38d5n6e89144======",
	Base32hexPadUpper: "T8HIM6PBEEHP62R39F9II0PBMCLP7IT38D5N6E89144======",
	Base36: "km552ng4dabi4neu1oo8l4i5mndwmpc3mkukwtxy9",
	Base36Upper: "KM552NG4DABI4NEU1OO8L4I5MNDWMPC3MKUKWTXY9",
	Base58BTC: "z36UQrhJq9fNDS7DiAHM9YXqDHMPfr4EMArvt",
	Base58Flickr: "Z36tpRGiQ9Endr7dHahm9xwQdhmoER4emaRVT",
	Base64: "mRGVjZW50cmFsaXplIGV2ZXJ5dGhpbmchISE",
	Base64url: "uRGVjZW50cmFsaXplIGV2ZXJ5dGhpbmchISE",
	Base64pad: "MRGVjZW50cmFsaXplIGV2ZXJ5dGhpbmchISE=",
	Base64urlPad: "URGVjZW50cmFsaXplIGV2ZXJ5dGhpbmchISE=",
	Base256Emoji: "🚀💛✋💃✋😻😈🥺🤤🍀🌟💐✋😅✋💦✋🥺🏃😈😴🌟😻😝👏👏👏",
}

func testEncode(t *testing.T, encoding Encoding, bytes []byte, expected string) {
	actual, err := Encode(encoding, bytes)
	if err != nil {
		t.Error(err)
		return
	}
	if actual != expected {
		t.Errorf("encoding failed for %c (%d / %s), expected: %s, got: %s", encoding, encoding, EncodingToStr[encoding], expected, actual)
	}
}

func testDecode(t *testing.T, expectedEncoding Encoding, expectedBytes []byte, data string) {
	actualEncoding, actualBytes, err := Decode(data)
	if err != nil {
		t.Error(err)
		return
	}
	if actualEncoding != expectedEncoding {
		t.Errorf("wrong encoding code, expected: %c (%d), got %c (%d)", expectedEncoding, expectedEncoding, actualEncoding, actualEncoding)
	}
	if !bytes.Equal(actualBytes, expectedBytes) {
		t.Errorf("decoding failed for %c (%d), expected: %v, got %v", actualEncoding, actualEncoding, expectedBytes, actualBytes)
	}
}

func TestEncode(t *testing.T) {
	for encoding := range EncodingToStr {
		testEncode(t, encoding, sampleBytes, encodedSamples[encoding])
	}
}

func TestDecode(t *testing.T) {
	for encoding := range EncodingToStr {
		testDecode(t, encoding, sampleBytes, encodedSamples[encoding])
	}
}

func TestRoundTrip(t *testing.T) {

	for base := range EncodingToStr {
		if int(base) == 0 {
			// skip identity: any byte goes there
			continue
		}

		_, _, err := Decode(string(rune(base)) + "\u00A0")
		if err == nil {
			t.Fatal(EncodingToStr[base] + " decode should fail on low-unicode")
		}

		_, _, err = Decode(string(rune(base)) + "\u1F4A8")
		if err == nil {
			t.Fatal(EncodingToStr[base] + " decode should fail on emoji")
		}

		_, _, err = Decode(string(rune(base)) + "!")
		if err == nil {
			t.Fatal(EncodingToStr[base] + " decode should fail on punctuation")
		}

		_, _, err = Decode(string(rune(base)) + "\xA0")
		if err == nil {
			t.Fatal(EncodingToStr[base] + " decode should fail on high-latin1")
		}
	}

	buf := make([]byte, 137+16) // sufficiently large prime number of bytes + another 16 to test leading 0s
	rand.Read(buf[16:])

	for base := range EncodingToStr {

		// test roundtrip from the full zero-prefixed buffer down to a single byte
		for i := 0; i < len(buf); i++ {

			// use a copy to verify we are not overwriting the supplied buffer
			newBuf := make([]byte, len(buf)-i)
			copy(newBuf, buf[i:])

			enc, err := Encode(base, newBuf)
			if err != nil {
				t.Fatal(err)
			}
@@ -20,16 +135,165 @@ func TestBase58RoundTrip(t *testing.T) {
				t.Fatal(err)
			}

			if e != Base58BTC {
			if e != base {
				t.Fatal("got wrong encoding out")
			}

			if !bytes.Equal(buf, out) {
				t.Fatal("input wasnt the same as output", buf, out)
			if !bytes.Equal(newBuf, buf[i:]) {
				t.Fatal("the provided buffer was modified", buf[i:], out)
			}

			_, _, err = Decode("")
			if !bytes.Equal(buf[i:], out) {
				t.Fatal("input wasnt the same as output", buf[i:], out)
			}

			// When we have 3 leading zeroes, do a few extra tests
			// ( choice of leading zeroes is arbitrary - just cutting down on test permutations )

			if i == 13 {

				// if this is a case-insensitive codec semi-randomly swap case in enc and try again
				name := EncodingToStr[base]
				if name[len(name)-5:] == "upper" || Encodings[name+"upper"] > 0 {
					caseTamperedEnc := []byte(enc)

					for _, j := range []int{3, 5, 8, 13, 21, 23, 29, 47, 52} {
						if caseTamperedEnc[j] >= 65 && caseTamperedEnc[j] <= 90 {
							caseTamperedEnc[j] += 32
						} else if caseTamperedEnc[j] >= 97 && caseTamperedEnc[j] <= 122 {
							caseTamperedEnc[j] -= 32
						}
					}

					e, out, err := Decode(string(caseTamperedEnc))
					if err != nil {
						t.Fatal(err)
					}

					if e != base {
						t.Fatal("got wrong encoding out")
					}
					if !bytes.Equal(buf[i:], out) {
						t.Fatal("input wasn't the same as output", buf[i:], out)
					}
				}
			}
		}
	}

	// Test that nothing overflows
	maxValueBuf := make([]byte, 131)
	for i := 0; i < len(maxValueBuf); i++ {
		maxValueBuf[i] = 0xFF
	}

	for base := range EncodingToStr {

		// test roundtrip from the complete buffer down to a single byte
		for i := 0; i < len(maxValueBuf); i++ {

			enc, err := Encode(base, maxValueBuf[i:])
			if err != nil {
				t.Fatal(err)
			}

			e, out, err := Decode(enc)
			if err != nil {
				t.Fatal(err)
			}

			if e != base {
				t.Fatal("got wrong encoding out")
			}

			if !bytes.Equal(maxValueBuf[i:], out) {
				t.Fatal("input wasn't the same as output", maxValueBuf[i:], out)
			}
		}
	}

	_, _, err := Decode("")
	if err == nil {
		t.Fatal("shouldnt be able to decode empty string")
		t.Fatal("shouldn't be able to decode empty string")
	}
}

var benchmarkBuf [36]byte // typical CID size
var benchmarkCodecs []string

func init() {
	rand.Read(benchmarkBuf[:])

	benchmarkCodecs = make([]string, 0, len(Encodings))
	for n := range Encodings {

		// // Only bench b36 and b58
		// if len(n) < 6 || (n[4:6] != "36" && n[4:6] != "58") {
		// 	continue
		// }

		benchmarkCodecs = append(benchmarkCodecs, n)
	}
	sort.Strings(benchmarkCodecs)
}

func BenchmarkRoundTrip(b *testing.B) {
	b.ResetTimer()

	for _, name := range benchmarkCodecs {
		b.Run(name, func(b *testing.B) {
			base := Encodings[name]
			for i := 0; i < b.N; i++ {
				enc, err := Encode(base, benchmarkBuf[:])
				if err != nil {
					b.Fatal(err)
				}

				e, out, err := Decode(enc)
				if err != nil {
					b.Fatal(err)
				}

				if e != base {
					b.Fatal("got wrong encoding out")
				}

				if !bytes.Equal(benchmarkBuf[:], out) {
					b.Fatal("input wasnt the same as output", benchmarkBuf, out)
				}
			}
		})
	}
}

func BenchmarkEncode(b *testing.B) {
	b.ResetTimer()

	for _, name := range benchmarkCodecs {
		b.Run(name, func(b *testing.B) {
			base := Encodings[name]
			for i := 0; i < b.N; i++ {
				_, err := Encode(base, benchmarkBuf[:])
				if err != nil {
					b.Fatal(err)
				}
			}
		})
	}
}

func BenchmarkDecode(b *testing.B) {
	b.ResetTimer()

	for _, name := range benchmarkCodecs {
		b.Run(name, func(b *testing.B) {
			enc, _ := Encode(Encodings[name], benchmarkBuf[:])
			for i := 0; i < b.N; i++ {
				_, _, err := Decode(enc)
				if err != nil {
					b.Fatal(err)
				}
			}
		})
	}
}
package.json (15 lines changed)
@@ -3,21 +3,8 @@
  "bugs": {
    "url": "https://github.com/multiformats/go-multibase"
  },
  "gx": {
    "dvcsimport": "github.com/multiformats/go-multibase"
  },
  "gxDependencies": [
    {
      "author": "whyrusleeping",
      "hash": "QmT8rehPR3F6bmwL6zjUN8XpiDBFFpMP2myPdC6ApsWfJf",
      "name": "go-base58",
      "version": "0.0.0"
    }
  ],
  "gxVersion": "0.8.0",
  "language": "go",
  "license": "",
  "name": "go-multibase",
  "version": "0.2.1"
  "version": "0.3.0"
}
spec (submodule)
Submodule spec added at 4c8344e378
spec_test.go (new file, 180 lines)
@@ -0,0 +1,180 @@
package multibase

import (
	"encoding/csv"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"testing"
	"unicode/utf8"
)

func TestSpec(t *testing.T) {
	file, err := os.Open("spec/multibase.csv")
	if err != nil {
		t.Fatal(err)
	}
	defer file.Close()

	reader := csv.NewReader(file)
	reader.LazyQuotes = false
	reader.FieldsPerRecord = 4
	reader.TrimLeadingSpace = true

	values, err := reader.ReadAll()
	if err != nil {
		t.Error(err)
	}
	expectedEncodings := make(map[Encoding]string, len(values)-1)
	for _, v := range values[1:] {
		encoding := v[0]
		codeStr := v[1]

		var code Encoding
		if strings.HasPrefix(codeStr, "0x") {
			i, err := strconv.ParseUint(codeStr[2:], 16, 64)
			if err != nil {
				t.Errorf("invalid multibase byte %q", codeStr)
				continue
			}
			code = Encoding(i)
		} else {
			codeRune, length := utf8.DecodeRuneInString(codeStr)
			if code == utf8.RuneError {
				t.Errorf("multibase %q wasn't valid utf8", codeStr)
				continue
			}
			if length != len(codeStr) {
				t.Errorf("multibase %q wasn't a single character", codeStr)
				continue
			}
			code = Encoding(codeRune)
		}
		expectedEncodings[code] = encoding
	}

	for name, enc := range Encodings {
		expectedName, ok := expectedEncodings[enc]
		if !ok {
			t.Errorf("encoding %q (%c) not defined in the spec", name, enc)
			continue
		}
		if expectedName != name {
			t.Errorf("encoding %q (%c) has unexpected name %q", expectedName, enc, name)
		}
	}
}
func TestSpecVectors(t *testing.T) {
	files, err := filepath.Glob("spec/tests/*.csv")
	if err != nil {
		t.Fatal(err)
	}
	for _, fname := range files {
		t.Run(fname, func(t *testing.T) {
			file, err := os.Open(fname)
			if err != nil {
				t.Error(err)
				return
			}
			defer file.Close()
			reader := csv.NewReader(file)
			reader.LazyQuotes = false
			reader.FieldsPerRecord = 2
			reader.TrimLeadingSpace = true

			values, err := reader.ReadAll()
			if err != nil {
				t.Error(err)
			}
			if len(values) == 0 {
				t.Error("no test values")
				return
			}
			header := values[0]

			var decodeOnly bool
			switch header[0] {
			case "encoding":
			case "non-canonical encoding":
				decodeOnly = true
			default:
				t.Errorf("invalid test spec %q", fname)
				return
			}

			testValue, err := strconv.Unquote("\"" + header[1] + "\"")
			if err != nil {
				t.Error("failed to unquote testcase:", err)
				return
			}

			for _, testCase := range values[1:] {
				encodingName := testCase[0]
				expected := testCase[1]

				t.Run(encodingName, func(t *testing.T) {
					encoder, err := EncoderByName(encodingName)
					if err != nil {
						t.Skipf("skipping %s: not supported", encodingName)
						return
					}
					if !decodeOnly {
						t.Logf("encoding %q with %s", testValue, encodingName)
						actual := encoder.Encode([]byte(testValue))
						if expected != actual {
							t.Errorf("expected %q, got %q", expected, actual)
						}
					}
					t.Logf("decoding %q", expected)
					encoding, decoded, err := Decode(expected)
					if err != nil {
						t.Error("failed to decode:", err)
						return
					}
					expectedEncoding := Encodings[encodingName]
					if encoding != expectedEncoding {
						t.Errorf("expected encoding to be %c, got %c", expectedEncoding, encoding)
					}
					if string(decoded) != testValue {
						t.Errorf("failed to decode %q to %q, got %q", expected, testValue, string(decoded))
					}
				})

			}
		})
	}
}

func FuzzDecode(f *testing.F) {
	files, err := filepath.Glob("spec/tests/*.csv")
	if err != nil {
		f.Fatal(err)
	}
	for _, fname := range files {
		func() {
			file, err := os.Open(fname)
			if err != nil {
				f.Fatal(err)
			}
			defer file.Close()
			reader := csv.NewReader(file)
			reader.LazyQuotes = false
			reader.FieldsPerRecord = 2
			reader.TrimLeadingSpace = true

			values, err := reader.ReadAll()
			if err != nil {
				f.Fatal(err)
			}

			for _, tc := range values[1:] {
				f.Add(tc[1])
			}
		}()
	}

	f.Fuzz(func(_ *testing.T, data string) {
		Decode(data)
	})
}
version.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "version": "v0.2.0"
}