Lots of progress on the decoding side: we can now parse a single line into several types. Left to do: encoding, better errors, and formally defining valid attribute characters.

David Arroyo 2013-07-14 11:27:12 -04:00
parent 751b403e15
commit a19bd721cd
7 changed files with 665 additions and 281 deletions

ndb.go (113 changed lines)

@@ -1,9 +1,24 @@
// Package ndb decodes and encodes simple strings of key=value pairs.
// Package ndb decodes and encodes simple strings of attribute=value pairs.
// The accepted format is based on Plan 9's ndb(6) format found at
// http://plan9.bell-labs.com/magic/man2html/6/ndb . Values containing
// white space must be quoted in single quotes. Two single quotes escape
// a literal single quote. Attributes must not contain white space. A
// value may contain any printable unicode character except for a new line.
// http://plan9.bell-labs.com/magic/man2html/6/ndb, with additional
// rules for quoting values containing white space.
//
// Attributes are UTF-8 encoded strings of any printable non-whitespace
// character, except for the equals sign ('='). Value strings may contain
// any printable character except for a new line. Values containing white
// space must be enclosed in single quotes. Single quotes can be escaped
// by doubling them, like so:
//
// * {"example1": "Let's go shopping"} is encoded as
// example1='Let''s go shopping'
// * {"example2": "Escape ' marks by doubling like this: ''"}
// example2='Escape '' marks by doubling like this: '''''
// * {"example3": "can't"}
// example3=can''t
//
// Tuples must be separated by at least one whitespace character. The same
// attribute may appear multiple times in an ndb string. When decoding an
// ndb string with repeated attributes, the destination type must be a slice.
package ndb
import (
@@ -16,9 +31,11 @@ import (
"unicode/utf8"
)
// A SyntaxError contains the data that caused an error and the
// offset of the first byte that caused the syntax error. Data may
// only be valid until the next call to the Decode() method
// A SyntaxError occurs when malformed input, such as an unterminated
// quoted string, is received. It contains the UTF-8 encoded line that
// was being read and the position of the first byte that caused the
// syntax error. Data may only be valid until the next call to the
// Decode() method.
type SyntaxError struct {
Data []byte
Offset int64
@@ -67,28 +84,30 @@ type Encoder struct {
// A decoder wraps an io.Reader and decodes successive ndb strings
// into Go values using the Decode() function.
type Decoder struct {
src *textproto.Reader
pairbuf []pair
src *textproto.Reader
pairbuf []pair
finfo map[string][]int
havemulti bool
attrs map[string]struct{}
multi map[string]struct{}
}
// The Parse function reads an entire ndb string and unmarshals it
// into the Go value v. Parse will behave differently depending on
// the concrete type of v. Value v must be a reference type, either a
// pointer, map, or slice.
//
// * If v is a slice, Parse will decode all lines from the ndb
// input into array elements. Otherwise, Parse will decode only
// the first line.
//
// * If v is of the type (map[string] interface{}), Parse will
// populate v with key/value pairs, where value is decoded
// according to the concrete type of the map's value.
//
// * If v is a struct, Parse will populate struct fields whose
// names match the ndb attribute. Struct fields may be annotated
// with a tag of the form `ndb: name`, where name matches the
// attribute string in the ndb input.
//
// into the Go value v. Value v must be a pointer. Parse will behave
// differently depending on the type of value v points to.
//
// If v is a slice, Parse will decode all lines from the ndb input
// into slice elements. Otherwise, Parse will decode only the first
// line.
//
// If v is a map, Parse will populate v with key/value pairs, where
// each value is decoded according to the map's element type.
//
// If v is a struct, Parse will populate struct fields whose names
// match the ndb attribute. Struct fields may be annotated with a tag
// of the form `ndb:"name"`, where name matches the attribute string
// in the ndb input.
//
// Struct fields or map keys that do not match the ndb input are left
// unmodified. Ndb attributes that do not match any struct fields are
// silently dropped. If an ndb string cannot be converted to the
@@ -104,22 +123,45 @@ func Parse(data []byte, v interface{}) error {
func NewDecoder(r io.Reader) *Decoder {
d := new(Decoder)
d.src = textproto.NewReader(bufio.NewReader(r))
d.attrs = make(map[string] struct{}, 8)
d.multi = make(map[string] struct{}, 8)
d.finfo = make(map[string] []int, 8)
return d
}
// The Decode method follows the same parsing rules as Parse(), but
// will read at most one ndb string. As such, slices or arrays are
// not valid types for v.
// reads its input from the Decoder's input stream.
func (d *Decoder) Decode(v interface{}) error {
val := reflect.ValueOf(v)
if val.Kind() != reflect.Ptr || val.IsNil() {
return &TypeError{val.Type()}
typ := reflect.TypeOf(v)
if typ.Kind() != reflect.Ptr {
return &TypeError{typ}
}
if p,err := d.getPairs(); err != nil {
if typ.Elem().Kind() == reflect.Slice {
return d.decodeSlice(val)
}
p,err := d.getPairs()
if err != nil {
return err
} else {
return d.saveData(p, val.Elem())
}
switch typ.Elem().Kind() {
default:
return &TypeError{val.Type()}
case reflect.Map:
if val.Elem().IsNil() {
val.Elem().Set(reflect.MakeMap(typ.Elem()))
}
return d.saveMap(p,val.Elem())
case reflect.Struct:
if val.IsNil() {
return &TypeError{nil}
}
return d.saveStruct(p,val.Elem())
}
return nil
}
// Emit encodes a value into an ndb string. Emit will use the String
@@ -129,7 +171,8 @@ func (d *Decoder) Decode(v interface{}) error {
// the struct field, or the field's ndb annotation if it exists.
// Ndb attributes may not contain white space. Ndb values may contain
// white space but may not contain new lines. If Emit cannot produce
// valid ndb strings, an error is returned.
// valid ndb strings, an error is returned. No guarantee is made about
// the order of the tuples.
func Emit(v interface{}) ([]byte, error) {
return nil,nil
}
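
The package comment and the Parse documentation above describe the wire format and the struct-tag mapping. A minimal usage sketch under those rules; the screen type, attribute names, and import path are illustrative and not part of this commit:

package main

import (
	"fmt"
	"log"

	"ndb" // hypothetical import path for this package
)

// screen is an illustrative destination type; the tags match the
// lower-case attributes used in the input below.
type screen struct {
	Title  string `ndb:"title"`
	Width  int    `ndb:"width"`
	Height int    `ndb:"height"`
}

func main() {
	// A value containing white space is single-quoted, and a literal
	// quote inside it is escaped by doubling it.
	in := []byte("title='Dave''s screen' width=640 height=400")

	var cfg screen
	if err := ndb.Parse(in, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", cfg) // {Title:Dave's screen Width:640 Height:400}
}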

parse.go (deleted, 177 lines)

@@ -1,177 +0,0 @@
package ndb
import (
"reflect"
"net/textproto"
"unicode"
"bytes"
"fmt"
)
type scanner struct {
src *textproto.Reader
}
type pair struct {
attr, val []byte
}
func (p pair) String() string {
return fmt.Sprintf("%s => %s", string(p.attr), string(p.val))
}
func errBadAttr(line []byte, offset int64) error {
return &SyntaxError { line, offset, "Invalid attribute name" }
}
func errUnterminated(line []byte, offset int64) error {
return &SyntaxError { line, offset, "Unterminated quoted string" }
}
func errBadUnicode(line []byte, offset int64) error {
return &SyntaxError { line, offset, "Invalid UTF8 input" }
}
func errNewline(line []byte, offset int64) error {
return &SyntaxError { line, offset, "Values may not contain new lines" }
}
func (d *Decoder) getPairs() ([]pair, error) {
var tuples [][]byte
d.pairbuf = d.pairbuf[0:0]
line, err := d.src.ReadContinuedLineBytes()
if err != nil {
return nil,err
}
tuples,err = lex(line)
if err != nil {
return nil,err
} else {
for _,t := range tuples {
d.pairbuf = append(d.pairbuf, parseTuple(t))
}
}
return d.pairbuf, nil
}
func (d *Decoder) saveData(p []pair, val reflect.Value) error {
return nil
}
func parseTuple(tuple []byte) pair {
var p pair
fmt.Printf("Split %s\n", string(tuple))
s := bytes.SplitN(tuple, []byte("="), 2)
p.attr = s[0]
if len(s) > 1 {
if len(s[1]) > 1 {
if s[1][0] == '\'' && len(s[1]) > 2 && s[1][len(s[1])-1] == '\'' {
s[1] = s[1][1:len(s[1])-1]
}
}
p.val = bytes.Replace(s[1], []byte("''"), []byte("'"), -1)
}
fmt.Println("Made ", p)
return p
}
type scanState []int
func (s *scanState) push(n int) {
*s = append(*s, n)
}
func (s scanState) top() int {
if len(s) > 0 {
return s[len(s)-1]
}
return scanNone
}
func (s *scanState) pop() int {
v := s.top()
if len(*s) > 0 {
*s = (*s)[0:len(*s)-1]
}
return v
}
const (
scanNone = iota
scanAttr
scanValue
scanValueStart
scanQuoteStart
scanQuoteString
)
func lex(line []byte) ([][]byte, error) {
var offset int64
state := make(scanState, 0, 3)
tuples := make([][]byte, 0, 10)
buf := bytes.NewReader(line)
var beg int64
for r,sz,err := buf.ReadRune(); err == nil; r,sz,err = buf.ReadRune() {
fmt.Printf("(%d,%c) %s|%s\n", state.top(), r, line[:offset], line[offset:])
if r == 0xFFFD && sz == 1 {
return nil, errBadUnicode(line, offset)
}
switch state.top() {
case scanNone:
if unicode.IsSpace(r) {
// skip
} else if unicode.IsLetter(r) || unicode.IsNumber(r) {
state.push(scanAttr)
beg = offset
} else {
return nil,errBadAttr(line, offset)
}
case scanAttr:
if unicode.IsSpace(r) {
state.pop()
tuples = append(tuples, line[beg:offset])
fmt.Println("Save", string(line[beg:offset]))
} else if r == '=' {
state.pop()
state.push(scanValueStart)
} else if !(unicode.IsLetter(r) || unicode.IsNumber(r)) {
return nil,errBadAttr(line, offset)
}
case scanValueStart:
if unicode.IsSpace(r) {
state.pop()
tuples = append(tuples, line[beg:offset])
fmt.Println("Save", string(line[beg:offset]))
} else if r == '\'' {
state.push(scanQuoteStart)
} else {
state.pop()
state.push(scanValue)
}
case scanValue:
if unicode.IsSpace(r) {
state.pop()
tuples = append(tuples, line[beg:offset])
fmt.Println("Save", string(line[beg:offset]))
}
case scanQuoteStart:
if r == '\'' {
state.pop()
} else {
state.pop()
state.push(scanQuoteString)
}
case scanQuoteString:
if r == '\'' {
state.pop()
} else if r == '\n' {
return nil,errNewline(line, offset)
}
}
offset += int64(sz)
}
switch state.top() {
case scanQuoteString, scanQuoteStart:
return nil,errUnterminated(line, offset)
case scanNone:
default:
tuples = append(tuples, line[beg:offset])
fmt.Println("Save", string(line[beg:offset]))
}
return tuples,nil
}


@@ -1,69 +0,0 @@
package ndb
import (
"testing"
"bytes"
)
var parseTests = []struct {
in []byte
out []pair
}{
{
in: []byte("key1=val1 key2=val2 key3=val3"),
out: []pair {
{[]byte("key1"),[]byte("val1")},
{[]byte("key2"),[]byte("val2")},
{[]byte("key3"),[]byte("val3")}},
},
{
in: []byte("title='Some value with spaces' width=340 height=200"),
out: []pair {
{[]byte("title"),[]byte("Some value with spaces")},
{[]byte("width"),[]byte("340")},
{[]byte("height"),[]byte("200")}},
},
{
in: []byte("title='Dave''s pasta' sq=Davis cost=$$"),
out: []pair {
{[]byte("title"),[]byte("Dave's pasta")},
{[]byte("sq"),[]byte("Davis")},
{[]byte("cost"),[]byte("$$")}},
},
{
in: []byte("action=''bradley key=jay mod=ctrl+alt+shift"),
out: []pair {
{[]byte("action"),[]byte("'bradley")},
{[]byte("key"),[]byte("jay")},
{[]byte("mod"),[]byte("ctrl+alt+shift")}},
},
{
in: []byte("action=reload key='' mod=ctrl+alt+shift"),
out: []pair {
{[]byte("action"),[]byte("reload")},
{[]byte("key"),[]byte("'")},
{[]byte("mod"),[]byte("ctrl+alt+shift")}},
},
}
func Test_parsing(t *testing.T) {
for i,tt := range parseTests {
d := NewDecoder(bytes.NewReader(tt.in))
p,err := d.getPairs()
if err != nil {
t.Error(err)
t.FailNow()
} else {
for j := range tt.out {
if j > len(p) || !match(p[j],tt.out[j]) {
t.Errorf("%d: getPairs %s => %v, want %v",i, tt.in, p, tt.out)
t.FailNow()
}
}
}
}
}
func match(p1, p2 pair) bool {
return (bytes.Compare(p1.attr, p2.attr) == 0) && (bytes.Compare(p1.val, p2.val) == 0)
}

read.go (new file, 364 lines)

@@ -0,0 +1,364 @@
package ndb
import (
"io"
"reflect"
"net/textproto"
"unicode"
"strconv"
"bytes"
"strings"
"fmt"
)
type scanner struct {
src *textproto.Reader
}
type pair struct {
attr, val []byte
}
func (p pair) String() string {
return fmt.Sprintf("%s => %s", string(p.attr), string(p.val))
}
func errBadAttr(line []byte, offset int64) error {
return &SyntaxError { line, offset, "Invalid attribute name" }
}
func errUnterminated(line []byte, offset int64) error {
return &SyntaxError { line, offset, "Unterminated quoted string" }
}
func errBadUnicode(line []byte, offset int64) error {
return &SyntaxError { line, offset, "Invalid UTF8 input" }
}
func errMissingSpace(line []byte, offset int64) error {
return &SyntaxError { line, offset, "Missing white space between tuples" }
}
func (d *Decoder) getPairs() ([]pair, error) {
line, err := d.src.ReadContinuedLineBytes()
if err != nil {
return nil,err
}
d.reset()
return d.parseLine(line)
}
func (d *Decoder) reset() {
d.pairbuf = d.pairbuf[0:0]
for k := range d.finfo {
delete(d.finfo, k)
}
for k := range d.attrs {
delete(d.attrs, k)
}
for k := range d.multi {
delete(d.multi, k)
}
d.havemulti = false
}
func (d *Decoder) decodeSlice(val reflect.Value) error {
var err error
if val.Kind() != reflect.Ptr {
return &TypeError{val.Type()}
}
if val.Type().Elem().Kind() != reflect.Slice {
return &TypeError{val.Type()}
}
if val.Elem().IsNil() {
val.Elem().Set(reflect.MakeSlice(val.Type().Elem(), 0, 5))
}
add := reflect.New(val.Type().Elem().Elem())
for err = d.Decode(add.Interface()); err == nil; err = d.Decode(add.Interface()) {
s := reflect.Append(val.Elem(), add.Elem())
val.Elem().Set(s)
}
if err == io.EOF {
return nil
} else if err != nil {
return err
}
return nil
}
func (d *Decoder) saveMap(pairs []pair, val reflect.Value) error {
kv := reflect.New(val.Type().Key())
if d.havemulti {
if val.Type().Elem().Kind() != reflect.Slice {
return &TypeError{val.Type()}
}
vv := reflect.New(val.Type().Elem().Elem())
for _,p := range pairs {
if err := storeVal(kv, p.attr); err != nil {
return err
}
if err := storeVal(vv, p.val); err != nil {
return err
}
slot := val.MapIndex(kv.Elem())
if slot.Kind() == reflect.Invalid {
slot = reflect.MakeSlice(val.Type().Elem(), 0, 4)
}
slot = reflect.Append(slot, vv.Elem())
val.SetMapIndex(kv.Elem(), slot)
}
} else {
vv := reflect.New(val.Type().Elem())
for _,p := range pairs {
if err := storeVal(kv, p.attr); err != nil {
return err
}
if err := storeVal(vv, p.val); err != nil {
return err
}
val.SetMapIndex(kv.Elem(), vv.Elem())
}
}
return nil
}
func (d *Decoder) saveStruct(pairs []pair, val reflect.Value) error {
var tag string
typ := val.Type()
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
if !val.FieldByIndex(field.Index).CanSet() {
continue
}
tag = field.Tag.Get("ndb")
if tag != "" {
d.finfo[tag] = field.Index
} else {
d.finfo[field.Name] = field.Index
}
}
for _,p := range pairs {
if id,ok := d.finfo[string(p.attr)]; ok {
f := val.FieldByIndex(id)
if _,ok := d.multi[string(p.attr)]; ok {
if f.Kind() != reflect.Slice {
return &TypeError{f.Type()}
}
add := reflect.New(f.Type().Elem())
if err := storeVal(add, p.val); err != nil {
return err
}
f.Set(reflect.Append(f, add.Elem()))
} else if err := storeVal(f, p.val); err != nil {
return err
}
}
}
return nil
}
func storeVal(dst reflect.Value, src []byte) error {
if dst.Kind() == reflect.Ptr {
if dst.IsNil() {
dst.Set(reflect.New(dst.Type().Elem()))
}
dst = dst.Elem()
}
switch dst.Kind() {
default:
return &TypeError{dst.Type()}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
itmp, err := strconv.ParseInt(string(src), 10, dst.Type().Bits())
if err != nil {
return err
}
dst.SetInt(itmp)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
utmp, err := strconv.ParseUint(string(src), 10, dst.Type().Bits())
if err != nil {
return err
}
dst.SetUint(utmp)
case reflect.Float32, reflect.Float64:
ftmp, err := strconv.ParseFloat(string(src), dst.Type().Bits())
if err != nil {
return err
}
dst.SetFloat(ftmp)
case reflect.Bool:
value, err := strconv.ParseBool(strings.TrimSpace(string(src)))
if err != nil {
return err
}
dst.SetBool(value)
case reflect.String:
dst.SetString(string(src))
case reflect.Slice:
if len(src) == 0 {
src = []byte{}
}
dst.SetBytes(src)
}
return nil
}
type scanState []int
func (s *scanState) push(n int) {
*s = append(*s, n)
}
func (s scanState) top() int {
if len(s) > 0 {
return s[len(s)-1]
}
return scanNone
}
func (s *scanState) pop() int {
v := s.top()
if len(*s) > 0 {
*s = (*s)[0:len(*s)-1]
}
return v
}
const (
scanNone = iota
scanAttr
scanValue
scanValueStart
scanQuoteStart
scanQuoteValue
scanQuoteClose
)
// This is the main tokenizing function. For now it's a messy state machine.
// It could be cleaned up with better use of structures and methods, or
// by copying Rob Pike's Go lexing talk.
func (d *Decoder) parseLine(line []byte) ([]pair, error) {
var add pair
var beg,offset int64
var esc bool
state := make(scanState, 0, 3)
buf := bytes.NewReader(line)
for r,sz,err := buf.ReadRune(); err == nil; r,sz,err = buf.ReadRune() {
if r == 0xFFFD && sz == 1 {
return nil,errBadUnicode(line, offset)
}
switch state.top() {
case scanNone:
if unicode.IsSpace(r) {
// skip
} else if unicode.IsLetter(r) || unicode.IsNumber(r) {
state.push(scanAttr)
beg = offset
} else {
return nil,errBadAttr(line, offset)
}
case scanAttr:
if unicode.IsSpace(r) {
add.attr = line[beg:offset]
d.pairbuf = append(d.pairbuf, add)
if _,ok := d.attrs[string(add.attr)]; ok {
d.havemulti = true
d.multi[string(add.attr)] = struct{}{}
} else {
d.attrs[string(add.attr)] = struct{}{}
}
add.attr,add.val,esc = nil,nil,false
state.pop()
} else if r == '=' {
add.attr = line[beg:offset]
if _,ok := d.attrs[string(add.attr)]; ok {
d.havemulti = true
d.multi[string(add.attr)] = struct{}{}
} else {
d.attrs[string(add.attr)] = struct{}{}
}
state.pop()
state.push(scanValueStart)
} else if !(unicode.IsLetter(r) || unicode.IsNumber(r)) {
return nil,errBadAttr(line, offset)
}
case scanValueStart:
beg = offset
state.pop()
state.push(scanValue)
if r == '\'' {
state.push(scanQuoteStart)
break
}
fallthrough
case scanValue:
if unicode.IsSpace(r) {
state.pop()
add.val = line[beg:offset]
if esc {
add.val = bytes.Replace(add.val, []byte("''"), []byte("'"), -1)
}
d.pairbuf = append(d.pairbuf, add)
add.attr,add.val = nil,nil
}
case scanQuoteClose:
state.pop()
if r == '\'' {
esc = true
state.push(scanQuoteValue)
} else if unicode.IsSpace(r) {
state.pop()
add.val = line[beg:offset-1]
if esc {
add.val = bytes.Replace(add.val, []byte("''"), []byte("'"), -1)
}
d.pairbuf = append(d.pairbuf, add)
add.attr,add.val,esc = nil,nil,false
} else {
return nil,errMissingSpace(line, offset)
}
case scanQuoteStart:
state.pop()
if r != '\'' {
beg++
state.pop()
state.push(scanQuoteValue)
} else {
esc = true
}
case scanQuoteValue:
if r == '\'' {
state.pop()
state.push(scanQuoteClose)
} else if r == '\n' {
return nil,errUnterminated(line, offset)
}
}
offset += int64(sz)
}
switch state.top() {
case scanQuoteValue, scanQuoteStart:
return nil,errUnterminated(line, offset)
case scanAttr:
add.attr = line[beg:offset]
if _,ok := d.attrs[string(add.attr)]; ok {
d.havemulti = true
d.multi[string(add.attr)] = struct{}{}
} else {
d.attrs[string(add.attr)] = struct{}{}
}
d.pairbuf = append(d.pairbuf, add)
case scanValueStart:
beg = offset
fallthrough
case scanQuoteClose:
offset--
fallthrough
case scanValue:
add.val = line[beg:offset]
if esc {
add.val = bytes.Replace(add.val, []byte("''"), []byte("'"), -1)
}
d.pairbuf = append(d.pairbuf, add)
}
return d.pairbuf,nil
}
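
The tokenizer above works one (possibly continued) line at a time, which is what Decode consumes. A sketch of streaming use, assuming NewDecoder and Decode behave as documented in ndb.go; the input lines and the map element type are illustrative:

package main

import (
	"fmt"
	"io"
	"log"
	"strings"

	"ndb" // hypothetical import path for this package
)

func main() {
	src := strings.NewReader(
		"ipnet=home ip=192.168.1.0 ipmask=255.255.255.0\n" +
			"host=gateway ip=192.168.1.1\n")

	d := ndb.NewDecoder(src)
	for {
		// Each call to Decode reads and parses one line.
		m := make(map[string]string)
		err := d.Decode(&m)
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(m)
	}
}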

read_test.go (new file, 223 lines)

@@ -0,0 +1,223 @@
package ndb
import (
"testing"
"fmt"
)
type screenCfg struct {
Title string
Width, Height uint16
R,G,B,A uint16
}
type netCfg struct {
Host string `ndb:"hostname"`
Vlan []int `ndb:"vlan"`
Native int `ndb:"nativevlan"`
}
var multiMap = []struct {
in string
out map[string] []string
}{
{
in: "user=clive user=david user=trenton group=dirty-dozen",
out: map[string] []string {
"user": []string {"clive", "david", "trenton"},
"group": []string {"dirty-dozen"},
},
},
}
var advancedTests = []struct {
in string
out netCfg
}{
{ in: "hostname=p2-jbs537 vlan=66 vlan=35 nativevlan=218",
out: netCfg {
Host: "p2-jbs537",
Vlan: []int {66, 35},
Native: 218,
},
},
}
var structTests = []struct {
in string
out screenCfg
}{
{
in: "Title='Hollywood movie' Width=640 Height=400 A=8",
out: screenCfg {
Title: "Hollywood movie",
Width: 640,
Height: 400,
A: 8,
},
},
}
var mapTests = []struct {
in string
out map[string] string
}{
{
in: "ipnet=murrayhill ip=135.104.0.0 ipmask=255.255.0.0",
out: map[string] string {
"ipnet": "murray-hill",
"ip": "135.104.0.0",
"ipmask": "255.255.0.0",
},
},
}
func TestStruct(t *testing.T) {
var cfg screenCfg
for _,tt := range structTests {
if err := Parse([]byte(tt.in), &cfg); err != nil {
t.Error(err)
} else if cfg != tt.out {
t.Errorf("Got %v, wanted %v", cfg, tt.out)
}
t.Logf("%s == %v", tt.in, cfg)
}
}
func TestMap(t *testing.T) {
var net map[string] string
for _,tt := range mapTests {
if err := Parse([]byte(tt.in), &net); err != nil {
t.Error(err)
} else if fmt.Sprint(net) != fmt.Sprint(tt.out) {
t.Errorf("Got %v, wanted %v", net, tt.out)
}
t.Logf("%s == %v", tt.in, net)
}
}
func TestAdvanced(t *testing.T) {
var net netCfg
for _,tt := range advancedTests {
if err := Parse([]byte(tt.in), &net); err != nil {
t.Error(err)
} else if fmt.Sprint(tt.out) != fmt.Sprint(net) {
t.Errorf("Got %v, wanted %v", net, tt.out)
}
t.Logf("%s == %v", tt.in, net)
}
}
func TestMultiMap(t *testing.T) {
var m map[string] []string
for _,tt := range multiMap {
if err := Parse([]byte(tt.in), &m); err != nil {
t.Error(err)
} else if fmt.Sprint(tt.out) != fmt.Sprint(m) {
t.Errorf("Got %v, wanted %v", m, tt.out)
}
t.Logf("%s == %v", tt.in, m)
}
}
func netEqual(t *testing.T, n1, n2 netCfg) bool {
if len(n1.Vlan) != len(n2.Vlan) {
return false
}
for i := range n1.Vlan {
if n1.Vlan[i] != n2.Vlan[i] {
return false
}
}
return n1.Host == n2.Host && n1.Native == n2.Native
}
func mapEqual(t *testing.T, m1, m2 map[string] string) bool {
for k := range m1 {
if m1[k] != m2[k] {
t.Logf("%v != %v", m1[k], m2[k])
return false
}
}
return true
}
package ndb
import (
"testing"
"bytes"
)
var parseTests = []struct {
in []byte
out []pair
}{
{
in: []byte("key1=val1 key2=val2 key3=val3"),
out: []pair {
{[]byte("key1"),[]byte("val1")},
{[]byte("key2"),[]byte("val2")},
{[]byte("key3"),[]byte("val3")}},
},
{
in: []byte("title='Some value with spaces' width=340 height=200"),
out: []pair {
{[]byte("title"),[]byte("Some value with spaces")},
{[]byte("width"),[]byte("340")},
{[]byte("height"),[]byte("200")}},
},
{
in: []byte("title='Dave''s pasta' sq=Davis cost=$$"),
out: []pair {
{[]byte("title"),[]byte("Dave's pasta")},
{[]byte("sq"),[]byte("Davis")},
{[]byte("cost"),[]byte("$$")}},
},
{
in: []byte("action=''bradley key=jay mod=ctrl+alt+shift"),
out: []pair {
{[]byte("action"),[]byte("'bradley")},
{[]byte("key"),[]byte("jay")},
{[]byte("mod"),[]byte("ctrl+alt+shift")}},
},
{
in: []byte("action=reload key='' mod=ctrl+alt+shift"),
out: []pair {
{[]byte("action"),[]byte("reload")},
{[]byte("key"),[]byte("'")},
{[]byte("mod"),[]byte("ctrl+alt+shift")}},
},
{
in: []byte("s='spaces and '' quotes'"),
out: []pair {
{[]byte("s"),[]byte("spaces and ' quotes")}},
},
{
in: []byte("esc='Use '''' to escape a '''"),
out: []pair {
{[]byte("esc"),[]byte("Use '' to escape a '")}},
},
}
func Test_parsing(t *testing.T) {
for i,tt := range parseTests {
d := NewDecoder(bytes.NewReader(tt.in))
p,err := d.getPairs()
if err != nil {
t.Error(err)
t.FailNow()
} else {
for j := range tt.out {
if j >= len(p) || !match(p[j],tt.out[j]) {
t.Errorf("%d: getPairs %s => %v, want %v",i, tt.in, p, tt.out)
t.FailNow()
}
}
}
}
}
func match(p1, p2 pair) bool {
return (bytes.Compare(p1.attr, p2.attr) == 0) && (bytes.Compare(p1.val, p2.val) == 0)
}


write_test.go (new file, empty)
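
write_test.go is empty and Emit above is still a stub, matching the "Encoding" item left to do in the commit message. A minimal sketch, under the quoting rules from the package comment, of the value escaping an encoder would need; quoteValue is a hypothetical helper, not part of the package:

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// quoteValue doubles embedded single quotes and wraps the result in
// single quotes when the value contains white space, per the format
// described in the ndb package comment.
func quoteValue(v string) string {
	escaped := strings.Replace(v, "'", "''", -1)
	if strings.IndexFunc(v, unicode.IsSpace) >= 0 {
		return "'" + escaped + "'"
	}
	return escaped
}

func main() {
	fmt.Println("title=" + quoteValue("Dave's pasta")) // title='Dave''s pasta'
	fmt.Println("example3=" + quoteValue("can't"))     // example3=can''t
}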