Remove some useless files.
This commit is contained in:
parent 30255dc679
commit ae8829dbc8

259	rest/dav/file.go
@@ -127,17 +127,6 @@ func (d Dir) Stat(ctx context.Context, name string) (os.FileInfo, error) {
 	return os.Stat(name)
 }
 
-// NewMemFS returns a new in-memory FileSystem implementation.
-func NewMemFS() FileSystem {
-	return &memFS{
-		root: memFSNode{
-			children: make(map[string]*memFSNode),
-			mode:     0660 | os.ModeDir,
-			modTime:  time.Now(),
-		},
-	}
-}
-
 // A memFS implements FileSystem, storing all metadata and actual file data
 // in-memory. No limits on filesystem size are used, so it is not recommended
 // this be used where the clients are untrusted.
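Not part of the commit: a minimal, illustrative sketch of how the removed NewMemFS constructor is typically used, assuming the FileSystem interface here mirrors golang.org/x/net/webdav (which this package tracks); the file name and contents below are made up.

package main

import (
	"context"
	"fmt"
	"os"

	"golang.org/x/net/webdav" // upstream equivalent of the removed rest/dav memFS
)

func main() {
	ctx := context.Background()
	fs := webdav.NewMemFS() // everything lives in memory; nothing touches disk

	f, err := fs.OpenFile(ctx, "/hello.txt", os.O_RDWR|os.O_CREATE, 0644)
	if err != nil {
		panic(err)
	}
	f.Write([]byte("hello"))
	f.Close()

	fi, _ := fs.Stat(ctx, "/hello.txt")
	fmt.Println(fi.Name(), fi.Size()) // hello.txt 5
}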
@@ -331,57 +320,6 @@ func (fs *memFS) RemoveAll(ctx context.Context, name string) error {
 	return nil
 }
 
-func (fs *memFS) Rename(ctx context.Context, oldName, newName string) error {
-	fs.mu.Lock()
-	defer fs.mu.Unlock()
-
-	oldName = SlashClean(oldName)
-	newName = SlashClean(newName)
-	if oldName == newName {
-		return nil
-	}
-	if strings.HasPrefix(newName, oldName+"/") {
-		// We can't rename oldName to be a sub-directory of itself.
-		return os.ErrInvalid
-	}
-
-	oDir, oFrag, err := fs.find("rename", oldName)
-	if err != nil {
-		return err
-	}
-	if oDir == nil {
-		// We can't rename from the root.
-		return os.ErrInvalid
-	}
-
-	nDir, nFrag, err := fs.find("rename", newName)
-	if err != nil {
-		return err
-	}
-	if nDir == nil {
-		// We can't rename to the root.
-		return os.ErrInvalid
-	}
-
-	oNode, ok := oDir.children[oFrag]
-	if !ok {
-		return os.ErrNotExist
-	}
-	if oNode.children != nil {
-		if nNode, ok := nDir.children[nFrag]; ok {
-			if nNode.children == nil {
-				return errNotADirectory
-			}
-			if len(nNode.children) != 0 {
-				return errDirectoryNotEmpty
-			}
-		}
-	}
-	delete(oDir.children, oFrag)
-	nDir.children[nFrag] = oNode
-	return nil
-}
-
 func (fs *memFS) Stat(ctx context.Context, name string) (os.FileInfo, error) {
 	fs.mu.Lock()
 	defer fs.mu.Unlock()
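The guard near the top of the removed Rename is the interesting part: a directory must not be renamed into its own subtree, and this is detected with a plain prefix check on the slash-cleaned paths. A standalone sketch of that check follows (the helper name is hypothetical; the original inlines the check, and path.Clean("/"+name) stands in for SlashClean):

package main

import (
	"fmt"
	"path"
	"strings"
)

// renameIntoSelf reports whether moving oldName to newName would place a
// directory inside its own subtree, mirroring the strings.HasPrefix check
// in the removed memFS.Rename.
func renameIntoSelf(oldName, newName string) bool {
	oldName = path.Clean("/" + oldName)
	newName = path.Clean("/" + newName)
	return strings.HasPrefix(newName, oldName+"/")
}

func main() {
	fmt.Println(renameIntoSelf("/a", "/a/b")) // true: rejected with os.ErrInvalid
	fmt.Println(renameIntoSelf("/a", "/b/a")) // false: allowed
}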
@@ -596,200 +534,3 @@ func (f *memFile) Write(p []byte) (int, error) {
 	f.n.modTime = time.Now()
 	return lenp, nil
 }
-
-// moveFiles moves files and/or directories from src to dst.
-//
-// See section 9.9.4 for when various HTTP status codes apply.
-func moveFiles(ctx context.Context, fs FileSystem, src, dst string, overwrite bool) (status int, err error) {
-	created := false
-	if _, err := fs.Stat(ctx, dst); err != nil {
-		if !os.IsNotExist(err) {
-			return http.StatusForbidden, err
-		}
-		created = true
-	} else if overwrite {
-		// Section 9.9.3 says that "If a resource exists at the destination
-		// and the Overwrite header is "T", then prior to performing the move,
-		// the server must perform a DELETE with "Depth: infinity" on the
-		// destination resource.
-		if err := fs.RemoveAll(ctx, dst); err != nil {
-			return http.StatusForbidden, err
-		}
-	} else {
-		return http.StatusPreconditionFailed, os.ErrExist
-	}
-	if err := fs.Rename(ctx, src, dst); err != nil {
-		return http.StatusForbidden, err
-	}
-	if created {
-		return http.StatusCreated, nil
-	}
-	return http.StatusNoContent, nil
-}
-
-func copyProps(dst, src File) error {
-	d, ok := dst.(DeadPropsHolder)
-	if !ok {
-		return nil
-	}
-	s, ok := src.(DeadPropsHolder)
-	if !ok {
-		return nil
-	}
-	m, err := s.DeadProps()
-	if err != nil {
-		return err
-	}
-	props := make([]Property, 0, len(m))
-	for _, prop := range m {
-		props = append(props, prop)
-	}
-	_, err = d.Patch([]Proppatch{{Props: props}})
-	return err
-}
-
-// copyFiles copies files and/or directories from src to dst.
-//
-// See section 9.8.5 for when various HTTP status codes apply.
-func copyFiles(ctx context.Context, fs FileSystem, src, dst string, overwrite bool, depth int, recursion int) (status int, err error) {
-	if recursion == 1000 {
-		return http.StatusInternalServerError, errRecursionTooDeep
-	}
-	recursion++
-
-	// TODO: section 9.8.3 says that "Note that an infinite-depth COPY of /A/
-	// into /A/B/ could lead to infinite recursion if not handled correctly."
-
-	srcFile, err := fs.OpenFile(ctx, src, os.O_RDONLY, 0)
-	if err != nil {
-		if os.IsNotExist(err) {
-			return http.StatusNotFound, err
-		}
-		return http.StatusInternalServerError, err
-	}
-	defer srcFile.Close()
-	srcStat, err := srcFile.Stat()
-	if err != nil {
-		if os.IsNotExist(err) {
-			return http.StatusNotFound, err
-		}
-		return http.StatusInternalServerError, err
-	}
-	srcPerm := srcStat.Mode() & os.ModePerm
-
-	created := false
-	if _, err := fs.Stat(ctx, dst); err != nil {
-		if os.IsNotExist(err) {
-			created = true
-		} else {
-			return http.StatusForbidden, err
-		}
-	} else {
-		if !overwrite {
-			return http.StatusPreconditionFailed, os.ErrExist
-		}
-		if err := fs.RemoveAll(ctx, dst); err != nil && !os.IsNotExist(err) {
-			return http.StatusForbidden, err
-		}
-	}
-
-	if srcStat.IsDir() {
-		if err := fs.Mkdir(ctx, dst, srcPerm); err != nil {
-			return http.StatusForbidden, err
-		}
-		if depth == infiniteDepth {
-			children, err := srcFile.Readdir(-1)
-			if err != nil {
-				return http.StatusForbidden, err
-			}
-			for _, c := range children {
-				name := c.Name()
-				s := path.Join(src, name)
-				d := path.Join(dst, name)
-				cStatus, cErr := copyFiles(ctx, fs, s, d, overwrite, depth, recursion)
-				if cErr != nil {
-					// TODO: MultiStatus.
-					return cStatus, cErr
-				}
-			}
-		}
-
-	} else {
-		dstFile, err := fs.OpenFile(ctx, dst, os.O_RDWR|os.O_CREATE|os.O_TRUNC, srcPerm)
-		if err != nil {
-			if os.IsNotExist(err) {
-				return http.StatusConflict, err
-			}
-			return http.StatusForbidden, err
-
-		}
-		_, copyErr := io.Copy(dstFile, srcFile)
-		propsErr := copyProps(dstFile, srcFile)
-		closeErr := dstFile.Close()
-		if copyErr != nil {
-			return http.StatusInternalServerError, copyErr
-		}
-		if propsErr != nil {
-			return http.StatusInternalServerError, propsErr
-		}
-		if closeErr != nil {
-			return http.StatusInternalServerError, closeErr
-		}
-	}
-
-	if created {
-		return http.StatusCreated, nil
-	}
-	return http.StatusNoContent, nil
-}
-
-// walkFS traverses filesystem fs starting at name up to depth levels.
-//
-// Allowed values for depth are 0, 1 or infiniteDepth. For each visited node,
-// walkFS calls walkFn. If a visited file system node is a directory and
-// walkFn returns filepath.SkipDir, walkFS will skip traversal of this node.
-func walkFS(ctx context.Context, fs FileSystem, depth int, name string, info os.FileInfo, walkFn filepath.WalkFunc) error {
-	// This implementation is based on Walk's code in the standard path/filepath package.
-	err := walkFn(name, info, nil)
-	if err != nil {
-		if info.IsDir() && err == filepath.SkipDir {
-			return nil
-		}
-		return err
-	}
-	if !info.IsDir() || depth == 0 {
-		return nil
-	}
-	if depth == 1 {
-		depth = 0
-	}
-
-	// Read directory names.
-	f, err := fs.OpenFile(ctx, name, os.O_RDONLY, 0)
-	if err != nil {
-		return walkFn(name, info, err)
-	}
-	fileInfos, err := f.Readdir(0)
-	f.Close()
-	if err != nil {
-		return walkFn(name, info, err)
-	}
-
-	for _, fileInfo := range fileInfos {
-		filename := path.Join(name, fileInfo.Name())
-		fileInfo, err := fs.Stat(ctx, filename)
-		if err != nil {
-			if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir {
-				return err
-			}
-		} else {
-			err = walkFS(ctx, fs, depth, filename, fileInfo, walkFn)
-			if err != nil {
-				if !fileInfo.IsDir() || err != filepath.SkipDir {
-					return err
-				}
-			}
-		}
-	}
-	return nil
-}
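The removed walkFS drives an ordinary filepath.WalkFunc over a FileSystem with WebDAV Depth semantics (0, 1 or infinite) and honours filepath.SkipDir. walkFS itself was unexported, so the sketch below only shows the callback shape it expects, exercised against the real filesystem via filepath.Walk; the "tmp" directory name is just an example, not anything from this commit.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// walkFn is the kind of callback the removed walkFS invoked for every node.
func walkFn(name string, info os.FileInfo, err error) error {
	if err != nil {
		return err // walkFS forwards OpenFile/Readdir/Stat errors to the callback
	}
	if info.IsDir() && info.Name() == "tmp" {
		return filepath.SkipDir // prunes this subtree, exactly as walkFS does
	}
	fmt.Println(name)
	return nil
}

func main() {
	filepath.Walk(".", walkFn)
}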
File diff suppressed because it is too large
173	rest/dav/if.go
@@ -1,173 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package dav
-
-// The If header is covered by Section 10.4.
-// http://www.webdav.org/specs/rfc4918.html#HEADER_If
-
-import (
-	"strings"
-)
-
-// ifHeader is a disjunction (OR) of ifLists.
-type ifHeader struct {
-	lists []ifList
-}
-
-// ifList is a conjunction (AND) of Conditions, and an optional resource tag.
-type ifList struct {
-	resourceTag string
-	conditions  []Condition
-}
-
-// parseIfHeader parses the "If: foo bar" HTTP header. The httpHeader string
-// should omit the "If:" prefix and have any "\r\n"s collapsed to a " ", as is
-// returned by req.Header.Get("If") for a http.Request req.
-func parseIfHeader(httpHeader string) (h ifHeader, ok bool) {
-	s := strings.TrimSpace(httpHeader)
-	switch tokenType, _, _ := lex(s); tokenType {
-	case '(':
-		return parseNoTagLists(s)
-	case angleTokenType:
-		return parseTaggedLists(s)
-	default:
-		return ifHeader{}, false
-	}
-}
-
-func parseNoTagLists(s string) (h ifHeader, ok bool) {
-	for {
-		l, remaining, ok := parseList(s)
-		if !ok {
-			return ifHeader{}, false
-		}
-		h.lists = append(h.lists, l)
-		if remaining == "" {
-			return h, true
-		}
-		s = remaining
-	}
-}
-
-func parseTaggedLists(s string) (h ifHeader, ok bool) {
-	resourceTag, n := "", 0
-	for first := true; ; first = false {
-		tokenType, tokenStr, remaining := lex(s)
-		switch tokenType {
-		case angleTokenType:
-			if !first && n == 0 {
-				return ifHeader{}, false
-			}
-			resourceTag, n = tokenStr, 0
-			s = remaining
-		case '(':
-			n++
-			l, remaining, ok := parseList(s)
-			if !ok {
-				return ifHeader{}, false
-			}
-			l.resourceTag = resourceTag
-			h.lists = append(h.lists, l)
-			if remaining == "" {
-				return h, true
-			}
-			s = remaining
-		default:
-			return ifHeader{}, false
-		}
-	}
-}
-
-func parseList(s string) (l ifList, remaining string, ok bool) {
-	tokenType, _, s := lex(s)
-	if tokenType != '(' {
-		return ifList{}, "", false
-	}
-	for {
-		tokenType, _, remaining = lex(s)
-		if tokenType == ')' {
-			if len(l.conditions) == 0 {
-				return ifList{}, "", false
-			}
-			return l, remaining, true
-		}
-		c, remaining, ok := parseCondition(s)
-		if !ok {
-			return ifList{}, "", false
-		}
-		l.conditions = append(l.conditions, c)
-		s = remaining
-	}
-}
-
-func parseCondition(s string) (c Condition, remaining string, ok bool) {
-	tokenType, tokenStr, s := lex(s)
-	if tokenType == notTokenType {
-		c.Not = true
-		tokenType, tokenStr, s = lex(s)
-	}
-	switch tokenType {
-	case strTokenType, angleTokenType:
-		c.Token = tokenStr
-	case squareTokenType:
-		c.ETag = tokenStr
-	default:
-		return Condition{}, "", false
-	}
-	return c, s, true
-}
-
-// Single-rune tokens like '(' or ')' have a token type equal to their rune.
-// All other tokens have a negative token type.
-const (
-	errTokenType    = rune(-1)
-	eofTokenType    = rune(-2)
-	strTokenType    = rune(-3)
-	notTokenType    = rune(-4)
-	angleTokenType  = rune(-5)
-	squareTokenType = rune(-6)
-)
-
-func lex(s string) (tokenType rune, tokenStr string, remaining string) {
-	// The net/textproto Reader that parses the HTTP header will collapse
-	// Linear White Space that spans multiple "\r\n" lines to a single " ",
-	// so we don't need to look for '\r' or '\n'.
-	for len(s) > 0 && (s[0] == '\t' || s[0] == ' ') {
-		s = s[1:]
-	}
-	if len(s) == 0 {
-		return eofTokenType, "", ""
-	}
-	i := 0
-loop:
-	for ; i < len(s); i++ {
-		switch s[i] {
-		case '\t', ' ', '(', ')', '<', '>', '[', ']':
-			break loop
-		}
-	}
-
-	if i != 0 {
-		tokenStr, remaining = s[:i], s[i:]
-		if tokenStr == "Not" {
-			return notTokenType, "", remaining
-		}
-		return strTokenType, tokenStr, remaining
-	}
-
-	j := 0
-	switch s[0] {
-	case '<':
-		j, tokenType = strings.IndexByte(s, '>'), angleTokenType
-	case '[':
-		j, tokenType = strings.IndexByte(s, ']'), squareTokenType
-	default:
-		return rune(s[0]), "", s[1:]
-	}
-	if j < 0 {
-		return errTokenType, "", ""
-	}
-	return tokenType, s[1:j], s[j+1:]
-}
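For orientation, a sketch of what the removed parseIfHeader produces for a tagged If header. All of these identifiers are unexported, so this would only compile inside the dav package; the resource path, lock token and ETag values are invented for the example and are not taken from this commit.

// Illustrative only — not part of this commit.
func exampleParseIfHeader() {
	h, ok := parseIfHeader(`</locked/> (<urn:uuid:150852e2-3847-42d5-8cbe-0f4f296f26cf> ["an etag"])`)
	if !ok {
		return
	}
	l := h.lists[0]
	_ = l.resourceTag   // "/locked/"
	_ = l.conditions[0] // Condition{Token: "urn:uuid:150852e2-3847-42d5-8cbe-0f4f296f26cf"}
	_ = l.conditions[1] // Condition{ETag: `"an etag"`}
}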
@@ -1,322 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package dav
-
-import (
-	"reflect"
-	"strings"
-	"testing"
-)
-
-func TestParseIfHeader(t *testing.T) {
-	// The "section x.y.z" test cases come from section x.y.z of the spec at
-	// http://www.webdav.org/specs/rfc4918.html
-	testCases := []struct {
-		desc  string
-		input string
-		want  ifHeader
-	}{{
-		"bad: empty",
-		``,
-		ifHeader{},
-	}, {
-		"bad: no parens",
-		`foobar`,
-		ifHeader{},
-	}, {
-		"bad: empty list #1",
-		`()`,
-		ifHeader{},
-	}, {
-		"bad: empty list #2",
-		`(a) (b c) () (d)`,
-		ifHeader{},
-	}, {
-		"bad: no list after resource #1",
-		`<foo>`,
-		ifHeader{},
-	}, {
-		"bad: no list after resource #2",
-		`<foo> <bar> (a)`,
-		ifHeader{},
-	}, {
-		"bad: no list after resource #3",
-		`<foo> (a) (b) <bar>`,
-		ifHeader{},
-	}, {
-		"bad: no-tag-list followed by tagged-list",
-		`(a) (b) <foo> (c)`,
-		ifHeader{},
-	}, {
-		"bad: unfinished list",
-		`(a`,
-		ifHeader{},
-	}, {
-		"bad: unfinished ETag",
-		`([b`,
-		ifHeader{},
-	}, {
-		"bad: unfinished Notted list",
-		`(Not a`,
-		ifHeader{},
-	}, {
-		"bad: double Not",
-		`(Not Not a)`,
-		ifHeader{},
-	}, {
-		"good: one list with a Token",
-		`(a)`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Token: `a`,
-				}},
-			}},
-		},
-	}, {
-		"good: one list with an ETag",
-		`([a])`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					ETag: `a`,
-				}},
-			}},
-		},
-	}, {
-		"good: one list with three Nots",
-		`(Not a Not b Not [d])`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Not:   true,
-					Token: `a`,
-				}, {
-					Not:   true,
-					Token: `b`,
-				}, {
-					Not:  true,
-					ETag: `d`,
-				}},
-			}},
-		},
-	}, {
-		"good: two lists",
-		`(a) (b)`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Token: `a`,
-				}},
-			}, {
-				conditions: []Condition{{
-					Token: `b`,
-				}},
-			}},
-		},
-	}, {
-		"good: two Notted lists",
-		`(Not a) (Not b)`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Not:   true,
-					Token: `a`,
-				}},
-			}, {
-				conditions: []Condition{{
-					Not:   true,
-					Token: `b`,
-				}},
-			}},
-		},
-	}, {
-		"section 7.5.1",
-		`<http://www.example.com/users/f/fielding/index.html>
-(<urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6>)`,
-		ifHeader{
-			lists: []ifList{{
-				resourceTag: `http://www.example.com/users/f/fielding/index.html`,
-				conditions: []Condition{{
-					Token: `urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6`,
-				}},
-			}},
-		},
-	}, {
-		"section 7.5.2 #1",
-		`(<urn:uuid:150852e2-3847-42d5-8cbe-0f4f296f26cf>)`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Token: `urn:uuid:150852e2-3847-42d5-8cbe-0f4f296f26cf`,
-				}},
-			}},
-		},
-	}, {
-		"section 7.5.2 #2",
-		`<http://example.com/locked/>
-(<urn:uuid:150852e2-3847-42d5-8cbe-0f4f296f26cf>)`,
-		ifHeader{
-			lists: []ifList{{
-				resourceTag: `http://example.com/locked/`,
-				conditions: []Condition{{
-					Token: `urn:uuid:150852e2-3847-42d5-8cbe-0f4f296f26cf`,
-				}},
-			}},
-		},
-	}, {
-		"section 7.5.2 #3",
-		`<http://example.com/locked/member>
-(<urn:uuid:150852e2-3847-42d5-8cbe-0f4f296f26cf>)`,
-		ifHeader{
-			lists: []ifList{{
-				resourceTag: `http://example.com/locked/member`,
-				conditions: []Condition{{
-					Token: `urn:uuid:150852e2-3847-42d5-8cbe-0f4f296f26cf`,
-				}},
-			}},
-		},
-	}, {
-		"section 9.9.6",
-		`(<urn:uuid:fe184f2e-6eec-41d0-c765-01adc56e6bb4>)
-(<urn:uuid:e454f3f3-acdc-452a-56c7-00a5c91e4b77>)`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Token: `urn:uuid:fe184f2e-6eec-41d0-c765-01adc56e6bb4`,
-				}},
-			}, {
-				conditions: []Condition{{
-					Token: `urn:uuid:e454f3f3-acdc-452a-56c7-00a5c91e4b77`,
-				}},
-			}},
-		},
-	}, {
-		"section 9.10.8",
-		`(<urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4>)`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Token: `urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4`,
-				}},
-			}},
-		},
-	}, {
-		"section 10.4.6",
-		`(<urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2>
-["I am an ETag"])
-(["I am another ETag"])`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Token: `urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2`,
-				}, {
-					ETag: `"I am an ETag"`,
-				}},
-			}, {
-				conditions: []Condition{{
-					ETag: `"I am another ETag"`,
-				}},
-			}},
-		},
-	}, {
-		"section 10.4.7",
-		`(Not <urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2>
-<urn:uuid:58f202ac-22cf-11d1-b12d-002035b29092>)`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Not:   true,
-					Token: `urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2`,
-				}, {
-					Token: `urn:uuid:58f202ac-22cf-11d1-b12d-002035b29092`,
-				}},
-			}},
-		},
-	}, {
-		"section 10.4.8",
-		`(<urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2>)
-(Not <DAV:no-lock>)`,
-		ifHeader{
-			lists: []ifList{{
-				conditions: []Condition{{
-					Token: `urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2`,
-				}},
-			}, {
-				conditions: []Condition{{
-					Not:   true,
-					Token: `DAV:no-lock`,
-				}},
-			}},
-		},
-	}, {
-		"section 10.4.9",
-		`</resource1>
-(<urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2>
-[W/"A weak ETag"]) (["strong ETag"])`,
-		ifHeader{
-			lists: []ifList{{
-				resourceTag: `/resource1`,
-				conditions: []Condition{{
-					Token: `urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2`,
-				}, {
-					ETag: `W/"A weak ETag"`,
-				}},
-			}, {
-				resourceTag: `/resource1`,
-				conditions: []Condition{{
-					ETag: `"strong ETag"`,
-				}},
-			}},
-		},
-	}, {
-		"section 10.4.10",
-		`<http://www.example.com/specs/>
-(<urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2>)`,
-		ifHeader{
-			lists: []ifList{{
-				resourceTag: `http://www.example.com/specs/`,
-				conditions: []Condition{{
-					Token: `urn:uuid:181d4fae-7d8c-11d0-a765-00a0c91e6bf2`,
-				}},
-			}},
-		},
-	}, {
-		"section 10.4.11 #1",
-		`</specs/rfc2518.doc> (["4217"])`,
-		ifHeader{
-			lists: []ifList{{
-				resourceTag: `/specs/rfc2518.doc`,
-				conditions: []Condition{{
-					ETag: `"4217"`,
-				}},
-			}},
-		},
-	}, {
-		"section 10.4.11 #2",
-		`</specs/rfc2518.doc> (Not ["4217"])`,
-		ifHeader{
-			lists: []ifList{{
-				resourceTag: `/specs/rfc2518.doc`,
-				conditions: []Condition{{
-					Not:  true,
-					ETag: `"4217"`,
-				}},
-			}},
-		},
-	}}
-
-	for _, tc := range testCases {
-		got, ok := parseIfHeader(strings.Replace(tc.input, "\n", "", -1))
-		if gotEmpty := reflect.DeepEqual(got, ifHeader{}); gotEmpty == ok {
-			t.Errorf("%s: should be different: empty header == %t, ok == %t", tc.desc, gotEmpty, ok)
-			continue
-		}
-		if !reflect.DeepEqual(got, tc.want) {
-			t.Errorf("%s:\ngot %v\nwant %v", tc.desc, got, tc.want)
-			continue
-		}
-	}
-}
@@ -1,11 +0,0 @@
-This is a fork of the encoding/xml package at ca1d6c4, the last commit before
-https://go.googlesource.com/go/+/c0d6d33 "encoding/xml: restore Go 1.4 name
-space behavior" made late in the lead-up to the Go 1.5 release.
-
-The list of encoding/xml changes is at
-https://go.googlesource.com/go/+log/master/src/encoding/xml
-
-This fork is temporary, and I (nigeltao) expect to revert it after Go 1.6 is
-released.
-
-See http://golang.org/issue/11841
@@ -1,56 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package xml
-
-import "time"
-
-var atomValue = &Feed{
-	XMLName: Name{"http://www.w3.org/2005/Atom", "feed"},
-	Title:   "Example Feed",
-	Link:    []Link{{Href: "http://example.org/"}},
-	Updated: ParseTime("2003-12-13T18:30:02Z"),
-	Author:  Person{Name: "John Doe"},
-	Id:      "urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6",
-
-	Entry: []Entry{
-		{
-			Title:   "Atom-Powered Robots Run Amok",
-			Link:    []Link{{Href: "http://example.org/2003/12/13/atom03"}},
-			Id:      "urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a",
-			Updated: ParseTime("2003-12-13T18:30:02Z"),
-			Summary: NewText("Some text."),
-		},
-	},
-}
-
-var atomXml = `` +
-	`<feed xmlns="http://www.w3.org/2005/Atom" updated="2003-12-13T18:30:02Z">` +
-	`<title>Example Feed</title>` +
-	`<id>urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6</id>` +
-	`<link href="http://example.org/"></link>` +
-	`<author><name>John Doe</name><uri></uri><email></email></author>` +
-	`<entry>` +
-	`<title>Atom-Powered Robots Run Amok</title>` +
-	`<id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>` +
-	`<link href="http://example.org/2003/12/13/atom03"></link>` +
-	`<updated>2003-12-13T18:30:02Z</updated>` +
-	`<author><name></name><uri></uri><email></email></author>` +
-	`<summary>Some text.</summary>` +
-	`</entry>` +
-	`</feed>`
-
-func ParseTime(str string) time.Time {
-	t, err := time.Parse(time.RFC3339, str)
-	if err != nil {
-		panic(err)
-	}
-	return t
-}
-
-func NewText(text string) Text {
-	return Text{
-		Body: text,
-	}
-}
@@ -1,151 +0,0 @@
-// Copyright 2012 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package xml_test
-
-import (
-	"encoding/xml"
-	"fmt"
-	"os"
-)
-
-func ExampleMarshalIndent() {
-	type Address struct {
-		City, State string
-	}
-	type Person struct {
-		XMLName   xml.Name `xml:"person"`
-		Id        int      `xml:"id,attr"`
-		FirstName string   `xml:"name>first"`
-		LastName  string   `xml:"name>last"`
-		Age       int      `xml:"age"`
-		Height    float32  `xml:"height,omitempty"`
-		Married   bool
-		Address
-		Comment string `xml:",comment"`
-	}
-
-	v := &Person{Id: 13, FirstName: "John", LastName: "Doe", Age: 42}
-	v.Comment = " Need more details. "
-	v.Address = Address{"Hanga Roa", "Easter Island"}
-
-	output, err := xml.MarshalIndent(v, " ", " ")
-	if err != nil {
-		fmt.Printf("error: %v\n", err)
-	}
-
-	os.Stdout.Write(output)
-	// Output:
-	// <person id="13">
-	// <name>
-	// <first>John</first>
-	// <last>Doe</last>
-	// </name>
-	// <age>42</age>
-	// <Married>false</Married>
-	// <City>Hanga Roa</City>
-	// <State>Easter Island</State>
-	// <!-- Need more details. -->
-	// </person>
-}
-
-func ExampleEncoder() {
-	type Address struct {
-		City, State string
-	}
-	type Person struct {
-		XMLName   xml.Name `xml:"person"`
-		Id        int      `xml:"id,attr"`
-		FirstName string   `xml:"name>first"`
-		LastName  string   `xml:"name>last"`
-		Age       int      `xml:"age"`
-		Height    float32  `xml:"height,omitempty"`
-		Married   bool
-		Address
-		Comment string `xml:",comment"`
-	}
-
-	v := &Person{Id: 13, FirstName: "John", LastName: "Doe", Age: 42}
-	v.Comment = " Need more details. "
-	v.Address = Address{"Hanga Roa", "Easter Island"}
-
-	enc := xml.NewEncoder(os.Stdout)
-	enc.Indent(" ", " ")
-	if err := enc.Encode(v); err != nil {
-		fmt.Printf("error: %v\n", err)
-	}
-
-	// Output:
-	// <person id="13">
-	// <name>
-	// <first>John</first>
-	// <last>Doe</last>
-	// </name>
-	// <age>42</age>
-	// <Married>false</Married>
-	// <City>Hanga Roa</City>
-	// <State>Easter Island</State>
-	// <!-- Need more details. -->
-	// </person>
-}
-
-// This example demonstrates unmarshaling an XML excerpt into a value with
-// some preset fields. Note that the Phone field isn't modified and that
-// the XML <Company> element is ignored. Also, the Groups field is assigned
-// considering the element path provided in its tag.
-func ExampleUnmarshal() {
-	type Email struct {
-		Where string `xml:"where,attr"`
-		Addr  string
-	}
-	type Address struct {
-		City, State string
-	}
-	type Result struct {
-		XMLName xml.Name `xml:"Person"`
-		Name    string   `xml:"FullName"`
-		Phone   string
-		Email   []Email
-		Groups  []string `xml:"Group>Value"`
-		Address
-	}
-	v := Result{Name: "none", Phone: "none"}
-
-	data := `
-<Person>
-<FullName>Grace R. Emlin</FullName>
-<Company>Example Inc.</Company>
-<Email where="home">
-<Addr>gre@example.com</Addr>
-</Email>
-<Email where='work'>
-<Addr>gre@work.com</Addr>
-</Email>
-<Group>
-<Value>Friends</Value>
-<Value>Squash</Value>
-</Group>
-<City>Hanga Roa</City>
-<State>Easter Island</State>
-</Person>
-`
-	err := xml.Unmarshal([]byte(data), &v)
-	if err != nil {
-		fmt.Printf("error: %v", err)
-		return
-	}
-	fmt.Printf("XMLName: %#v\n", v.XMLName)
-	fmt.Printf("Name: %q\n", v.Name)
-	fmt.Printf("Phone: %q\n", v.Phone)
-	fmt.Printf("Email: %v\n", v.Email)
-	fmt.Printf("Groups: %v\n", v.Groups)
-	fmt.Printf("Address: %v\n", v.Address)
-	// Output:
-	// XMLName: xml.Name{Space:"", Local:"Person"}
-	// Name: "Grace R. Emlin"
-	// Phone: "none"
-	// Email: [{home gre@example.com} {work gre@work.com}]
-	// Groups: [Friends Squash]
-	// Address: {Hanga Roa Easter Island}
-}
File diff suppressed because it is too large
@ -1,744 +0,0 @@
|
|||||||
// Copyright 2009 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package xml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"reflect"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Stripped down Atom feed data structures.
|
|
||||||
|
|
||||||
func TestUnmarshalFeed(t *testing.T) {
|
|
||||||
var f Feed
|
|
||||||
if err := Unmarshal([]byte(atomFeedString), &f); err != nil {
|
|
||||||
t.Fatalf("Unmarshal: %s", err)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(f, atomFeed) {
|
|
||||||
t.Fatalf("have %#v\nwant %#v", f, atomFeed)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// hget http://codereview.appspot.com/rss/mine/rsc
|
|
||||||
const atomFeedString = `
|
|
||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="en-us" updated="2009-10-04T01:35:58+00:00"><title>Code Review - My issues</title><link href="http://codereview.appspot.com/" rel="alternate"></link><link href="http://codereview.appspot.com/rss/mine/rsc" rel="self"></link><id>http://codereview.appspot.com/</id><author><name>rietveld<></name></author><entry><title>rietveld: an attempt at pubsubhubbub
|
|
||||||
</title><link href="http://codereview.appspot.com/126085" rel="alternate"></link><updated>2009-10-04T01:35:58+00:00</updated><author><name>email-address-removed</name></author><id>urn:md5:134d9179c41f806be79b3a5f7877d19a</id><summary type="html">
|
|
||||||
An attempt at adding pubsubhubbub support to Rietveld.
|
|
||||||
http://code.google.com/p/pubsubhubbub
|
|
||||||
http://code.google.com/p/rietveld/issues/detail?id=155
|
|
||||||
|
|
||||||
The server side of the protocol is trivial:
|
|
||||||
1. add a &lt;link rel=&quot;hub&quot; href=&quot;hub-server&quot;&gt; tag to all
|
|
||||||
feeds that will be pubsubhubbubbed.
|
|
||||||
2. every time one of those feeds changes, tell the hub
|
|
||||||
with a simple POST request.
|
|
||||||
|
|
||||||
I have tested this by adding debug prints to a local hub
|
|
||||||
server and checking that the server got the right publish
|
|
||||||
requests.
|
|
||||||
|
|
||||||
I can&#39;t quite get the server to work, but I think the bug
|
|
||||||
is not in my code. I think that the server expects to be
|
|
||||||
able to grab the feed and see the feed&#39;s actual URL in
|
|
||||||
the link rel=&quot;self&quot;, but the default value for that drops
|
|
||||||
the :port from the URL, and I cannot for the life of me
|
|
||||||
figure out how to get the Atom generator deep inside
|
|
||||||
django not to do that, or even where it is doing that,
|
|
||||||
or even what code is running to generate the Atom feed.
|
|
||||||
(I thought I knew but I added some assert False statements
|
|
||||||
and it kept running!)
|
|
||||||
|
|
||||||
Ignoring that particular problem, I would appreciate
|
|
||||||
feedback on the right way to get the two values at
|
|
||||||
the top of feeds.py marked NOTE(rsc).
|
|
||||||
|
|
||||||
|
|
||||||
</summary></entry><entry><title>rietveld: correct tab handling
|
|
||||||
</title><link href="http://codereview.appspot.com/124106" rel="alternate"></link><updated>2009-10-03T23:02:17+00:00</updated><author><name>email-address-removed</name></author><id>urn:md5:0a2a4f19bb815101f0ba2904aed7c35a</id><summary type="html">
|
|
||||||
This fixes the buggy tab rendering that can be seen at
|
|
||||||
http://codereview.appspot.com/116075/diff/1/2
|
|
||||||
|
|
||||||
The fundamental problem was that the tab code was
|
|
||||||
not being told what column the text began in, so it
|
|
||||||
didn&#39;t know where to put the tab stops. Another problem
|
|
||||||
was that some of the code assumed that string byte
|
|
||||||
offsets were the same as column offsets, which is only
|
|
||||||
true if there are no tabs.
|
|
||||||
|
|
||||||
In the process of fixing this, I cleaned up the arguments
|
|
||||||
to Fold and ExpandTabs and renamed them Break and
|
|
||||||
_ExpandTabs so that I could be sure that I found all the
|
|
||||||
call sites. I also wanted to verify that ExpandTabs was
|
|
||||||
not being used from outside intra_region_diff.py.
|
|
||||||
|
|
||||||
|
|
||||||
</summary></entry></feed> `
|
|
||||||
|
|
||||||
type Feed struct {
|
|
||||||
XMLName Name `xml:"http://www.w3.org/2005/Atom feed"`
|
|
||||||
Title string `xml:"title"`
|
|
||||||
Id string `xml:"id"`
|
|
||||||
Link []Link `xml:"link"`
|
|
||||||
Updated time.Time `xml:"updated,attr"`
|
|
||||||
Author Person `xml:"author"`
|
|
||||||
Entry []Entry `xml:"entry"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Entry struct {
|
|
||||||
Title string `xml:"title"`
|
|
||||||
Id string `xml:"id"`
|
|
||||||
Link []Link `xml:"link"`
|
|
||||||
Updated time.Time `xml:"updated"`
|
|
||||||
Author Person `xml:"author"`
|
|
||||||
Summary Text `xml:"summary"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Link struct {
|
|
||||||
Rel string `xml:"rel,attr,omitempty"`
|
|
||||||
Href string `xml:"href,attr"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Person struct {
|
|
||||||
Name string `xml:"name"`
|
|
||||||
URI string `xml:"uri"`
|
|
||||||
Email string `xml:"email"`
|
|
||||||
InnerXML string `xml:",innerxml"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Text struct {
|
|
||||||
Type string `xml:"type,attr,omitempty"`
|
|
||||||
Body string `xml:",chardata"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var atomFeed = Feed{
|
|
||||||
XMLName: Name{"http://www.w3.org/2005/Atom", "feed"},
|
|
||||||
Title: "Code Review - My issues",
|
|
||||||
Link: []Link{
|
|
||||||
{Rel: "alternate", Href: "http://codereview.appspot.com/"},
|
|
||||||
{Rel: "self", Href: "http://codereview.appspot.com/rss/mine/rsc"},
|
|
||||||
},
|
|
||||||
Id: "http://codereview.appspot.com/",
|
|
||||||
Updated: ParseTime("2009-10-04T01:35:58+00:00"),
|
|
||||||
Author: Person{
|
|
||||||
Name: "rietveld<>",
|
|
||||||
InnerXML: "<name>rietveld<></name>",
|
|
||||||
},
|
|
||||||
Entry: []Entry{
|
|
||||||
{
|
|
||||||
Title: "rietveld: an attempt at pubsubhubbub\n",
|
|
||||||
Link: []Link{
|
|
||||||
{Rel: "alternate", Href: "http://codereview.appspot.com/126085"},
|
|
||||||
},
|
|
||||||
Updated: ParseTime("2009-10-04T01:35:58+00:00"),
|
|
||||||
Author: Person{
|
|
||||||
Name: "email-address-removed",
|
|
||||||
InnerXML: "<name>email-address-removed</name>",
|
|
||||||
},
|
|
||||||
Id: "urn:md5:134d9179c41f806be79b3a5f7877d19a",
|
|
||||||
Summary: Text{
|
|
||||||
Type: "html",
|
|
||||||
Body: `
|
|
||||||
An attempt at adding pubsubhubbub support to Rietveld.
|
|
||||||
http://code.google.com/p/pubsubhubbub
|
|
||||||
http://code.google.com/p/rietveld/issues/detail?id=155
|
|
||||||
|
|
||||||
The server side of the protocol is trivial:
|
|
||||||
1. add a <link rel="hub" href="hub-server"> tag to all
|
|
||||||
feeds that will be pubsubhubbubbed.
|
|
||||||
2. every time one of those feeds changes, tell the hub
|
|
||||||
with a simple POST request.
|
|
||||||
|
|
||||||
I have tested this by adding debug prints to a local hub
|
|
||||||
server and checking that the server got the right publish
|
|
||||||
requests.
|
|
||||||
|
|
||||||
I can't quite get the server to work, but I think the bug
|
|
||||||
is not in my code. I think that the server expects to be
|
|
||||||
able to grab the feed and see the feed's actual URL in
|
|
||||||
the link rel="self", but the default value for that drops
|
|
||||||
the :port from the URL, and I cannot for the life of me
|
|
||||||
figure out how to get the Atom generator deep inside
|
|
||||||
django not to do that, or even where it is doing that,
|
|
||||||
or even what code is running to generate the Atom feed.
|
|
||||||
(I thought I knew but I added some assert False statements
|
|
||||||
and it kept running!)
|
|
||||||
|
|
||||||
Ignoring that particular problem, I would appreciate
|
|
||||||
feedback on the right way to get the two values at
|
|
||||||
the top of feeds.py marked NOTE(rsc).
|
|
||||||
|
|
||||||
|
|
||||||
`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Title: "rietveld: correct tab handling\n",
|
|
||||||
Link: []Link{
|
|
||||||
{Rel: "alternate", Href: "http://codereview.appspot.com/124106"},
|
|
||||||
},
|
|
||||||
Updated: ParseTime("2009-10-03T23:02:17+00:00"),
|
|
||||||
Author: Person{
|
|
||||||
Name: "email-address-removed",
|
|
||||||
InnerXML: "<name>email-address-removed</name>",
|
|
||||||
},
|
|
||||||
Id: "urn:md5:0a2a4f19bb815101f0ba2904aed7c35a",
|
|
||||||
Summary: Text{
|
|
||||||
Type: "html",
|
|
||||||
Body: `
|
|
||||||
This fixes the buggy tab rendering that can be seen at
|
|
||||||
http://codereview.appspot.com/116075/diff/1/2
|
|
||||||
|
|
||||||
The fundamental problem was that the tab code was
|
|
||||||
not being told what column the text began in, so it
|
|
||||||
didn't know where to put the tab stops. Another problem
|
|
||||||
was that some of the code assumed that string byte
|
|
||||||
offsets were the same as column offsets, which is only
|
|
||||||
true if there are no tabs.
|
|
||||||
|
|
||||||
In the process of fixing this, I cleaned up the arguments
|
|
||||||
to Fold and ExpandTabs and renamed them Break and
|
|
||||||
_ExpandTabs so that I could be sure that I found all the
|
|
||||||
call sites. I also wanted to verify that ExpandTabs was
|
|
||||||
not being used from outside intra_region_diff.py.
|
|
||||||
|
|
||||||
|
|
||||||
`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
const pathTestString = `
|
|
||||||
<Result>
|
|
||||||
<Before>1</Before>
|
|
||||||
<Items>
|
|
||||||
<Item1>
|
|
||||||
<Value>A</Value>
|
|
||||||
</Item1>
|
|
||||||
<Item2>
|
|
||||||
<Value>B</Value>
|
|
||||||
</Item2>
|
|
||||||
<Item1>
|
|
||||||
<Value>C</Value>
|
|
||||||
<Value>D</Value>
|
|
||||||
</Item1>
|
|
||||||
<_>
|
|
||||||
<Value>E</Value>
|
|
||||||
</_>
|
|
||||||
</Items>
|
|
||||||
<After>2</After>
|
|
||||||
</Result>
|
|
||||||
`
|
|
||||||
|
|
||||||
type PathTestItem struct {
|
|
||||||
Value string
|
|
||||||
}
|
|
||||||
|
|
||||||
type PathTestA struct {
|
|
||||||
Items []PathTestItem `xml:">Item1"`
|
|
||||||
Before, After string
|
|
||||||
}
|
|
||||||
|
|
||||||
type PathTestB struct {
|
|
||||||
Other []PathTestItem `xml:"Items>Item1"`
|
|
||||||
Before, After string
|
|
||||||
}
|
|
||||||
|
|
||||||
type PathTestC struct {
|
|
||||||
Values1 []string `xml:"Items>Item1>Value"`
|
|
||||||
Values2 []string `xml:"Items>Item2>Value"`
|
|
||||||
Before, After string
|
|
||||||
}
|
|
||||||
|
|
||||||
type PathTestSet struct {
|
|
||||||
Item1 []PathTestItem
|
|
||||||
}
|
|
||||||
|
|
||||||
type PathTestD struct {
|
|
||||||
Other PathTestSet `xml:"Items"`
|
|
||||||
Before, After string
|
|
||||||
}
|
|
||||||
|
|
||||||
type PathTestE struct {
|
|
||||||
Underline string `xml:"Items>_>Value"`
|
|
||||||
Before, After string
|
|
||||||
}
|
|
||||||
|
|
||||||
var pathTests = []interface{}{
|
|
||||||
&PathTestA{Items: []PathTestItem{{"A"}, {"D"}}, Before: "1", After: "2"},
|
|
||||||
&PathTestB{Other: []PathTestItem{{"A"}, {"D"}}, Before: "1", After: "2"},
|
|
||||||
&PathTestC{Values1: []string{"A", "C", "D"}, Values2: []string{"B"}, Before: "1", After: "2"},
|
|
||||||
&PathTestD{Other: PathTestSet{Item1: []PathTestItem{{"A"}, {"D"}}}, Before: "1", After: "2"},
|
|
||||||
&PathTestE{Underline: "E", Before: "1", After: "2"},
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnmarshalPaths(t *testing.T) {
|
|
||||||
for _, pt := range pathTests {
|
|
||||||
v := reflect.New(reflect.TypeOf(pt).Elem()).Interface()
|
|
||||||
if err := Unmarshal([]byte(pathTestString), v); err != nil {
|
|
||||||
t.Fatalf("Unmarshal: %s", err)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(v, pt) {
|
|
||||||
t.Fatalf("have %#v\nwant %#v", v, pt)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type BadPathTestA struct {
|
|
||||||
First string `xml:"items>item1"`
|
|
||||||
Other string `xml:"items>item2"`
|
|
||||||
Second string `xml:"items"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type BadPathTestB struct {
|
|
||||||
Other string `xml:"items>item2>value"`
|
|
||||||
First string `xml:"items>item1"`
|
|
||||||
Second string `xml:"items>item1>value"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type BadPathTestC struct {
|
|
||||||
First string
|
|
||||||
Second string `xml:"First"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type BadPathTestD struct {
|
|
||||||
BadPathEmbeddedA
|
|
||||||
BadPathEmbeddedB
|
|
||||||
}
|
|
||||||
|
|
||||||
type BadPathEmbeddedA struct {
|
|
||||||
First string
|
|
||||||
}
|
|
||||||
|
|
||||||
type BadPathEmbeddedB struct {
|
|
||||||
Second string `xml:"First"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var badPathTests = []struct {
|
|
||||||
v, e interface{}
|
|
||||||
}{
|
|
||||||
{&BadPathTestA{}, &TagPathError{reflect.TypeOf(BadPathTestA{}), "First", "items>item1", "Second", "items"}},
|
|
||||||
{&BadPathTestB{}, &TagPathError{reflect.TypeOf(BadPathTestB{}), "First", "items>item1", "Second", "items>item1>value"}},
|
|
||||||
{&BadPathTestC{}, &TagPathError{reflect.TypeOf(BadPathTestC{}), "First", "", "Second", "First"}},
|
|
||||||
{&BadPathTestD{}, &TagPathError{reflect.TypeOf(BadPathTestD{}), "First", "", "Second", "First"}},
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnmarshalBadPaths(t *testing.T) {
|
|
||||||
for _, tt := range badPathTests {
|
|
||||||
err := Unmarshal([]byte(pathTestString), tt.v)
|
|
||||||
if !reflect.DeepEqual(err, tt.e) {
|
|
||||||
t.Fatalf("Unmarshal with %#v didn't fail properly:\nhave %#v,\nwant %#v", tt.v, err, tt.e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const OK = "OK"
|
|
||||||
const withoutNameTypeData = `
|
|
||||||
<?xml version="1.0" charset="utf-8"?>
|
|
||||||
<Test3 Attr="OK" />`
|
|
||||||
|
|
||||||
type TestThree struct {
|
|
||||||
XMLName Name `xml:"Test3"`
|
|
||||||
Attr string `xml:",attr"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnmarshalWithoutNameType(t *testing.T) {
|
|
||||||
var x TestThree
|
|
||||||
if err := Unmarshal([]byte(withoutNameTypeData), &x); err != nil {
|
|
||||||
t.Fatalf("Unmarshal: %s", err)
|
|
||||||
}
|
|
||||||
if x.Attr != OK {
|
|
||||||
t.Fatalf("have %v\nwant %v", x.Attr, OK)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnmarshalAttr(t *testing.T) {
|
|
||||||
type ParamVal struct {
|
|
||||||
Int int `xml:"int,attr"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ParamPtr struct {
|
|
||||||
Int *int `xml:"int,attr"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ParamStringPtr struct {
|
|
||||||
Int *string `xml:"int,attr"`
|
|
||||||
}
|
|
||||||
|
|
||||||
x := []byte(`<Param int="1" />`)
|
|
||||||
|
|
||||||
p1 := &ParamPtr{}
|
|
||||||
if err := Unmarshal(x, p1); err != nil {
|
|
||||||
t.Fatalf("Unmarshal: %s", err)
|
|
||||||
}
|
|
||||||
if p1.Int == nil {
|
|
||||||
t.Fatalf("Unmarshal failed in to *int field")
|
|
||||||
} else if *p1.Int != 1 {
|
|
||||||
t.Fatalf("Unmarshal with %s failed:\nhave %#v,\n want %#v", x, p1.Int, 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
p2 := &ParamVal{}
|
|
||||||
if err := Unmarshal(x, p2); err != nil {
|
|
||||||
t.Fatalf("Unmarshal: %s", err)
|
|
||||||
}
|
|
||||||
if p2.Int != 1 {
|
|
||||||
t.Fatalf("Unmarshal with %s failed:\nhave %#v,\n want %#v", x, p2.Int, 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
p3 := &ParamStringPtr{}
|
|
||||||
if err := Unmarshal(x, p3); err != nil {
|
|
||||||
t.Fatalf("Unmarshal: %s", err)
|
|
||||||
}
|
|
||||||
if p3.Int == nil {
|
|
||||||
t.Fatalf("Unmarshal failed in to *string field")
|
|
||||||
} else if *p3.Int != "1" {
|
|
||||||
t.Fatalf("Unmarshal with %s failed:\nhave %#v,\n want %#v", x, p3.Int, 1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type Tables struct {
|
|
||||||
HTable string `xml:"http://www.w3.org/TR/html4/ table"`
|
|
||||||
FTable string `xml:"http://www.w3schools.com/furniture table"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var tables = []struct {
|
|
||||||
xml string
|
|
||||||
tab Tables
|
|
||||||
ns string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
xml: `<Tables>` +
|
|
||||||
`<table xmlns="http://www.w3.org/TR/html4/">hello</table>` +
|
|
||||||
`<table xmlns="http://www.w3schools.com/furniture">world</table>` +
|
|
||||||
`</Tables>`,
|
|
||||||
tab: Tables{"hello", "world"},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<Tables>` +
|
|
||||||
`<table xmlns="http://www.w3schools.com/furniture">world</table>` +
|
|
||||||
`<table xmlns="http://www.w3.org/TR/html4/">hello</table>` +
|
|
||||||
`</Tables>`,
|
|
||||||
tab: Tables{"hello", "world"},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<Tables xmlns:f="http://www.w3schools.com/furniture" xmlns:h="http://www.w3.org/TR/html4/">` +
|
|
||||||
`<f:table>world</f:table>` +
|
|
||||||
`<h:table>hello</h:table>` +
|
|
||||||
`</Tables>`,
|
|
||||||
tab: Tables{"hello", "world"},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<Tables>` +
|
|
||||||
`<table>bogus</table>` +
|
|
||||||
`</Tables>`,
|
|
||||||
tab: Tables{},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<Tables>` +
|
|
||||||
`<table>only</table>` +
|
|
||||||
`</Tables>`,
|
|
||||||
tab: Tables{HTable: "only"},
|
|
||||||
ns: "http://www.w3.org/TR/html4/",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<Tables>` +
|
|
||||||
`<table>only</table>` +
|
|
||||||
`</Tables>`,
|
|
||||||
tab: Tables{FTable: "only"},
|
|
||||||
ns: "http://www.w3schools.com/furniture",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<Tables>` +
|
|
||||||
`<table>only</table>` +
|
|
||||||
`</Tables>`,
|
|
||||||
tab: Tables{},
|
|
||||||
ns: "something else entirely",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnmarshalNS(t *testing.T) {
|
|
||||||
for i, tt := range tables {
|
|
||||||
var dst Tables
|
|
||||||
var err error
|
|
||||||
if tt.ns != "" {
|
|
||||||
d := NewDecoder(strings.NewReader(tt.xml))
|
|
||||||
d.DefaultSpace = tt.ns
|
|
||||||
err = d.Decode(&dst)
|
|
||||||
} else {
|
|
||||||
err = Unmarshal([]byte(tt.xml), &dst)
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("#%d: Unmarshal: %v", i, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
want := tt.tab
|
|
||||||
if dst != want {
|
|
||||||
t.Errorf("#%d: dst=%+v, want %+v", i, dst, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRoundTrip(t *testing.T) {
|
|
||||||
// From issue 7535
|
|
||||||
const s = `<ex:element xmlns:ex="http://example.com/schema"></ex:element>`
|
|
||||||
in := bytes.NewBufferString(s)
|
|
||||||
for i := 0; i < 10; i++ {
|
|
||||||
out := &bytes.Buffer{}
|
|
||||||
d := NewDecoder(in)
|
|
||||||
e := NewEncoder(out)
|
|
||||||
|
|
||||||
for {
|
|
||||||
t, err := d.Token()
|
|
||||||
if err == io.EOF {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("failed:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
e.EncodeToken(t)
|
|
||||||
}
|
|
||||||
e.Flush()
|
|
||||||
in = out
|
|
||||||
}
|
|
||||||
if got := in.String(); got != s {
|
|
||||||
t.Errorf("have: %q\nwant: %q\n", got, s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMarshalNS(t *testing.T) {
|
|
||||||
dst := Tables{"hello", "world"}
|
|
||||||
data, err := Marshal(&dst)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Marshal: %v", err)
|
|
||||||
}
|
|
||||||
want := `<Tables><table xmlns="http://www.w3.org/TR/html4/">hello</table><table xmlns="http://www.w3schools.com/furniture">world</table></Tables>`
|
|
||||||
str := string(data)
|
|
||||||
if str != want {
|
|
||||||
t.Errorf("have: %q\nwant: %q\n", str, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type TableAttrs struct {
|
|
||||||
TAttr TAttr
|
|
||||||
}
|
|
||||||
|
|
||||||
type TAttr struct {
|
|
||||||
HTable string `xml:"http://www.w3.org/TR/html4/ table,attr"`
|
|
||||||
FTable string `xml:"http://www.w3schools.com/furniture table,attr"`
|
|
||||||
Lang string `xml:"http://www.w3.org/XML/1998/namespace lang,attr,omitempty"`
|
|
||||||
Other1 string `xml:"http://golang.org/xml/ other,attr,omitempty"`
|
|
||||||
Other2 string `xml:"http://golang.org/xmlfoo/ other,attr,omitempty"`
|
|
||||||
Other3 string `xml:"http://golang.org/json/ other,attr,omitempty"`
|
|
||||||
Other4 string `xml:"http://golang.org/2/json/ other,attr,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var tableAttrs = []struct {
|
|
||||||
xml string
|
|
||||||
tab TableAttrs
|
|
||||||
ns string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
xml: `<TableAttrs xmlns:f="http://www.w3schools.com/furniture" xmlns:h="http://www.w3.org/TR/html4/"><TAttr ` +
|
|
||||||
`h:table="hello" f:table="world" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{TAttr{HTable: "hello", FTable: "world"}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<TableAttrs><TAttr xmlns:f="http://www.w3schools.com/furniture" xmlns:h="http://www.w3.org/TR/html4/" ` +
|
|
||||||
`h:table="hello" f:table="world" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{TAttr{HTable: "hello", FTable: "world"}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<TableAttrs><TAttr ` +
|
|
||||||
`h:table="hello" f:table="world" xmlns:f="http://www.w3schools.com/furniture" xmlns:h="http://www.w3.org/TR/html4/" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{TAttr{HTable: "hello", FTable: "world"}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// Default space does not apply to attribute names.
|
|
||||||
xml: `<TableAttrs xmlns="http://www.w3schools.com/furniture" xmlns:h="http://www.w3.org/TR/html4/"><TAttr ` +
|
|
||||||
`h:table="hello" table="world" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{TAttr{HTable: "hello", FTable: ""}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// Default space does not apply to attribute names.
|
|
||||||
xml: `<TableAttrs xmlns:f="http://www.w3schools.com/furniture"><TAttr xmlns="http://www.w3.org/TR/html4/" ` +
|
|
||||||
`table="hello" f:table="world" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{TAttr{HTable: "", FTable: "world"}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<TableAttrs><TAttr ` +
|
|
||||||
`table="bogus" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// Default space does not apply to attribute names.
|
|
||||||
xml: `<TableAttrs xmlns:h="http://www.w3.org/TR/html4/"><TAttr ` +
|
|
||||||
`h:table="hello" table="world" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{TAttr{HTable: "hello", FTable: ""}},
|
|
||||||
ns: "http://www.w3schools.com/furniture",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// Default space does not apply to attribute names.
|
|
||||||
xml: `<TableAttrs xmlns:f="http://www.w3schools.com/furniture"><TAttr ` +
|
|
||||||
`table="hello" f:table="world" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{TAttr{HTable: "", FTable: "world"}},
|
|
||||||
ns: "http://www.w3.org/TR/html4/",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
xml: `<TableAttrs><TAttr ` +
|
|
||||||
`table="bogus" ` +
|
|
||||||
`/></TableAttrs>`,
|
|
||||||
tab: TableAttrs{},
|
|
||||||
ns: "something else entirely",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnmarshalNSAttr(t *testing.T) {
	for i, tt := range tableAttrs {
		var dst TableAttrs
		var err error
		if tt.ns != "" {
			d := NewDecoder(strings.NewReader(tt.xml))
			d.DefaultSpace = tt.ns
			err = d.Decode(&dst)
		} else {
			err = Unmarshal([]byte(tt.xml), &dst)
		}
		if err != nil {
			t.Errorf("#%d: Unmarshal: %v", i, err)
			continue
		}
		want := tt.tab
		if dst != want {
			t.Errorf("#%d: dst=%+v, want %+v", i, dst, want)
		}
	}
}

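The table entries with a non-empty ns field exercise Decoder.DefaultSpace. Below is a minimal standalone sketch of the behaviour, written against the standard library's encoding/xml on the assumption that it matches this vendored copy: DefaultSpace supplies a namespace for unprefixed element names but, as the comments in the table note, not for unprefixed attribute names.

package main

import (
	"encoding/xml"
	"fmt"
	"strings"
)

type tattr struct {
	HTable string `xml:"http://www.w3.org/TR/html4/ table,attr"`
}

func main() {
	d := xml.NewDecoder(strings.NewReader(`<TAttr table="hello"/>`))
	// DefaultSpace acts as if the whole stream were wrapped in xmlns="...",
	// so it namespaces unprefixed element names only.
	d.DefaultSpace = "http://www.w3.org/TR/html4/"
	var v tattr
	if err := d.Decode(&v); err != nil {
		fmt.Println("decode:", err)
		return
	}
	// HTable stays empty: the unprefixed attribute gets no namespace.
	fmt.Printf("%+v\n", v)
}
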
func TestMarshalNSAttr(t *testing.T) {
	src := TableAttrs{TAttr{"hello", "world", "en_US", "other1", "other2", "other3", "other4"}}
	data, err := Marshal(&src)
	if err != nil {
		t.Fatalf("Marshal: %v", err)
	}
	want := `<TableAttrs><TAttr xmlns:json_1="http://golang.org/2/json/" xmlns:json="http://golang.org/json/" xmlns:_xmlfoo="http://golang.org/xmlfoo/" xmlns:_xml="http://golang.org/xml/" xmlns:furniture="http://www.w3schools.com/furniture" xmlns:html4="http://www.w3.org/TR/html4/" html4:table="hello" furniture:table="world" xml:lang="en_US" _xml:other="other1" _xmlfoo:other="other2" json:other="other3" json_1:other="other4"></TAttr></TableAttrs>`
	str := string(data)
	if str != want {
		t.Errorf("Marshal:\nhave: %#q\nwant: %#q\n", str, want)
	}

	var dst TableAttrs
	if err := Unmarshal(data, &dst); err != nil {
		t.Errorf("Unmarshal: %v", err)
	}

	if dst != src {
		t.Errorf("Unmarshal = %q, want %q", dst, src)
	}
}

type MyCharData struct {
	body string
}

func (m *MyCharData) UnmarshalXML(d *Decoder, start StartElement) error {
	for {
		t, err := d.Token()
		if err == io.EOF { // found end of element
			break
		}
		if err != nil {
			return err
		}
		if char, ok := t.(CharData); ok {
			m.body += string(char)
		}
	}
	return nil
}

var _ Unmarshaler = (*MyCharData)(nil)

func (m *MyCharData) UnmarshalXMLAttr(attr Attr) error {
	panic("must not call")
}

type MyAttr struct {
	attr string
}

func (m *MyAttr) UnmarshalXMLAttr(attr Attr) error {
	m.attr = attr.Value
	return nil
}

var _ UnmarshalerAttr = (*MyAttr)(nil)

type MyStruct struct {
	Data *MyCharData
	Attr *MyAttr `xml:",attr"`

	Data2 MyCharData
	Attr2 MyAttr `xml:",attr"`
}

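MyCharData and MyAttr implement the package's Unmarshaler and UnmarshalerAttr hooks. Here is a minimal self-contained sketch of the same custom-unmarshalling pattern, written against the standard library's encoding/xml on the assumption that its interfaces match; the type and element names are invented for illustration.

package main

import (
	"encoding/xml"
	"fmt"
	"strings"
)

// upperCase collects an element's character data and upper-cases it.
type upperCase struct{ s string }

func (u *upperCase) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
	var raw string
	// DecodeElement gathers the element's character data into raw.
	if err := d.DecodeElement(&raw, &start); err != nil {
		return err
	}
	u.s = strings.ToUpper(raw)
	return nil
}

func main() {
	var v struct {
		Name upperCase `xml:"Name"`
	}
	if err := xml.Unmarshal([]byte(`<doc><Name>gopher</Name></doc>`), &v); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(v.Name.s) // GOPHER
}
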
func TestUnmarshaler(t *testing.T) {
|
|
||||||
xml := `<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<MyStruct Attr="attr1" Attr2="attr2">
|
|
||||||
<Data>hello <!-- comment -->world</Data>
|
|
||||||
<Data2>howdy <!-- comment -->world</Data2>
|
|
||||||
</MyStruct>
|
|
||||||
`
|
|
||||||
|
|
||||||
var m MyStruct
|
|
||||||
if err := Unmarshal([]byte(xml), &m); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if m.Data == nil || m.Attr == nil || m.Data.body != "hello world" || m.Attr.attr != "attr1" || m.Data2.body != "howdy world" || m.Attr2.attr != "attr2" {
|
|
||||||
t.Errorf("m=%#+v\n", m)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type Pea struct {
|
|
||||||
Cotelydon string
|
|
||||||
}
|
|
||||||
|
|
||||||
type Pod struct {
|
|
||||||
Pea interface{} `xml:"Pea"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// https://golang.org/issue/6836
|
|
||||||
func TestUnmarshalIntoInterface(t *testing.T) {
|
|
||||||
pod := new(Pod)
|
|
||||||
pod.Pea = new(Pea)
|
|
||||||
xml := `<Pod><Pea><Cotelydon>Green stuff</Cotelydon></Pea></Pod>`
|
|
||||||
err := Unmarshal([]byte(xml), pod)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("failed to unmarshal %q: %v", xml, err)
|
|
||||||
}
|
|
||||||
pea, ok := pod.Pea.(*Pea)
|
|
||||||
if !ok {
|
|
||||||
t.Fatalf("unmarshalled into wrong type: have %T want *Pea", pod.Pea)
|
|
||||||
}
|
|
||||||
have, want := pea.Cotelydon, "Green stuff"
|
|
||||||
if have != want {
|
|
||||||
t.Errorf("failed to unmarshal into interface, have %q want %q", have, want)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,752 +0,0 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package xml

import (
	"bytes"
	"fmt"
	"io"
	"reflect"
	"strings"
	"testing"
	"unicode/utf8"
)

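The removed file below drives the tokenizer directly through RawToken and Token. For orientation, here is a minimal sketch of the same token loop written against the standard library's encoding/xml, which this vendored copy appears to track.

package main

import (
	"encoding/xml"
	"fmt"
	"io"
	"strings"
)

func main() {
	d := xml.NewDecoder(strings.NewReader(`<body><hello lang="en">World</hello></body>`))
	for {
		tok, err := d.Token()
		if err == io.EOF {
			break
		}
		if err != nil {
			fmt.Println("token error:", err)
			return
		}
		switch t := tok.(type) {
		case xml.StartElement:
			fmt.Println("start:", t.Name.Local)
		case xml.CharData:
			fmt.Printf("chardata: %q\n", string(t))
		case xml.EndElement:
			fmt.Println("end:", t.Name.Local)
		}
	}
}
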
const testInput = `
|
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
|
|
||||||
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
|
||||||
<body xmlns:foo="ns1" xmlns="ns2" xmlns:tag="ns3" ` +
|
|
||||||
"\r\n\t" + ` >
|
|
||||||
<hello lang="en">World <>'" 白鵬翔</hello>
|
|
||||||
<query>&何; &is-it;</query>
|
|
||||||
<goodbye />
|
|
||||||
<outer foo:attr="value" xmlns:tag="ns4">
|
|
||||||
<inner/>
|
|
||||||
</outer>
|
|
||||||
<tag:name>
|
|
||||||
<![CDATA[Some text here.]]>
|
|
||||||
</tag:name>
|
|
||||||
</body><!-- missing final newline -->`
|
|
||||||
|
|
||||||
var testEntity = map[string]string{"何": "What", "is-it": "is it?"}
|
|
||||||
|
|
||||||
var rawTokens = []Token{
|
|
||||||
CharData("\n"),
|
|
||||||
ProcInst{"xml", []byte(`version="1.0" encoding="UTF-8"`)},
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
|
|
||||||
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"`),
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "body"}, []Attr{{Name{"xmlns", "foo"}, "ns1"}, {Name{"", "xmlns"}, "ns2"}, {Name{"xmlns", "tag"}, "ns3"}}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"", "hello"}, []Attr{{Name{"", "lang"}, "en"}}},
|
|
||||||
CharData("World <>'\" 白鵬翔"),
|
|
||||||
EndElement{Name{"", "hello"}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"", "query"}, []Attr{}},
|
|
||||||
CharData("What is it?"),
|
|
||||||
EndElement{Name{"", "query"}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"", "goodbye"}, []Attr{}},
|
|
||||||
EndElement{Name{"", "goodbye"}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"", "outer"}, []Attr{{Name{"foo", "attr"}, "value"}, {Name{"xmlns", "tag"}, "ns4"}}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"", "inner"}, []Attr{}},
|
|
||||||
EndElement{Name{"", "inner"}},
|
|
||||||
CharData("\n "),
|
|
||||||
EndElement{Name{"", "outer"}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"tag", "name"}, []Attr{}},
|
|
||||||
CharData("\n "),
|
|
||||||
CharData("Some text here."),
|
|
||||||
CharData("\n "),
|
|
||||||
EndElement{Name{"tag", "name"}},
|
|
||||||
CharData("\n"),
|
|
||||||
EndElement{Name{"", "body"}},
|
|
||||||
Comment(" missing final newline "),
|
|
||||||
}
|
|
||||||
|
|
||||||
var cookedTokens = []Token{
|
|
||||||
CharData("\n"),
|
|
||||||
ProcInst{"xml", []byte(`version="1.0" encoding="UTF-8"`)},
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
|
|
||||||
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"`),
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"ns2", "body"}, []Attr{{Name{"xmlns", "foo"}, "ns1"}, {Name{"", "xmlns"}, "ns2"}, {Name{"xmlns", "tag"}, "ns3"}}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"ns2", "hello"}, []Attr{{Name{"", "lang"}, "en"}}},
|
|
||||||
CharData("World <>'\" 白鵬翔"),
|
|
||||||
EndElement{Name{"ns2", "hello"}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"ns2", "query"}, []Attr{}},
|
|
||||||
CharData("What is it?"),
|
|
||||||
EndElement{Name{"ns2", "query"}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"ns2", "goodbye"}, []Attr{}},
|
|
||||||
EndElement{Name{"ns2", "goodbye"}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"ns2", "outer"}, []Attr{{Name{"ns1", "attr"}, "value"}, {Name{"xmlns", "tag"}, "ns4"}}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"ns2", "inner"}, []Attr{}},
|
|
||||||
EndElement{Name{"ns2", "inner"}},
|
|
||||||
CharData("\n "),
|
|
||||||
EndElement{Name{"ns2", "outer"}},
|
|
||||||
CharData("\n "),
|
|
||||||
StartElement{Name{"ns3", "name"}, []Attr{}},
|
|
||||||
CharData("\n "),
|
|
||||||
CharData("Some text here."),
|
|
||||||
CharData("\n "),
|
|
||||||
EndElement{Name{"ns3", "name"}},
|
|
||||||
CharData("\n"),
|
|
||||||
EndElement{Name{"ns2", "body"}},
|
|
||||||
Comment(" missing final newline "),
|
|
||||||
}
|
|
||||||
|
|
||||||
const testInputAltEncoding = `
|
|
||||||
<?xml version="1.0" encoding="x-testing-uppercase"?>
|
|
||||||
<TAG>VALUE</TAG>`
|
|
||||||
|
|
||||||
var rawTokensAltEncoding = []Token{
|
|
||||||
CharData("\n"),
|
|
||||||
ProcInst{"xml", []byte(`version="1.0" encoding="x-testing-uppercase"`)},
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("value"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
}
|
|
||||||
|
|
||||||
var xmlInput = []string{
|
|
||||||
// unexpected EOF cases
|
|
||||||
"<",
|
|
||||||
"<t",
|
|
||||||
"<t ",
|
|
||||||
"<t/",
|
|
||||||
"<!",
|
|
||||||
"<!-",
|
|
||||||
"<!--",
|
|
||||||
"<!--c-",
|
|
||||||
"<!--c--",
|
|
||||||
"<!d",
|
|
||||||
"<t></",
|
|
||||||
"<t></t",
|
|
||||||
"<?",
|
|
||||||
"<?p",
|
|
||||||
"<t a",
|
|
||||||
"<t a=",
|
|
||||||
"<t a='",
|
|
||||||
"<t a=''",
|
|
||||||
"<t/><![",
|
|
||||||
"<t/><![C",
|
|
||||||
"<t/><![CDATA[d",
|
|
||||||
"<t/><![CDATA[d]",
|
|
||||||
"<t/><![CDATA[d]]",
|
|
||||||
|
|
||||||
// other Syntax errors
|
|
||||||
"<>",
|
|
||||||
"<t/a",
|
|
||||||
"<0 />",
|
|
||||||
"<?0 >",
|
|
||||||
// "<!0 >", // let the Token() caller handle
|
|
||||||
"</0>",
|
|
||||||
"<t 0=''>",
|
|
||||||
"<t a='&'>",
|
|
||||||
"<t a='<'>",
|
|
||||||
"<t> c;</t>",
|
|
||||||
"<t a>",
|
|
||||||
"<t a=>",
|
|
||||||
"<t a=v>",
|
|
||||||
// "<![CDATA[d]]>", // let the Token() caller handle
|
|
||||||
"<t></e>",
|
|
||||||
"<t></>",
|
|
||||||
"<t></t!",
|
|
||||||
"<t>cdata]]></t>",
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRawToken(t *testing.T) {
|
|
||||||
d := NewDecoder(strings.NewReader(testInput))
|
|
||||||
d.Entity = testEntity
|
|
||||||
testRawToken(t, d, testInput, rawTokens)
|
|
||||||
}
|
|
||||||
|
|
||||||
const nonStrictInput = `
|
|
||||||
<tag>non&entity</tag>
|
|
||||||
<tag>&unknown;entity</tag>
|
|
||||||
<tag>{</tag>
|
|
||||||
<tag>&#zzz;</tag>
|
|
||||||
<tag>&なまえ3;</tag>
|
|
||||||
<tag><-gt;</tag>
|
|
||||||
<tag>&;</tag>
|
|
||||||
<tag>&0a;</tag>
|
|
||||||
`
|
|
||||||
|
|
||||||
var nonStringEntity = map[string]string{"": "oops!", "0a": "oops!"}
|
|
||||||
|
|
||||||
var nonStrictTokens = []Token{
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("non&entity"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("&unknown;entity"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("{"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("&#zzz;"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("&なまえ3;"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("<-gt;"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("&;"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
CharData("\n"),
|
|
||||||
StartElement{Name{"", "tag"}, []Attr{}},
|
|
||||||
CharData("&0a;"),
|
|
||||||
EndElement{Name{"", "tag"}},
|
|
||||||
CharData("\n"),
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestNonStrictRawToken(t *testing.T) {
|
|
||||||
d := NewDecoder(strings.NewReader(nonStrictInput))
|
|
||||||
d.Strict = false
|
|
||||||
testRawToken(t, d, nonStrictInput, nonStrictTokens)
|
|
||||||
}
|
|
||||||
|
|
||||||
type downCaser struct {
|
|
||||||
t *testing.T
|
|
||||||
r io.ByteReader
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *downCaser) ReadByte() (c byte, err error) {
|
|
||||||
c, err = d.r.ReadByte()
|
|
||||||
if c >= 'A' && c <= 'Z' {
|
|
||||||
c += 'a' - 'A'
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *downCaser) Read(p []byte) (int, error) {
|
|
||||||
d.t.Fatalf("unexpected Read call on downCaser reader")
|
|
||||||
panic("unreachable")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRawTokenAltEncoding(t *testing.T) {
|
|
||||||
d := NewDecoder(strings.NewReader(testInputAltEncoding))
|
|
||||||
d.CharsetReader = func(charset string, input io.Reader) (io.Reader, error) {
|
|
||||||
if charset != "x-testing-uppercase" {
|
|
||||||
t.Fatalf("unexpected charset %q", charset)
|
|
||||||
}
|
|
||||||
return &downCaser{t, input.(io.ByteReader)}, nil
|
|
||||||
}
|
|
||||||
testRawToken(t, d, testInputAltEncoding, rawTokensAltEncoding)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRawTokenAltEncodingNoConverter(t *testing.T) {
|
|
||||||
d := NewDecoder(strings.NewReader(testInputAltEncoding))
|
|
||||||
token, err := d.RawToken()
|
|
||||||
if token == nil {
|
|
||||||
t.Fatalf("expected a token on first RawToken call")
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
token, err = d.RawToken()
|
|
||||||
if token != nil {
|
|
||||||
t.Errorf("expected a nil token; got %#v", token)
|
|
||||||
}
|
|
||||||
if err == nil {
|
|
||||||
t.Fatalf("expected an error on second RawToken call")
|
|
||||||
}
|
|
||||||
const encoding = "x-testing-uppercase"
|
|
||||||
if !strings.Contains(err.Error(), encoding) {
|
|
||||||
t.Errorf("expected error to contain %q; got error: %v",
|
|
||||||
encoding, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func testRawToken(t *testing.T, d *Decoder, raw string, rawTokens []Token) {
|
|
||||||
lastEnd := int64(0)
|
|
||||||
for i, want := range rawTokens {
|
|
||||||
start := d.InputOffset()
|
|
||||||
have, err := d.RawToken()
|
|
||||||
end := d.InputOffset()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("token %d: unexpected error: %s", i, err)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(have, want) {
|
|
||||||
var shave, swant string
|
|
||||||
if _, ok := have.(CharData); ok {
|
|
||||||
shave = fmt.Sprintf("CharData(%q)", have)
|
|
||||||
} else {
|
|
||||||
shave = fmt.Sprintf("%#v", have)
|
|
||||||
}
|
|
||||||
if _, ok := want.(CharData); ok {
|
|
||||||
swant = fmt.Sprintf("CharData(%q)", want)
|
|
||||||
} else {
|
|
||||||
swant = fmt.Sprintf("%#v", want)
|
|
||||||
}
|
|
||||||
t.Errorf("token %d = %s, want %s", i, shave, swant)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check that InputOffset returned actual token.
|
|
||||||
switch {
|
|
||||||
case start < lastEnd:
|
|
||||||
t.Errorf("token %d: position [%d,%d) for %T is before previous token", i, start, end, have)
|
|
||||||
case start >= end:
|
|
||||||
// Special case: EndElement can be synthesized.
|
|
||||||
if start == end && end == lastEnd {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
t.Errorf("token %d: position [%d,%d) for %T is empty", i, start, end, have)
|
|
||||||
case end > int64(len(raw)):
|
|
||||||
t.Errorf("token %d: position [%d,%d) for %T extends beyond input", i, start, end, have)
|
|
||||||
default:
|
|
||||||
text := raw[start:end]
|
|
||||||
if strings.ContainsAny(text, "<>") && (!strings.HasPrefix(text, "<") || !strings.HasSuffix(text, ">")) {
|
|
||||||
t.Errorf("token %d: misaligned raw token %#q for %T", i, text, have)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
lastEnd = end
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ensure that directives (specifically !DOCTYPE) include the complete
|
|
||||||
// text of any nested directives, noting that < and > do not change
|
|
||||||
// nesting depth if they are in single or double quotes.
|
|
||||||
|
|
||||||
var nestedDirectivesInput = `
|
|
||||||
<!DOCTYPE [<!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#">]>
|
|
||||||
<!DOCTYPE [<!ENTITY xlt ">">]>
|
|
||||||
<!DOCTYPE [<!ENTITY xlt "<">]>
|
|
||||||
<!DOCTYPE [<!ENTITY xlt '>'>]>
|
|
||||||
<!DOCTYPE [<!ENTITY xlt '<'>]>
|
|
||||||
<!DOCTYPE [<!ENTITY xlt '">'>]>
|
|
||||||
<!DOCTYPE [<!ENTITY xlt "'<">]>
|
|
||||||
`
|
|
||||||
|
|
||||||
var nestedDirectivesTokens = []Token{
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#">]`),
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY xlt ">">]`),
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY xlt "<">]`),
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY xlt '>'>]`),
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY xlt '<'>]`),
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY xlt '">'>]`),
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY xlt "'<">]`),
|
|
||||||
CharData("\n"),
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestNestedDirectives(t *testing.T) {
|
|
||||||
d := NewDecoder(strings.NewReader(nestedDirectivesInput))
|
|
||||||
|
|
||||||
for i, want := range nestedDirectivesTokens {
|
|
||||||
have, err := d.Token()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("token %d: unexpected error: %s", i, err)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(have, want) {
|
|
||||||
t.Errorf("token %d = %#v want %#v", i, have, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestToken(t *testing.T) {
|
|
||||||
d := NewDecoder(strings.NewReader(testInput))
|
|
||||||
d.Entity = testEntity
|
|
||||||
|
|
||||||
for i, want := range cookedTokens {
|
|
||||||
have, err := d.Token()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("token %d: unexpected error: %s", i, err)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(have, want) {
|
|
||||||
t.Errorf("token %d = %#v want %#v", i, have, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSyntax(t *testing.T) {
|
|
||||||
for i := range xmlInput {
|
|
||||||
d := NewDecoder(strings.NewReader(xmlInput[i]))
|
|
||||||
var err error
|
|
||||||
for _, err = d.Token(); err == nil; _, err = d.Token() {
|
|
||||||
}
|
|
||||||
if _, ok := err.(*SyntaxError); !ok {
|
|
||||||
t.Fatalf(`xmlInput "%s": expected SyntaxError not received`, xmlInput[i])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type allScalars struct {
|
|
||||||
True1 bool
|
|
||||||
True2 bool
|
|
||||||
False1 bool
|
|
||||||
False2 bool
|
|
||||||
Int int
|
|
||||||
Int8 int8
|
|
||||||
Int16 int16
|
|
||||||
Int32 int32
|
|
||||||
Int64 int64
|
|
||||||
Uint int
|
|
||||||
Uint8 uint8
|
|
||||||
Uint16 uint16
|
|
||||||
Uint32 uint32
|
|
||||||
Uint64 uint64
|
|
||||||
Uintptr uintptr
|
|
||||||
Float32 float32
|
|
||||||
Float64 float64
|
|
||||||
String string
|
|
||||||
PtrString *string
|
|
||||||
}
|
|
||||||
|
|
||||||
var all = allScalars{
|
|
||||||
True1: true,
|
|
||||||
True2: true,
|
|
||||||
False1: false,
|
|
||||||
False2: false,
|
|
||||||
Int: 1,
|
|
||||||
Int8: -2,
|
|
||||||
Int16: 3,
|
|
||||||
Int32: -4,
|
|
||||||
Int64: 5,
|
|
||||||
Uint: 6,
|
|
||||||
Uint8: 7,
|
|
||||||
Uint16: 8,
|
|
||||||
Uint32: 9,
|
|
||||||
Uint64: 10,
|
|
||||||
Uintptr: 11,
|
|
||||||
Float32: 13.0,
|
|
||||||
Float64: 14.0,
|
|
||||||
String: "15",
|
|
||||||
PtrString: &sixteen,
|
|
||||||
}
|
|
||||||
|
|
||||||
var sixteen = "16"
|
|
||||||
|
|
||||||
const testScalarsInput = `<allscalars>
|
|
||||||
<True1>true</True1>
|
|
||||||
<True2>1</True2>
|
|
||||||
<False1>false</False1>
|
|
||||||
<False2>0</False2>
|
|
||||||
<Int>1</Int>
|
|
||||||
<Int8>-2</Int8>
|
|
||||||
<Int16>3</Int16>
|
|
||||||
<Int32>-4</Int32>
|
|
||||||
<Int64>5</Int64>
|
|
||||||
<Uint>6</Uint>
|
|
||||||
<Uint8>7</Uint8>
|
|
||||||
<Uint16>8</Uint16>
|
|
||||||
<Uint32>9</Uint32>
|
|
||||||
<Uint64>10</Uint64>
|
|
||||||
<Uintptr>11</Uintptr>
|
|
||||||
<Float>12.0</Float>
|
|
||||||
<Float32>13.0</Float32>
|
|
||||||
<Float64>14.0</Float64>
|
|
||||||
<String>15</String>
|
|
||||||
<PtrString>16</PtrString>
|
|
||||||
</allscalars>`
|
|
||||||
|
|
||||||
func TestAllScalars(t *testing.T) {
|
|
||||||
var a allScalars
|
|
||||||
err := Unmarshal([]byte(testScalarsInput), &a)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(a, all) {
|
|
||||||
t.Errorf("have %+v want %+v", a, all)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type item struct {
|
|
||||||
Field_a string
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestIssue569(t *testing.T) {
|
|
||||||
data := `<item><Field_a>abcd</Field_a></item>`
|
|
||||||
var i item
|
|
||||||
err := Unmarshal([]byte(data), &i)
|
|
||||||
|
|
||||||
if err != nil || i.Field_a != "abcd" {
|
|
||||||
t.Fatal("Expecting abcd")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnquotedAttrs(t *testing.T) {
|
|
||||||
data := "<tag attr=azAZ09:-_\t>"
|
|
||||||
d := NewDecoder(strings.NewReader(data))
|
|
||||||
d.Strict = false
|
|
||||||
token, err := d.Token()
|
|
||||||
if _, ok := err.(*SyntaxError); ok {
|
|
||||||
t.Errorf("Unexpected error: %v", err)
|
|
||||||
}
|
|
||||||
if token.(StartElement).Name.Local != "tag" {
|
|
||||||
t.Errorf("Unexpected tag name: %v", token.(StartElement).Name.Local)
|
|
||||||
}
|
|
||||||
attr := token.(StartElement).Attr[0]
|
|
||||||
if attr.Value != "azAZ09:-_" {
|
|
||||||
t.Errorf("Unexpected attribute value: %v", attr.Value)
|
|
||||||
}
|
|
||||||
if attr.Name.Local != "attr" {
|
|
||||||
t.Errorf("Unexpected attribute name: %v", attr.Name.Local)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestValuelessAttrs(t *testing.T) {
|
|
||||||
tests := [][3]string{
|
|
||||||
{"<p nowrap>", "p", "nowrap"},
|
|
||||||
{"<p nowrap >", "p", "nowrap"},
|
|
||||||
{"<input checked/>", "input", "checked"},
|
|
||||||
{"<input checked />", "input", "checked"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
d := NewDecoder(strings.NewReader(test[0]))
|
|
||||||
d.Strict = false
|
|
||||||
token, err := d.Token()
|
|
||||||
if _, ok := err.(*SyntaxError); ok {
|
|
||||||
t.Errorf("Unexpected error: %v", err)
|
|
||||||
}
|
|
||||||
if token.(StartElement).Name.Local != test[1] {
|
|
||||||
t.Errorf("Unexpected tag name: %v", token.(StartElement).Name.Local)
|
|
||||||
}
|
|
||||||
attr := token.(StartElement).Attr[0]
|
|
||||||
if attr.Value != test[2] {
|
|
||||||
t.Errorf("Unexpected attribute value: %v", attr.Value)
|
|
||||||
}
|
|
||||||
if attr.Name.Local != test[2] {
|
|
||||||
t.Errorf("Unexpected attribute name: %v", attr.Name.Local)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCopyTokenCharData(t *testing.T) {
|
|
||||||
data := []byte("same data")
|
|
||||||
var tok1 Token = CharData(data)
|
|
||||||
tok2 := CopyToken(tok1)
|
|
||||||
if !reflect.DeepEqual(tok1, tok2) {
|
|
||||||
t.Error("CopyToken(CharData) != CharData")
|
|
||||||
}
|
|
||||||
data[1] = 'o'
|
|
||||||
if reflect.DeepEqual(tok1, tok2) {
|
|
||||||
t.Error("CopyToken(CharData) uses same buffer.")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCopyTokenStartElement(t *testing.T) {
|
|
||||||
elt := StartElement{Name{"", "hello"}, []Attr{{Name{"", "lang"}, "en"}}}
|
|
||||||
var tok1 Token = elt
|
|
||||||
tok2 := CopyToken(tok1)
|
|
||||||
if tok1.(StartElement).Attr[0].Value != "en" {
|
|
||||||
t.Error("CopyToken overwrote Attr[0]")
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(tok1, tok2) {
|
|
||||||
t.Error("CopyToken(StartElement) != StartElement")
|
|
||||||
}
|
|
||||||
tok1.(StartElement).Attr[0] = Attr{Name{"", "lang"}, "de"}
|
|
||||||
if reflect.DeepEqual(tok1, tok2) {
|
|
||||||
t.Error("CopyToken(CharData) uses same buffer.")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSyntaxErrorLineNum(t *testing.T) {
|
|
||||||
testInput := "<P>Foo<P>\n\n<P>Bar</>\n"
|
|
||||||
d := NewDecoder(strings.NewReader(testInput))
|
|
||||||
var err error
|
|
||||||
for _, err = d.Token(); err == nil; _, err = d.Token() {
|
|
||||||
}
|
|
||||||
synerr, ok := err.(*SyntaxError)
|
|
||||||
if !ok {
|
|
||||||
t.Error("Expected SyntaxError.")
|
|
||||||
}
|
|
||||||
if synerr.Line != 3 {
|
|
||||||
t.Error("SyntaxError didn't have correct line number.")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTrailingRawToken(t *testing.T) {
|
|
||||||
input := `<FOO></FOO> `
|
|
||||||
d := NewDecoder(strings.NewReader(input))
|
|
||||||
var err error
|
|
||||||
for _, err = d.RawToken(); err == nil; _, err = d.RawToken() {
|
|
||||||
}
|
|
||||||
if err != io.EOF {
|
|
||||||
t.Fatalf("d.RawToken() = _, %v, want _, io.EOF", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTrailingToken(t *testing.T) {
|
|
||||||
input := `<FOO></FOO> `
|
|
||||||
d := NewDecoder(strings.NewReader(input))
|
|
||||||
var err error
|
|
||||||
for _, err = d.Token(); err == nil; _, err = d.Token() {
|
|
||||||
}
|
|
||||||
if err != io.EOF {
|
|
||||||
t.Fatalf("d.Token() = _, %v, want _, io.EOF", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestEntityInsideCDATA(t *testing.T) {
|
|
||||||
input := `<test><![CDATA[ &val=foo ]]></test>`
|
|
||||||
d := NewDecoder(strings.NewReader(input))
|
|
||||||
var err error
|
|
||||||
for _, err = d.Token(); err == nil; _, err = d.Token() {
|
|
||||||
}
|
|
||||||
if err != io.EOF {
|
|
||||||
t.Fatalf("d.Token() = _, %v, want _, io.EOF", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var characterTests = []struct {
|
|
||||||
in string
|
|
||||||
err string
|
|
||||||
}{
|
|
||||||
{"\x12<doc/>", "illegal character code U+0012"},
|
|
||||||
{"<?xml version=\"1.0\"?>\x0b<doc/>", "illegal character code U+000B"},
|
|
||||||
{"\xef\xbf\xbe<doc/>", "illegal character code U+FFFE"},
|
|
||||||
{"<?xml version=\"1.0\"?><doc>\r\n<hiya/>\x07<toots/></doc>", "illegal character code U+0007"},
|
|
||||||
{"<?xml version=\"1.0\"?><doc \x12='value'>what's up</doc>", "expected attribute name in element"},
|
|
||||||
{"<doc>&abc\x01;</doc>", "invalid character entity &abc (no semicolon)"},
|
|
||||||
{"<doc>&\x01;</doc>", "invalid character entity & (no semicolon)"},
|
|
||||||
{"<doc>&\xef\xbf\xbe;</doc>", "invalid character entity &\uFFFE;"},
|
|
||||||
{"<doc>&hello;</doc>", "invalid character entity &hello;"},
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDisallowedCharacters(t *testing.T) {
|
|
||||||
|
|
||||||
for i, tt := range characterTests {
|
|
||||||
d := NewDecoder(strings.NewReader(tt.in))
|
|
||||||
var err error
|
|
||||||
|
|
||||||
for err == nil {
|
|
||||||
_, err = d.Token()
|
|
||||||
}
|
|
||||||
synerr, ok := err.(*SyntaxError)
|
|
||||||
if !ok {
|
|
||||||
t.Fatalf("input %d d.Token() = _, %v, want _, *SyntaxError", i, err)
|
|
||||||
}
|
|
||||||
if synerr.Msg != tt.err {
|
|
||||||
t.Fatalf("input %d synerr.Msg wrong: want %q, got %q", i, tt.err, synerr.Msg)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type procInstEncodingTest struct {
|
|
||||||
expect, got string
|
|
||||||
}
|
|
||||||
|
|
||||||
var procInstTests = []struct {
|
|
||||||
input string
|
|
||||||
expect [2]string
|
|
||||||
}{
|
|
||||||
{`version="1.0" encoding="utf-8"`, [2]string{"1.0", "utf-8"}},
|
|
||||||
{`version="1.0" encoding='utf-8'`, [2]string{"1.0", "utf-8"}},
|
|
||||||
{`version="1.0" encoding='utf-8' `, [2]string{"1.0", "utf-8"}},
|
|
||||||
{`version="1.0" encoding=utf-8`, [2]string{"1.0", ""}},
|
|
||||||
{`encoding="FOO" `, [2]string{"", "FOO"}},
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestProcInstEncoding(t *testing.T) {
|
|
||||||
for _, test := range procInstTests {
|
|
||||||
if got := procInst("version", test.input); got != test.expect[0] {
|
|
||||||
t.Errorf("procInst(version, %q) = %q; want %q", test.input, got, test.expect[0])
|
|
||||||
}
|
|
||||||
if got := procInst("encoding", test.input); got != test.expect[1] {
|
|
||||||
t.Errorf("procInst(encoding, %q) = %q; want %q", test.input, got, test.expect[1])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ensure that directives with comments include the complete
|
|
||||||
// text of any nested directives.
|
|
||||||
|
|
||||||
var directivesWithCommentsInput = `
|
|
||||||
<!DOCTYPE [<!-- a comment --><!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#">]>
|
|
||||||
<!DOCTYPE [<!ENTITY go "Golang"><!-- a comment-->]>
|
|
||||||
<!DOCTYPE <!-> <!> <!----> <!-->--> <!--->--> [<!ENTITY go "Golang"><!-- a comment-->]>
|
|
||||||
`
|
|
||||||
|
|
||||||
var directivesWithCommentsTokens = []Token{
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#">]`),
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE [<!ENTITY go "Golang">]`),
|
|
||||||
CharData("\n"),
|
|
||||||
Directive(`DOCTYPE <!-> <!> [<!ENTITY go "Golang">]`),
|
|
||||||
CharData("\n"),
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDirectivesWithComments(t *testing.T) {
|
|
||||||
d := NewDecoder(strings.NewReader(directivesWithCommentsInput))
|
|
||||||
|
|
||||||
for i, want := range directivesWithCommentsTokens {
|
|
||||||
have, err := d.Token()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("token %d: unexpected error: %s", i, err)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(have, want) {
|
|
||||||
t.Errorf("token %d = %#v want %#v", i, have, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Writer whose Write method always returns an error.
|
|
||||||
type errWriter struct{}
|
|
||||||
|
|
||||||
func (errWriter) Write(p []byte) (n int, err error) { return 0, fmt.Errorf("unwritable") }
|
|
||||||
|
|
||||||
func TestEscapeTextIOErrors(t *testing.T) {
|
|
||||||
expectErr := "unwritable"
|
|
||||||
err := EscapeText(errWriter{}, []byte{'A'})
|
|
||||||
|
|
||||||
if err == nil || err.Error() != expectErr {
|
|
||||||
t.Errorf("have %v, want %v", err, expectErr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestEscapeTextInvalidChar(t *testing.T) {
|
|
||||||
input := []byte("A \x00 terminated string.")
|
|
||||||
expected := "A \uFFFD terminated string."
|
|
||||||
|
|
||||||
buff := new(bytes.Buffer)
|
|
||||||
if err := EscapeText(buff, input); err != nil {
|
|
||||||
t.Fatalf("have %v, want nil", err)
|
|
||||||
}
|
|
||||||
text := buff.String()
|
|
||||||
|
|
||||||
if text != expected {
|
|
||||||
t.Errorf("have %v, want %v", text, expected)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestIssue5880(t *testing.T) {
|
|
||||||
type T []byte
|
|
||||||
data, err := Marshal(T{192, 168, 0, 1})
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("Marshal error: %v", err)
|
|
||||||
}
|
|
||||||
if !utf8.Valid(data) {
|
|
||||||
t.Errorf("Marshal generated invalid UTF-8: %x", data)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,94 +0,0 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
This program is a server for the WebDAV 'litmus' compliance test at
http://www.webdav.org/neon/litmus/
To run the test:

go run litmus_test_server.go

and separately, from the downloaded litmus-xxx directory:

make URL=http://localhost:9999/ check
*/
package main

import (
	"flag"
	"fmt"
	"log"
	"net/http"
	"net/url"

	"golang.org/x/net/webdav"
)

var port = flag.Int("port", 9999, "server port")

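Aside from the litmus-specific special casing in main below, the removed server boils down to serving a webdav.Handler. A minimal sketch of that core, assuming only the golang.org/x/net/webdav constructors the file already imports:

package main

import (
	"log"
	"net/http"

	"golang.org/x/net/webdav"
)

func main() {
	h := &webdav.Handler{
		FileSystem: webdav.NewMemFS(), // in-memory filesystem, contents lost on exit
		LockSystem: webdav.NewMemLS(), // in-memory WebDAV lock manager
	}
	http.Handle("/", h)
	log.Fatal(http.ListenAndServe(":9999", nil)) // port 9999 matches the litmus instructions above
}
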
func main() {
|
|
||||||
flag.Parse()
|
|
||||||
log.SetFlags(0)
|
|
||||||
h := &webdav.Handler{
|
|
||||||
FileSystem: webdav.NewMemFS(),
|
|
||||||
LockSystem: webdav.NewMemLS(),
|
|
||||||
Logger: func(r *http.Request, err error) {
|
|
||||||
litmus := r.Header.Get("X-Litmus")
|
|
||||||
if len(litmus) > 19 {
|
|
||||||
litmus = litmus[:16] + "..."
|
|
||||||
}
|
|
||||||
|
|
||||||
switch r.Method {
|
|
||||||
case "COPY", "MOVE":
|
|
||||||
dst := ""
|
|
||||||
if u, err := url.Parse(r.Header.Get("Destination")); err == nil {
|
|
||||||
dst = u.Path
|
|
||||||
}
|
|
||||||
o := r.Header.Get("Overwrite")
|
|
||||||
log.Printf("%-20s%-10s%-30s%-30so=%-2s%v", litmus, r.Method, r.URL.Path, dst, o, err)
|
|
||||||
default:
|
|
||||||
log.Printf("%-20s%-10s%-30s%v", litmus, r.Method, r.URL.Path, err)
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
// The next line would normally be:
|
|
||||||
// http.Handle("/", h)
|
|
||||||
// but we wrap that HTTP handler h to cater for a special case.
|
|
||||||
//
|
|
||||||
// The propfind_invalid2 litmus test case expects an empty namespace prefix
|
|
||||||
// declaration to be an error. The FAQ in the webdav litmus test says:
|
|
||||||
//
|
|
||||||
// "What does the "propfind_invalid2" test check for?...
|
|
||||||
//
|
|
||||||
// If a request was sent with an XML body which included an empty namespace
|
|
||||||
// prefix declaration (xmlns:ns1=""), then the server must reject that with
|
|
||||||
// a "400 Bad Request" response, as it is invalid according to the XML
|
|
||||||
// Namespace specification."
|
|
||||||
//
|
|
||||||
// On the other hand, the Go standard library's encoding/xml package
|
|
||||||
// accepts an empty xmlns namespace, as per the discussion at
|
|
||||||
// https://github.com/golang/go/issues/8068
|
|
||||||
//
|
|
||||||
// Empty namespaces seem disallowed in the second (2006) edition of the XML
|
|
||||||
// standard, but allowed in a later edition. The grammar differs between
|
|
||||||
// http://www.w3.org/TR/2006/REC-xml-names-20060816/#ns-decl and
|
|
||||||
// http://www.w3.org/TR/REC-xml-names/#dt-prefix
|
|
||||||
//
|
|
||||||
// Thus, we assume that the propfind_invalid2 test is obsolete, and
|
|
||||||
// hard-code the 400 Bad Request response that the test expects.
|
|
||||||
http.Handle("/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
||||||
if r.Header.Get("X-Litmus") == "props: 3 (propfind_invalid2)" {
|
|
||||||
http.Error(w, "400 Bad Request", http.StatusBadRequest)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
h.ServeHTTP(w, r)
|
|
||||||
}))
|
|
||||||
|
|
||||||
addr := fmt.Sprintf(":%d", *port)
|
|
||||||
log.Printf("Serving %v", addr)
|
|
||||||
log.Fatal(http.ListenAndServe(addr, nil))
|
|
||||||
}
|
|
@ -415,31 +415,3 @@ func (b *byExpiry) Pop() interface{} {
}

const infiniteTimeout = -1

// parseTimeout parses the Timeout HTTP header, as per section 10.7. If s is
// empty, an infiniteTimeout is returned.
func parseTimeout(s string) (time.Duration, error) {
	if s == "" {
		return infiniteTimeout, nil
	}
	if i := strings.IndexByte(s, ','); i >= 0 {
		s = s[:i]
	}
	s = strings.TrimSpace(s)
	if s == "Infinite" {
		return infiniteTimeout, nil
	}
	const pre = "Second-"
	if !strings.HasPrefix(s, pre) {
		return 0, errInvalidTimeout
	}
	s = s[len(pre):]
	if s == "" || s[0] < '0' || '9' < s[0] {
		return 0, errInvalidTimeout
	}
	n, err := strconv.ParseInt(s, 10, 64)
	if err != nil || 1<<32-1 < n {
		return 0, errInvalidTimeout
	}
	return time.Duration(n) * time.Second, nil
}

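For orientation, the value this function parses arrives on the Timeout header of a LOCK request. A minimal sketch of constructing such a request with the standard net/http package follows; the endpoint URL is made up for illustration.

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical endpoint; the URL and port are illustrative only.
	req, err := http.NewRequest("LOCK", "http://localhost:9999/some/file", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Section 10.7 syntax: "Second-N" (N at most 2^32-1) or "Infinite",
	// optionally given as a comma-separated list of alternatives.
	req.Header.Set("Timeout", "Infinite, Second-4100000000")
	fmt.Println(req.Method, req.Header.Get("Timeout"))
}
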
@ -1,731 +0,0 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package dav

import (
	"fmt"
	"math/rand"
	"path"
	"reflect"
	"sort"
	"strconv"
	"strings"
	"testing"
	"time"
)

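The removed test file below exercises the package's in-memory LockSystem. As a quick orientation, here is a minimal sketch of the same Create / Confirm / Unlock flow written against golang.org/x/net/webdav, whose memLS this package's copy appears to mirror; treat the exact API as an assumption drawn from the tests below.

package main

import (
	"fmt"
	"time"

	"golang.org/x/net/webdav"
)

func main() {
	ls := webdav.NewMemLS()
	now := time.Now()

	// Take an infinite-depth lock on /a.
	token, err := ls.Create(now, webdav.LockDetails{
		Root:      "/a",
		Duration:  -1, // a negative duration means the lock never expires
		ZeroDepth: false,
	})
	if err != nil {
		fmt.Println("create:", err)
		return
	}

	// Confirm (hold) the lock for a request touching /a/b, then release it.
	release, err := ls.Confirm(now, "/a/b", "", webdav.Condition{Token: token})
	if err != nil {
		fmt.Println("confirm:", err)
		return
	}
	release()

	// Finally unlock.
	if err := ls.Unlock(now, token); err != nil {
		fmt.Println("unlock:", err)
	}
}
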
func TestWalkToRoot(t *testing.T) {
|
|
||||||
testCases := []struct {
|
|
||||||
name string
|
|
||||||
want []string
|
|
||||||
}{{
|
|
||||||
"/a/b/c/d",
|
|
||||||
[]string{
|
|
||||||
"/a/b/c/d",
|
|
||||||
"/a/b/c",
|
|
||||||
"/a/b",
|
|
||||||
"/a",
|
|
||||||
"/",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
"/a",
|
|
||||||
[]string{
|
|
||||||
"/a",
|
|
||||||
"/",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
"/",
|
|
||||||
[]string{
|
|
||||||
"/",
|
|
||||||
},
|
|
||||||
}}
|
|
||||||
|
|
||||||
for _, tc := range testCases {
|
|
||||||
var got []string
|
|
||||||
if !walkToRoot(tc.name, func(name0 string, first bool) bool {
|
|
||||||
if first != (len(got) == 0) {
|
|
||||||
t.Errorf("name=%q: first=%t but len(got)==%d", tc.name, first, len(got))
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
got = append(got, name0)
|
|
||||||
return true
|
|
||||||
}) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(got, tc.want) {
|
|
||||||
t.Errorf("name=%q:\ngot %q\nwant %q", tc.name, got, tc.want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var lockTestDurations = []time.Duration{
|
|
||||||
infiniteTimeout, // infiniteTimeout means to never expire.
|
|
||||||
0, // A zero duration means to expire immediately.
|
|
||||||
100 * time.Hour, // A very large duration will not expire in these tests.
|
|
||||||
}
|
|
||||||
|
|
||||||
// lockTestNames are the names of a set of mutually compatible locks. For each
|
|
||||||
// name fragment:
|
|
||||||
// - _ means no explicit lock.
|
|
||||||
// - i means an infinite-depth lock,
|
|
||||||
// - z means a zero-depth lock,
|
|
||||||
var lockTestNames = []string{
|
|
||||||
"/_/_/_/_/z",
|
|
||||||
"/_/_/i",
|
|
||||||
"/_/z",
|
|
||||||
"/_/z/i",
|
|
||||||
"/_/z/z",
|
|
||||||
"/_/z/_/i",
|
|
||||||
"/_/z/_/z",
|
|
||||||
"/i",
|
|
||||||
"/z",
|
|
||||||
"/z/_/i",
|
|
||||||
"/z/_/z",
|
|
||||||
}
|
|
||||||
|
|
||||||
func lockTestZeroDepth(name string) bool {
|
|
||||||
switch name[len(name)-1] {
|
|
||||||
case 'i':
|
|
||||||
return false
|
|
||||||
case 'z':
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
panic(fmt.Sprintf("lock name %q did not end with 'i' or 'z'", name))
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMemLSCanCreate(t *testing.T) {
|
|
||||||
now := time.Unix(0, 0)
|
|
||||||
m := NewMemLS().(*memLS)
|
|
||||||
|
|
||||||
for _, name := range lockTestNames {
|
|
||||||
_, err := m.Create(now, LockDetails{
|
|
||||||
Root: name,
|
|
||||||
Duration: infiniteTimeout,
|
|
||||||
ZeroDepth: lockTestZeroDepth(name),
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("creating lock for %q: %v", name, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
wantCanCreate := func(name string, zeroDepth bool) bool {
|
|
||||||
for _, n := range lockTestNames {
|
|
||||||
switch {
|
|
||||||
case n == name:
|
|
||||||
// An existing lock has the same name as the proposed lock.
|
|
||||||
return false
|
|
||||||
case strings.HasPrefix(n, name):
|
|
||||||
// An existing lock would be a child of the proposed lock,
|
|
||||||
// which conflicts if the proposed lock has infinite depth.
|
|
||||||
if !zeroDepth {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
case strings.HasPrefix(name, n):
|
|
||||||
// An existing lock would be an ancestor of the proposed lock,
|
|
||||||
// which conflicts if the ancestor has infinite depth.
|
|
||||||
if n[len(n)-1] == 'i' {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
var check func(int, string)
|
|
||||||
check = func(recursion int, name string) {
|
|
||||||
for _, zeroDepth := range []bool{false, true} {
|
|
||||||
got := m.canCreate(name, zeroDepth)
|
|
||||||
want := wantCanCreate(name, zeroDepth)
|
|
||||||
if got != want {
|
|
||||||
t.Errorf("canCreate name=%q zeroDepth=%t: got %t, want %t", name, zeroDepth, got, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if recursion == 6 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if name != "/" {
|
|
||||||
name += "/"
|
|
||||||
}
|
|
||||||
for _, c := range "_iz" {
|
|
||||||
check(recursion+1, name+string(c))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
check(0, "/")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMemLSLookup(t *testing.T) {
|
|
||||||
now := time.Unix(0, 0)
|
|
||||||
m := NewMemLS().(*memLS)
|
|
||||||
|
|
||||||
badToken := m.nextToken()
|
|
||||||
t.Logf("badToken=%q", badToken)
|
|
||||||
|
|
||||||
for _, name := range lockTestNames {
|
|
||||||
token, err := m.Create(now, LockDetails{
|
|
||||||
Root: name,
|
|
||||||
Duration: infiniteTimeout,
|
|
||||||
ZeroDepth: lockTestZeroDepth(name),
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("creating lock for %q: %v", name, err)
|
|
||||||
}
|
|
||||||
t.Logf("%-15q -> node=%p token=%q", name, m.byName[name], token)
|
|
||||||
}
|
|
||||||
|
|
||||||
baseNames := append([]string{"/a", "/b/c"}, lockTestNames...)
|
|
||||||
for _, baseName := range baseNames {
|
|
||||||
for _, suffix := range []string{"", "/0", "/1/2/3"} {
|
|
||||||
name := baseName + suffix
|
|
||||||
|
|
||||||
goodToken := ""
|
|
||||||
base := m.byName[baseName]
|
|
||||||
if base != nil && (suffix == "" || !lockTestZeroDepth(baseName)) {
|
|
||||||
goodToken = base.token
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, token := range []string{badToken, goodToken} {
|
|
||||||
if token == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
got := m.lookup(name, Condition{Token: token})
|
|
||||||
want := base
|
|
||||||
if token == badToken {
|
|
||||||
want = nil
|
|
||||||
}
|
|
||||||
if got != want {
|
|
||||||
t.Errorf("name=%-20qtoken=%q (bad=%t): got %p, want %p",
|
|
||||||
name, token, token == badToken, got, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMemLSConfirm(t *testing.T) {
|
|
||||||
now := time.Unix(0, 0)
|
|
||||||
m := NewMemLS().(*memLS)
|
|
||||||
alice, err := m.Create(now, LockDetails{
|
|
||||||
Root: "/alice",
|
|
||||||
Duration: infiniteTimeout,
|
|
||||||
ZeroDepth: false,
|
|
||||||
})
|
|
||||||
tweedle, err := m.Create(now, LockDetails{
|
|
||||||
Root: "/tweedle",
|
|
||||||
Duration: infiniteTimeout,
|
|
||||||
ZeroDepth: false,
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Create: %v", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Create: inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test a mismatch between name and condition.
|
|
||||||
_, err = m.Confirm(now, "/tweedle/dee", "", Condition{Token: alice})
|
|
||||||
if err != ErrConfirmationFailed {
|
|
||||||
t.Fatalf("Confirm (mismatch): got %v, want ErrConfirmationFailed", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Confirm (mismatch): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test two names (that fall under the same lock) in the one Confirm call.
|
|
||||||
release, err := m.Confirm(now, "/tweedle/dee", "/tweedle/dum", Condition{Token: tweedle})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Confirm (twins): %v", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Confirm (twins): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
release()
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("release (twins): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test the same two names in overlapping Confirm / release calls.
|
|
||||||
releaseDee, err := m.Confirm(now, "/tweedle/dee", "", Condition{Token: tweedle})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Confirm (sequence #0): %v", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Confirm (sequence #0): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = m.Confirm(now, "/tweedle/dum", "", Condition{Token: tweedle})
|
|
||||||
if err != ErrConfirmationFailed {
|
|
||||||
t.Fatalf("Confirm (sequence #1): got %v, want ErrConfirmationFailed", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Confirm (sequence #1): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
releaseDee()
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("release (sequence #2): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
releaseDum, err := m.Confirm(now, "/tweedle/dum", "", Condition{Token: tweedle})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Confirm (sequence #3): %v", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Confirm (sequence #3): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test that you can't unlock a held lock.
|
|
||||||
err = m.Unlock(now, tweedle)
|
|
||||||
if err != ErrLocked {
|
|
||||||
t.Fatalf("Unlock (sequence #4): got %v, want ErrLocked", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
releaseDum()
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("release (sequence #5): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = m.Unlock(now, tweedle)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Unlock (sequence #6): %v", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Unlock (sequence #6): inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMemLSNonCanonicalRoot(t *testing.T) {
|
|
||||||
now := time.Unix(0, 0)
|
|
||||||
m := NewMemLS().(*memLS)
|
|
||||||
token, err := m.Create(now, LockDetails{
|
|
||||||
Root: "/foo/./bar//",
|
|
||||||
Duration: 1 * time.Second,
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Create: %v", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Create: inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
if err := m.Unlock(now, token); err != nil {
|
|
||||||
t.Fatalf("Unlock: %v", err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("Unlock: inconsistent state: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMemLSExpiry(t *testing.T) {
|
|
||||||
m := NewMemLS().(*memLS)
|
|
||||||
testCases := []string{
|
|
||||||
"setNow 0",
|
|
||||||
"create /a.5",
|
|
||||||
"want /a.5",
|
|
||||||
"create /c.6",
|
|
||||||
"want /a.5 /c.6",
|
|
||||||
"create /a/b.7",
|
|
||||||
"want /a.5 /a/b.7 /c.6",
|
|
||||||
"setNow 4",
|
|
||||||
"want /a.5 /a/b.7 /c.6",
|
|
||||||
"setNow 5",
|
|
||||||
"want /a/b.7 /c.6",
|
|
||||||
"setNow 6",
|
|
||||||
"want /a/b.7",
|
|
||||||
"setNow 7",
|
|
||||||
"want ",
|
|
||||||
"setNow 8",
|
|
||||||
"want ",
|
|
||||||
"create /a.12",
|
|
||||||
"create /b.13",
|
|
||||||
"create /c.15",
|
|
||||||
"create /a/d.16",
|
|
||||||
"want /a.12 /a/d.16 /b.13 /c.15",
|
|
||||||
"refresh /a.14",
|
|
||||||
"want /a.14 /a/d.16 /b.13 /c.15",
|
|
||||||
"setNow 12",
|
|
||||||
"want /a.14 /a/d.16 /b.13 /c.15",
|
|
||||||
"setNow 13",
|
|
||||||
"want /a.14 /a/d.16 /c.15",
|
|
||||||
"setNow 14",
|
|
||||||
"want /a/d.16 /c.15",
|
|
||||||
"refresh /a/d.20",
|
|
||||||
"refresh /c.20",
|
|
||||||
"want /a/d.20 /c.20",
|
|
||||||
"setNow 20",
|
|
||||||
"want ",
|
|
||||||
}
|
|
||||||
|
|
||||||
tokens := map[string]string{}
|
|
||||||
zTime := time.Unix(0, 0)
|
|
||||||
now := zTime
|
|
||||||
for i, tc := range testCases {
|
|
||||||
j := strings.IndexByte(tc, ' ')
|
|
||||||
if j < 0 {
|
|
||||||
t.Fatalf("test case #%d %q: invalid command", i, tc)
|
|
||||||
}
|
|
||||||
op, arg := tc[:j], tc[j+1:]
|
|
||||||
switch op {
|
|
||||||
default:
|
|
||||||
t.Fatalf("test case #%d %q: invalid operation %q", i, tc, op)
|
|
||||||
|
|
||||||
case "create", "refresh":
|
|
||||||
parts := strings.Split(arg, ".")
|
|
||||||
if len(parts) != 2 {
|
|
||||||
t.Fatalf("test case #%d %q: invalid create", i, tc)
|
|
||||||
}
|
|
||||||
root := parts[0]
|
|
||||||
d, err := strconv.Atoi(parts[1])
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("test case #%d %q: invalid duration", i, tc)
|
|
||||||
}
|
|
||||||
dur := time.Unix(0, 0).Add(time.Duration(d) * time.Second).Sub(now)
|
|
||||||
|
|
||||||
switch op {
|
|
||||||
case "create":
|
|
||||||
token, err := m.Create(now, LockDetails{
|
|
||||||
Root: root,
|
|
||||||
Duration: dur,
|
|
||||||
ZeroDepth: true,
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("test case #%d %q: Create: %v", i, tc, err)
|
|
||||||
}
|
|
||||||
tokens[root] = token
|
|
||||||
|
|
||||||
case "refresh":
|
|
||||||
token := tokens[root]
|
|
||||||
if token == "" {
|
|
||||||
t.Fatalf("test case #%d %q: no token for %q", i, tc, root)
|
|
||||||
}
|
|
||||||
got, err := m.Refresh(now, token, dur)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("test case #%d %q: Refresh: %v", i, tc, err)
|
|
||||||
}
|
|
||||||
want := LockDetails{
|
|
||||||
Root: root,
|
|
||||||
Duration: dur,
|
|
||||||
ZeroDepth: true,
|
|
||||||
}
|
|
||||||
if got != want {
|
|
||||||
t.Fatalf("test case #%d %q:\ngot %v\nwant %v", i, tc, got, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case "setNow":
|
|
||||||
d, err := strconv.Atoi(arg)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("test case #%d %q: invalid duration", i, tc)
|
|
||||||
}
|
|
||||||
now = time.Unix(0, 0).Add(time.Duration(d) * time.Second)
|
|
||||||
|
|
||||||
case "want":
|
|
||||||
m.mu.Lock()
|
|
||||||
m.collectExpiredNodes(now)
|
|
||||||
got := make([]string, 0, len(m.byToken))
|
|
||||||
for _, n := range m.byToken {
|
|
||||||
got = append(got, fmt.Sprintf("%s.%d",
|
|
||||||
n.details.Root, n.expiry.Sub(zTime)/time.Second))
|
|
||||||
}
|
|
||||||
m.mu.Unlock()
|
|
||||||
sort.Strings(got)
|
|
||||||
want := []string{}
|
|
||||||
if arg != "" {
|
|
||||||
want = strings.Split(arg, " ")
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(got, want) {
|
|
||||||
t.Fatalf("test case #%d %q:\ngot %q\nwant %q", i, tc, got, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("test case #%d %q: inconsistent state: %v", i, tc, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMemLS(t *testing.T) {
|
|
||||||
now := time.Unix(0, 0)
|
|
||||||
m := NewMemLS().(*memLS)
|
|
||||||
rng := rand.New(rand.NewSource(0))
|
|
||||||
tokens := map[string]string{}
|
|
||||||
nConfirm, nCreate, nRefresh, nUnlock := 0, 0, 0, 0
|
|
||||||
const N = 2000
|
|
||||||
|
|
||||||
for i := 0; i < N; i++ {
|
|
||||||
name := lockTestNames[rng.Intn(len(lockTestNames))]
|
|
||||||
duration := lockTestDurations[rng.Intn(len(lockTestDurations))]
|
|
||||||
confirmed, unlocked := false, false
|
|
||||||
|
|
||||||
// If the name was already locked, we randomly confirm/release, refresh
|
|
||||||
// or unlock it. Otherwise, we create a lock.
|
|
||||||
token := tokens[name]
|
|
||||||
if token != "" {
|
|
||||||
switch rng.Intn(3) {
|
|
||||||
case 0:
|
|
||||||
confirmed = true
|
|
||||||
nConfirm++
|
|
||||||
release, err := m.Confirm(now, name, "", Condition{Token: token})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("iteration #%d: Confirm %q: %v", i, name, err)
|
|
||||||
}
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("iteration #%d: inconsistent state: %v", i, err)
|
|
||||||
}
|
|
||||||
release()
|
|
||||||
|
|
||||||
case 1:
|
|
||||||
nRefresh++
|
|
||||||
if _, err := m.Refresh(now, token, duration); err != nil {
|
|
||||||
t.Fatalf("iteration #%d: Refresh %q: %v", i, name, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
case 2:
|
|
||||||
unlocked = true
|
|
||||||
nUnlock++
|
|
||||||
if err := m.Unlock(now, token); err != nil {
|
|
||||||
t.Fatalf("iteration #%d: Unlock %q: %v", i, name, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
} else {
|
|
||||||
nCreate++
|
|
||||||
var err error
|
|
||||||
token, err = m.Create(now, LockDetails{
|
|
||||||
Root: name,
|
|
||||||
Duration: duration,
|
|
||||||
ZeroDepth: lockTestZeroDepth(name),
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("iteration #%d: Create %q: %v", i, name, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !confirmed {
|
|
||||||
if duration == 0 || unlocked {
|
|
||||||
// A zero-duration lock should expire immediately and is
|
|
||||||
// effectively equivalent to being unlocked.
|
|
||||||
tokens[name] = ""
|
|
||||||
} else {
|
|
||||||
tokens[name] = token
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := m.consistent(); err != nil {
|
|
||||||
t.Fatalf("iteration #%d: inconsistent state: %v", i, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if nConfirm < N/10 {
|
|
||||||
t.Fatalf("too few Confirm calls: got %d, want >= %d", nConfirm, N/10)
|
|
||||||
}
|
|
||||||
if nCreate < N/10 {
|
|
||||||
t.Fatalf("too few Create calls: got %d, want >= %d", nCreate, N/10)
|
|
||||||
}
|
|
||||||
if nRefresh < N/10 {
|
|
||||||
t.Fatalf("too few Refresh calls: got %d, want >= %d", nRefresh, N/10)
|
|
||||||
}
|
|
||||||
if nUnlock < N/10 {
|
|
||||||
t.Fatalf("too few Unlock calls: got %d, want >= %d", nUnlock, N/10)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *memLS) consistent() error {
|
|
||||||
m.mu.Lock()
|
|
||||||
defer m.mu.Unlock()
|
|
||||||
|
|
||||||
// If m.byName is non-empty, then it must contain an entry for the root "/",
|
|
||||||
// and its refCount should equal the number of locked nodes.
|
|
||||||
if len(m.byName) > 0 {
|
|
||||||
n := m.byName["/"]
|
|
||||||
if n == nil {
|
|
||||||
return fmt.Errorf(`non-empty m.byName does not contain the root "/"`)
|
|
||||||
}
|
|
||||||
if n.refCount != len(m.byToken) {
|
|
||||||
return fmt.Errorf("root node refCount=%d, differs from len(m.byToken)=%d", n.refCount, len(m.byToken))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for name, n := range m.byName {
|
|
||||||
// The map keys should be consistent with the node's copy of the key.
|
|
||||||
if n.details.Root != name {
|
|
||||||
return fmt.Errorf("node name %q != byName map key %q", n.details.Root, name)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A name must be clean, and start with a "/".
|
|
||||||
if len(name) == 0 || name[0] != '/' {
|
|
||||||
return fmt.Errorf(`node name %q does not start with "/"`, name)
|
|
||||||
}
|
|
||||||
if name != path.Clean(name) {
|
|
||||||
return fmt.Errorf(`node name %q is not clean`, name)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A node's refCount should be positive.
|
|
||||||
if n.refCount <= 0 {
|
|
||||||
return fmt.Errorf("non-positive refCount for node at name %q", name)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A node's refCount should be the number of self-or-descendents that
|
|
||||||
// are locked (i.e. have a non-empty token).
|
|
||||||
var list []string
|
|
||||||
for name0, n0 := range m.byName {
|
|
||||||
// All of lockTestNames' name fragments are one byte long: '_', 'i' or 'z',
|
|
||||||
// so strings.HasPrefix is equivalent to self-or-descendent name match.
|
|
||||||
// We don't have to worry about "/foo/bar" being a false positive match
|
|
||||||
// for "/foo/b".
|
|
||||||
if strings.HasPrefix(name0, name) && n0.token != "" {
|
|
||||||
list = append(list, name0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if n.refCount != len(list) {
|
|
||||||
sort.Strings(list)
|
|
||||||
return fmt.Errorf("node at name %q has refCount %d but locked self-or-descendents are %q (len=%d)",
|
|
||||||
name, n.refCount, list, len(list))
|
|
||||||
}
|
|
||||||
|
|
||||||
// A node n is in m.byToken if it has a non-empty token.
|
|
||||||
if n.token != "" {
|
|
||||||
if _, ok := m.byToken[n.token]; !ok {
|
|
||||||
return fmt.Errorf("node at name %q has token %q but not in m.byToken", name, n.token)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// A node n is in m.byExpiry if it has a non-negative byExpiryIndex.
|
|
||||||
if n.byExpiryIndex >= 0 {
|
|
||||||
if n.byExpiryIndex >= len(m.byExpiry) {
|
|
||||||
return fmt.Errorf("node at name %q has byExpiryIndex %d but m.byExpiry has length %d", name, n.byExpiryIndex, len(m.byExpiry))
|
|
||||||
}
|
|
||||||
if n != m.byExpiry[n.byExpiryIndex] {
|
|
||||||
return fmt.Errorf("node at name %q has byExpiryIndex %d but that indexes a different node", name, n.byExpiryIndex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for token, n := range m.byToken {
|
|
||||||
// The map keys should be consistent with the node's copy of the key.
|
|
||||||
if n.token != token {
|
|
||||||
return fmt.Errorf("node token %q != byToken map key %q", n.token, token)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Every node in m.byToken is in m.byName.
|
|
||||||
if _, ok := m.byName[n.details.Root]; !ok {
|
|
||||||
return fmt.Errorf("node at name %q in m.byToken but not in m.byName", n.details.Root)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, n := range m.byExpiry {
|
|
||||||
// The slice indices should be consistent with the node's copy of the index.
|
|
||||||
if n.byExpiryIndex != i {
|
|
||||||
return fmt.Errorf("node byExpiryIndex %d != byExpiry slice index %d", n.byExpiryIndex, i)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Every node in m.byExpiry is in m.byName.
|
|
||||||
if _, ok := m.byName[n.details.Root]; !ok {
|
|
||||||
return fmt.Errorf("node at name %q in m.byExpiry but not in m.byName", n.details.Root)
|
|
||||||
}
|
|
||||||
|
|
||||||
// No node in m.byExpiry should be held.
|
|
||||||
if n.held {
|
|
||||||
return fmt.Errorf("node at name %q in m.byExpiry is held", n.details.Root)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
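
// A minimal sketch of how consistent can back a focused test (an assumption,
// not part of the removed file: it presumes NewMemLS returns a *memLS behind
// the LockSystem interface, and the test name is hypothetical).
func TestConsistentAfterCreateUnlock(t *testing.T) {
	m := NewMemLS().(*memLS)
	now := time.Now()
	// Take an exclusive, zero-depth lock on a single resource.
	token, err := m.Create(now, LockDetails{Root: "/x", Duration: infiniteTimeout, ZeroDepth: true})
	if err != nil {
		t.Fatalf("Create: %v", err)
	}
	if err := m.consistent(); err != nil {
		t.Fatalf("inconsistent after Create: %v", err)
	}
	if err := m.Unlock(now, token); err != nil {
		t.Fatalf("Unlock: %v", err)
	}
	if err := m.consistent(); err != nil {
		t.Fatalf("inconsistent after Unlock: %v", err)
	}
}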

func TestParseTimeout(t *testing.T) {
	testCases := []struct {
		s       string
		want    time.Duration
		wantErr error
	}{{
		"",
		infiniteTimeout,
		nil,
	}, {
		"Infinite",
		infiniteTimeout,
		nil,
	}, {
		"Infinitesimal",
		0,
		errInvalidTimeout,
	}, {
		"infinite",
		0,
		errInvalidTimeout,
	}, {
		"Second-0",
		0 * time.Second,
		nil,
	}, {
		"Second-123",
		123 * time.Second,
		nil,
	}, {
		" Second-456 ",
		456 * time.Second,
		nil,
	}, {
		"Second-4100000000",
		4100000000 * time.Second,
		nil,
	}, {
		"junk",
		0,
		errInvalidTimeout,
	}, {
		"Second-",
		0,
		errInvalidTimeout,
	}, {
		"Second--1",
		0,
		errInvalidTimeout,
	}, {
		"Second--123",
		0,
		errInvalidTimeout,
	}, {
		"Second-+123",
		0,
		errInvalidTimeout,
	}, {
		"Second-0x123",
		0,
		errInvalidTimeout,
	}, {
		"second-123",
		0,
		errInvalidTimeout,
	}, {
		"Second-4294967295",
		4294967295 * time.Second,
		nil,
	}, {
		// Section 10.7 says that "The timeout value for TimeType "Second"
		// must not be greater than 2^32-1."
		"Second-4294967296",
		0,
		errInvalidTimeout,
	}, {
		// This test case comes from section 9.10.9 of the spec. It says,
		//
		// "In this request, the client has specified that it desires an
		// infinite-length lock, if available, otherwise a timeout of 4.1
		// billion seconds, if available."
		//
		// The Go WebDAV package always supports infinite length locks,
		// and ignores the fallback after the comma.
		"Infinite, Second-4100000000",
		infiniteTimeout,
		nil,
	}}

	for _, tc := range testCases {
		got, gotErr := parseTimeout(tc.s)
		if got != tc.want || gotErr != tc.wantErr {
			t.Errorf("parsing %q:\ngot %v, %v\nwant %v, %v", tc.s, got, gotErr, tc.want, tc.wantErr)
		}
	}
}
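
// A sketch of a parser that satisfies the table above (an assumption: this is
// not necessarily the package's own parseTimeout, and the name is
// hypothetical; it needs strconv, strings and time imported). "" and
// "Infinite" mean infiniteTimeout, "Second-N" with 0 <= N <= 4294967295 is N
// seconds, only the part before the first comma counts, and anything else is
// errInvalidTimeout.
func parseTimeoutSketch(s string) (time.Duration, error) {
	if i := strings.IndexByte(s, ','); i >= 0 {
		s = s[:i]
	}
	s = strings.TrimSpace(s)
	if s == "" || s == "Infinite" {
		return infiniteTimeout, nil
	}
	const prefix = "Second-"
	if !strings.HasPrefix(s, prefix) {
		return 0, errInvalidTimeout
	}
	digits := s[len(prefix):]
	// Reject empty values and explicit signs such as "Second--1" or "Second-+123".
	if digits == "" || digits[0] < '0' || digits[0] > '9' {
		return 0, errInvalidTimeout
	}
	n, err := strconv.ParseUint(digits, 10, 64)
	if err != nil || n > 4294967295 {
		return 0, errInvalidTimeout
	}
	return time.Duration(n) * time.Second, nil
}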

@ -272,70 +272,6 @@ func allprop(ctx context.Context, fs FileSystem, ls LockSystem, name string, inc
	return props(ctx, fs, ls, name, pnames)
}

// Patch patches the properties of resource name. The return values are
// constrained in the same manner as DeadPropsHolder.Patch.
func patch(ctx context.Context, fs FileSystem, ls LockSystem, name string, patches []Proppatch) ([]Propstat, error) {
	conflict := false
loop:
	for _, patch := range patches {
		for _, p := range patch.Props {
			if _, ok := liveProps[p.XMLName]; ok {
				conflict = true
				break loop
			}
		}
	}
	if conflict {
		pstatForbidden := Propstat{
			Status:   http.StatusForbidden,
			XMLError: `<D:cannot-modify-protected-property xmlns:D="DAV:"/>`,
		}
		pstatFailedDep := Propstat{
			Status: StatusFailedDependency,
		}
		for _, patch := range patches {
			for _, p := range patch.Props {
				if _, ok := liveProps[p.XMLName]; ok {
					pstatForbidden.Props = append(pstatForbidden.Props, Property{XMLName: p.XMLName})
				} else {
					pstatFailedDep.Props = append(pstatFailedDep.Props, Property{XMLName: p.XMLName})
				}
			}
		}
		return makePropstats(pstatForbidden, pstatFailedDep), nil
	}

	f, err := fs.OpenFile(ctx, name, os.O_RDWR, 0)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	if dph, ok := f.(DeadPropsHolder); ok {
		ret, err := dph.Patch(patches)
		if err != nil {
			return nil, err
		}
		// http://www.webdav.org/specs/rfc4918.html#ELEMENT_propstat says that
		// "The contents of the prop XML element must only list the names of
		// properties to which the result in the status element applies."
		for _, pstat := range ret {
			for i, p := range pstat.Props {
				pstat.Props[i] = Property{XMLName: p.XMLName}
			}
		}
		return ret, nil
	}
	// The file doesn't implement the optional DeadPropsHolder interface, so
	// all patches are forbidden.
	pstat := Propstat{Status: http.StatusForbidden}
	for _, patch := range patches {
		for _, p := range patch.Props {
			pstat.Props = append(pstat.Props, Property{XMLName: p.XMLName})
		}
	}
	return []Propstat{pstat}, nil
}

func EscapeXML(s string) string {
	for i := 0; i < len(s); i++ {
		// As an optimization, if s contains only ASCII letters, digits or a
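
// A sketch of a file type that keeps dead properties in memory, so that patch
// above takes the DeadPropsHolder path rather than rejecting everything
// (assumptions: propFile is a hypothetical name, sync is imported, and the
// interface keeps the golang.org/x/net/webdav shape with a DeadProps()
// (map[xml.Name]Property, error) method alongside Patch).
type propFile struct {
	File
	mu        sync.Mutex
	deadProps map[xml.Name]Property
}

func (f *propFile) DeadProps() (map[xml.Name]Property, error) {
	f.mu.Lock()
	defer f.mu.Unlock()
	out := make(map[xml.Name]Property, len(f.deadProps))
	for k, v := range f.deadProps {
		out[k] = v
	}
	return out, nil
}

func (f *propFile) Patch(patches []Proppatch) ([]Propstat, error) {
	f.mu.Lock()
	defer f.mu.Unlock()
	// Every patched name is reported back with a single 200 OK Propstat,
	// mirroring what the tests in this package expect from a dead-props store.
	pstat := Propstat{Status: http.StatusOK}
	for _, patch := range patches {
		for _, p := range patch.Props {
			pstat.Props = append(pstat.Props, Property{XMLName: p.XMLName})
			if patch.Remove {
				delete(f.deadProps, p.XMLName)
				continue
			}
			if f.deadProps == nil {
				f.deadProps = map[xml.Name]Property{}
			}
			f.deadProps[p.XMLName] = p
		}
	}
	return []Propstat{pstat}, nil
}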
@ -1,716 +0,0 @@
|
|||||||
// Copyright 2015 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package dav
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/xml"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
"os"
|
|
||||||
"reflect"
|
|
||||||
"regexp"
|
|
||||||
"sort"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestMemPS(t *testing.T) {
|
|
||||||
ctx := context.Background()
|
|
||||||
// calcProps calculates the getlastmodified and getetag DAV: property
|
|
||||||
// values in pstats for resource name in file-system fs.
|
|
||||||
calcProps := func(name string, fs FileSystem, ls LockSystem, pstats []Propstat) error {
|
|
||||||
fi, err := fs.Stat(ctx, name)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, pst := range pstats {
|
|
||||||
for i, p := range pst.Props {
|
|
||||||
switch p.XMLName {
|
|
||||||
case xml.Name{Space: "DAV:", Local: "getlastmodified"}:
|
|
||||||
p.InnerXML = []byte(fi.ModTime().UTC().Format(http.TimeFormat))
|
|
||||||
pst.Props[i] = p
|
|
||||||
case xml.Name{Space: "DAV:", Local: "getetag"}:
|
|
||||||
if fi.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
etag, err := findETag(ctx, fs, ls, name, fi)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
p.InnerXML = []byte(etag)
|
|
||||||
pst.Props[i] = p
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
|
||||||
lockEntry = `` +
|
|
||||||
`<D:lockentry xmlns:D="DAV:">` +
|
|
||||||
`<D:lockscope><D:exclusive/></D:lockscope>` +
|
|
||||||
`<D:locktype><D:write/></D:locktype>` +
|
|
||||||
`</D:lockentry>`
|
|
||||||
statForbiddenError = `<D:cannot-modify-protected-property xmlns:D="DAV:"/>`
|
|
||||||
)
|
|
||||||
|
|
||||||
type propOp struct {
|
|
||||||
op string
|
|
||||||
name string
|
|
||||||
pnames []xml.Name
|
|
||||||
patches []Proppatch
|
|
||||||
wantPnames []xml.Name
|
|
||||||
wantPropstats []Propstat
|
|
||||||
}
|
|
||||||
|
|
||||||
testCases := []struct {
|
|
||||||
desc string
|
|
||||||
noDeadProps bool
|
|
||||||
buildfs []string
|
|
||||||
propOp []propOp
|
|
||||||
}{{
|
|
||||||
desc: "propname",
|
|
||||||
buildfs: []string{"mkdir /dir", "touch /file"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "propname",
|
|
||||||
name: "/dir",
|
|
||||||
wantPnames: []xml.Name{
|
|
||||||
{Space: "DAV:", Local: "resourcetype"},
|
|
||||||
{Space: "DAV:", Local: "displayname"},
|
|
||||||
{Space: "DAV:", Local: "supportedlock"},
|
|
||||||
{Space: "DAV:", Local: "getlastmodified"},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
op: "propname",
|
|
||||||
name: "/file",
|
|
||||||
wantPnames: []xml.Name{
|
|
||||||
{Space: "DAV:", Local: "resourcetype"},
|
|
||||||
{Space: "DAV:", Local: "displayname"},
|
|
||||||
{Space: "DAV:", Local: "getcontentlength"},
|
|
||||||
{Space: "DAV:", Local: "getlastmodified"},
|
|
||||||
{Space: "DAV:", Local: "getcontenttype"},
|
|
||||||
{Space: "DAV:", Local: "getetag"},
|
|
||||||
{Space: "DAV:", Local: "supportedlock"},
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "allprop dir and file",
|
|
||||||
buildfs: []string{"mkdir /dir", "write /file foobarbaz"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "allprop",
|
|
||||||
name: "/dir",
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "resourcetype"},
|
|
||||||
InnerXML: []byte(`<D:collection xmlns:D="DAV:"/>`),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "displayname"},
|
|
||||||
InnerXML: []byte("dir"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getlastmodified"},
|
|
||||||
InnerXML: nil, // Calculated during test.
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "supportedlock"},
|
|
||||||
InnerXML: []byte(lockEntry),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "allprop",
|
|
||||||
name: "/file",
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "resourcetype"},
|
|
||||||
InnerXML: []byte(""),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "displayname"},
|
|
||||||
InnerXML: []byte("file"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getcontentlength"},
|
|
||||||
InnerXML: []byte("9"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getlastmodified"},
|
|
||||||
InnerXML: nil, // Calculated during test.
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getcontenttype"},
|
|
||||||
InnerXML: []byte("text/plain; charset=utf-8"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getetag"},
|
|
||||||
InnerXML: nil, // Calculated during test.
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "supportedlock"},
|
|
||||||
InnerXML: []byte(lockEntry),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "allprop",
|
|
||||||
name: "/file",
|
|
||||||
pnames: []xml.Name{
|
|
||||||
{"DAV:", "resourcetype"},
|
|
||||||
{"foo", "bar"},
|
|
||||||
},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "resourcetype"},
|
|
||||||
InnerXML: []byte(""),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "displayname"},
|
|
||||||
InnerXML: []byte("file"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getcontentlength"},
|
|
||||||
InnerXML: []byte("9"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getlastmodified"},
|
|
||||||
InnerXML: nil, // Calculated during test.
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getcontenttype"},
|
|
||||||
InnerXML: []byte("text/plain; charset=utf-8"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getetag"},
|
|
||||||
InnerXML: nil, // Calculated during test.
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "supportedlock"},
|
|
||||||
InnerXML: []byte(lockEntry),
|
|
||||||
}}}, {
|
|
||||||
Status: http.StatusNotFound,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}}},
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "propfind DAV:resourcetype",
|
|
||||||
buildfs: []string{"mkdir /dir", "touch /file"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{{"DAV:", "resourcetype"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "resourcetype"},
|
|
||||||
InnerXML: []byte(`<D:collection xmlns:D="DAV:"/>`),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "propfind",
|
|
||||||
name: "/file",
|
|
||||||
pnames: []xml.Name{{"DAV:", "resourcetype"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "resourcetype"},
|
|
||||||
InnerXML: []byte(""),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "propfind unsupported DAV properties",
|
|
||||||
buildfs: []string{"mkdir /dir"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{{"DAV:", "getcontentlanguage"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusNotFound,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getcontentlanguage"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{{"DAV:", "creationdate"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusNotFound,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "creationdate"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "propfind getetag for files but not for directories",
|
|
||||||
buildfs: []string{"mkdir /dir", "touch /file"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{{"DAV:", "getetag"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusNotFound,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getetag"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "propfind",
|
|
||||||
name: "/file",
|
|
||||||
pnames: []xml.Name{{"DAV:", "getetag"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getetag"},
|
|
||||||
InnerXML: nil, // Calculated during test.
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "proppatch property on no-dead-properties file system",
|
|
||||||
buildfs: []string{"mkdir /dir"},
|
|
||||||
noDeadProps: true,
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "proppatch",
|
|
||||||
name: "/dir",
|
|
||||||
patches: []Proppatch{{
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusForbidden,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "proppatch",
|
|
||||||
name: "/dir",
|
|
||||||
patches: []Proppatch{{
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getetag"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusForbidden,
|
|
||||||
XMLError: statForbiddenError,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "getetag"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "proppatch dead property",
|
|
||||||
buildfs: []string{"mkdir /dir"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "proppatch",
|
|
||||||
name: "/dir",
|
|
||||||
patches: []Proppatch{{
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
InnerXML: []byte("baz"),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{{Space: "foo", Local: "bar"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
InnerXML: []byte("baz"),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "proppatch dead property with failed dependency",
|
|
||||||
buildfs: []string{"mkdir /dir"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "proppatch",
|
|
||||||
name: "/dir",
|
|
||||||
patches: []Proppatch{{
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
InnerXML: []byte("baz"),
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "displayname"},
|
|
||||||
InnerXML: []byte("xxx"),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusForbidden,
|
|
||||||
XMLError: statForbiddenError,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "DAV:", Local: "displayname"},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
Status: StatusFailedDependency,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{{Space: "foo", Local: "bar"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusNotFound,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "proppatch remove dead property",
|
|
||||||
buildfs: []string{"mkdir /dir"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "proppatch",
|
|
||||||
name: "/dir",
|
|
||||||
patches: []Proppatch{{
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
InnerXML: []byte("baz"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "spam", Local: "ham"},
|
|
||||||
InnerXML: []byte("eggs"),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "spam", Local: "ham"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{
|
|
||||||
{Space: "foo", Local: "bar"},
|
|
||||||
{Space: "spam", Local: "ham"},
|
|
||||||
},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
InnerXML: []byte("baz"),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "spam", Local: "ham"},
|
|
||||||
InnerXML: []byte("eggs"),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "proppatch",
|
|
||||||
name: "/dir",
|
|
||||||
patches: []Proppatch{{
|
|
||||||
Remove: true,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{
|
|
||||||
{Space: "foo", Local: "bar"},
|
|
||||||
{Space: "spam", Local: "ham"},
|
|
||||||
},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusNotFound,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "spam", Local: "ham"},
|
|
||||||
InnerXML: []byte("eggs"),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "propname with dead property",
|
|
||||||
buildfs: []string{"touch /file"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "proppatch",
|
|
||||||
name: "/file",
|
|
||||||
patches: []Proppatch{{
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
InnerXML: []byte("baz"),
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
op: "propname",
|
|
||||||
name: "/file",
|
|
||||||
wantPnames: []xml.Name{
|
|
||||||
{Space: "DAV:", Local: "resourcetype"},
|
|
||||||
{Space: "DAV:", Local: "displayname"},
|
|
||||||
{Space: "DAV:", Local: "getcontentlength"},
|
|
||||||
{Space: "DAV:", Local: "getlastmodified"},
|
|
||||||
{Space: "DAV:", Local: "getcontenttype"},
|
|
||||||
{Space: "DAV:", Local: "getetag"},
|
|
||||||
{Space: "DAV:", Local: "supportedlock"},
|
|
||||||
{Space: "foo", Local: "bar"},
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "proppatch remove unknown dead property",
|
|
||||||
buildfs: []string{"mkdir /dir"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "proppatch",
|
|
||||||
name: "/dir",
|
|
||||||
patches: []Proppatch{{
|
|
||||||
Remove: true,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusOK,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "bad: propfind unknown property",
|
|
||||||
buildfs: []string{"mkdir /dir"},
|
|
||||||
propOp: []propOp{{
|
|
||||||
op: "propfind",
|
|
||||||
name: "/dir",
|
|
||||||
pnames: []xml.Name{{"foo:", "bar"}},
|
|
||||||
wantPropstats: []Propstat{{
|
|
||||||
Status: http.StatusNotFound,
|
|
||||||
Props: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "foo:", Local: "bar"},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}}
|
|
||||||
|
|
||||||
for _, tc := range testCases {
|
|
||||||
fs, err := buildTestFS(tc.buildfs)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("%s: cannot create test filesystem: %v", tc.desc, err)
|
|
||||||
}
|
|
||||||
if tc.noDeadProps {
|
|
||||||
fs = noDeadPropsFS{fs}
|
|
||||||
}
|
|
||||||
ls := NewMemLS()
|
|
||||||
for _, op := range tc.propOp {
|
|
||||||
desc := fmt.Sprintf("%s: %s %s", tc.desc, op.op, op.name)
|
|
||||||
if err = calcProps(op.name, fs, ls, op.wantPropstats); err != nil {
|
|
||||||
t.Fatalf("%s: calcProps: %v", desc, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Call property system.
|
|
||||||
var propstats []Propstat
|
|
||||||
switch op.op {
|
|
||||||
case "propname":
|
|
||||||
pnames, err := Propnames(ctx, fs, ls, op.name)
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("%s: got error %v, want nil", desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
sort.Sort(byXMLName(pnames))
|
|
||||||
sort.Sort(byXMLName(op.wantPnames))
|
|
||||||
if !reflect.DeepEqual(pnames, op.wantPnames) {
|
|
||||||
t.Errorf("%s: pnames\ngot %q\nwant %q", desc, pnames, op.wantPnames)
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
case "allprop":
|
|
||||||
propstats, err = allprop(ctx, fs, ls, op.name, op.pnames)
|
|
||||||
case "propfind":
|
|
||||||
propstats, err = props(ctx, fs, ls, op.name, op.pnames)
|
|
||||||
case "proppatch":
|
|
||||||
propstats, err = patch(ctx, fs, ls, op.name, op.patches)
|
|
||||||
default:
|
|
||||||
t.Fatalf("%s: %s not implemented", desc, op.op)
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("%s: got error %v, want nil", desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// Compare return values from allprop, propfind or proppatch.
|
|
||||||
for _, pst := range propstats {
|
|
||||||
sort.Sort(byPropname(pst.Props))
|
|
||||||
}
|
|
||||||
for _, pst := range op.wantPropstats {
|
|
||||||
sort.Sort(byPropname(pst.Props))
|
|
||||||
}
|
|
||||||
sort.Sort(byStatus(propstats))
|
|
||||||
sort.Sort(byStatus(op.wantPropstats))
|
|
||||||
if !reflect.DeepEqual(propstats, op.wantPropstats) {
|
|
||||||
t.Errorf("%s: propstat\ngot %q\nwant %q", desc, propstats, op.wantPropstats)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func cmpXMLName(a, b xml.Name) bool {
|
|
||||||
if a.Space != b.Space {
|
|
||||||
return a.Space < b.Space
|
|
||||||
}
|
|
||||||
return a.Local < b.Local
|
|
||||||
}
|
|
||||||
|
|
||||||
type byXMLName []xml.Name
|
|
||||||
|
|
||||||
func (b byXMLName) Len() int { return len(b) }
|
|
||||||
func (b byXMLName) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
|
|
||||||
func (b byXMLName) Less(i, j int) bool { return cmpXMLName(b[i], b[j]) }
|
|
||||||
|
|
||||||
type byPropname []Property
|
|
||||||
|
|
||||||
func (b byPropname) Len() int { return len(b) }
|
|
||||||
func (b byPropname) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
|
|
||||||
func (b byPropname) Less(i, j int) bool { return cmpXMLName(b[i].XMLName, b[j].XMLName) }
|
|
||||||
|
|
||||||
type byStatus []Propstat
|
|
||||||
|
|
||||||
func (b byStatus) Len() int { return len(b) }
|
|
||||||
func (b byStatus) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
|
|
||||||
func (b byStatus) Less(i, j int) bool { return b[i].Status < b[j].Status }
|
|
||||||
|
|
||||||
type noDeadPropsFS struct {
|
|
||||||
FileSystem
|
|
||||||
}
|
|
||||||
|
|
||||||
func (fs noDeadPropsFS) OpenFile(ctx context.Context, name string, flag int, perm os.FileMode) (File, error) {
|
|
||||||
f, err := fs.FileSystem.OpenFile(ctx, name, flag, perm)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return noDeadPropsFile{f}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// noDeadPropsFile wraps a File but strips any optional DeadPropsHolder methods
|
|
||||||
// provided by the underlying File implementation.
|
|
||||||
type noDeadPropsFile struct {
|
|
||||||
f File
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f noDeadPropsFile) Close() error { return f.f.Close() }
|
|
||||||
func (f noDeadPropsFile) Read(p []byte) (int, error) { return f.f.Read(p) }
|
|
||||||
func (f noDeadPropsFile) Readdir(count int) ([]os.FileInfo, error) { return f.f.Readdir(count) }
|
|
||||||
func (f noDeadPropsFile) Seek(off int64, whence int) (int64, error) { return f.f.Seek(off, whence) }
|
|
||||||
func (f noDeadPropsFile) Stat() (os.FileInfo, error) { return f.f.Stat() }
|
|
||||||
func (f noDeadPropsFile) Write(p []byte) (int, error) { return f.f.Write(p) }
|
|
||||||
|
|
||||||
type overrideContentType struct {
|
|
||||||
os.FileInfo
|
|
||||||
contentType string
|
|
||||||
err error
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *overrideContentType) ContentType(ctx context.Context) (string, error) {
|
|
||||||
return o.contentType, o.err
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestFindContentTypeOverride(t *testing.T) {
|
|
||||||
fs, err := buildTestFS([]string{"touch /file"})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("cannot create test filesystem: %v", err)
|
|
||||||
}
|
|
||||||
ctx := context.Background()
|
|
||||||
fi, err := fs.Stat(ctx, "/file")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("cannot Stat /file: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check non overridden case
|
|
||||||
originalContentType, err := findContentType(ctx, fs, nil, "/file", fi)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("findContentType /file failed: %v", err)
|
|
||||||
}
|
|
||||||
if originalContentType != "text/plain; charset=utf-8" {
|
|
||||||
t.Fatalf("ContentType wrong want %q got %q", "text/plain; charset=utf-8", originalContentType)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Now try overriding the ContentType
|
|
||||||
o := &overrideContentType{fi, "OverriddenContentType", nil}
|
|
||||||
ContentType, err := findContentType(ctx, fs, nil, "/file", o)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("findContentType /file failed: %v", err)
|
|
||||||
}
|
|
||||||
if ContentType != o.contentType {
|
|
||||||
t.Fatalf("ContentType wrong want %q got %q", o.contentType, ContentType)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Now return ErrNotImplemented and check we get the original content type
|
|
||||||
o = &overrideContentType{fi, "OverriddenContentType", ErrNotImplemented}
|
|
||||||
ContentType, err = findContentType(ctx, fs, nil, "/file", o)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("findContentType /file failed: %v", err)
|
|
||||||
}
|
|
||||||
if ContentType != originalContentType {
|
|
||||||
t.Fatalf("ContentType wrong want %q got %q", originalContentType, ContentType)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type overrideETag struct {
|
|
||||||
os.FileInfo
|
|
||||||
eTag string
|
|
||||||
err error
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *overrideETag) ETag(ctx context.Context) (string, error) {
|
|
||||||
return o.eTag, o.err
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestFindETagOverride(t *testing.T) {
|
|
||||||
fs, err := buildTestFS([]string{"touch /file"})
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("cannot create test filesystem: %v", err)
|
|
||||||
}
|
|
||||||
ctx := context.Background()
|
|
||||||
fi, err := fs.Stat(ctx, "/file")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("cannot Stat /file: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check non overridden case
|
|
||||||
originalETag, err := findETag(ctx, fs, nil, "/file", fi)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("findETag /file failed: %v", err)
|
|
||||||
}
|
|
||||||
matchETag := regexp.MustCompile(`^"-?[0-9a-f]{6,}"$`)
|
|
||||||
if !matchETag.MatchString(originalETag) {
|
|
||||||
t.Fatalf("ETag wrong, wanted something matching %v got %q", matchETag, originalETag)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Now try overriding the ETag
|
|
||||||
o := &overrideETag{fi, `"OverriddenETag"`, nil}
|
|
||||||
ETag, err := findETag(ctx, fs, nil, "/file", o)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("findETag /file failed: %v", err)
|
|
||||||
}
|
|
||||||
if ETag != o.eTag {
|
|
||||||
t.Fatalf("ETag wrong want %q got %q", o.eTag, ETag)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Now return ErrNotImplemented and check we get the original Etag
|
|
||||||
o = &overrideETag{fi, `"OverriddenETag"`, ErrNotImplemented}
|
|
||||||
ETag, err = findETag(ctx, fs, nil, "/file", o)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("findETag /file failed: %v", err)
|
|
||||||
}
|
|
||||||
if ETag != originalETag {
|
|
||||||
t.Fatalf("ETag wrong want %q got %q", originalETag, ETag)
|
|
||||||
}
|
|
||||||
}
|
|

@ -1,712 +0,0 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package webdav provides a WebDAV server implementation.
package dav

import (
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
	"path"
	"strings"
	"time"
)

type Handler struct {
	// Prefix is the URL path prefix to strip from WebDAV resource paths.
	Prefix string
	// FileSystem is the virtual file system.
	FileSystem FileSystem
	// LockSystem is the lock management system.
	LockSystem LockSystem
	// Logger is an optional error logger. If non-nil, it will be called
	// for all HTTP requests.
	Logger func(*http.Request, error)
}

func (this *Handler) stripPrefix(p string) (string, int, error) {
	if this.Prefix == "" {
		return p, http.StatusOK, nil
	}
	if r := strings.TrimPrefix(p, this.Prefix); len(r) < len(p) {
		return r, http.StatusOK, nil
	}
	return p, http.StatusNotFound, errPrefixMismatch
}

func (this *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	status, err := http.StatusBadRequest, errUnsupportedMethod
	if this.FileSystem == nil {
		status, err = http.StatusInternalServerError, errNoFileSystem
	} else if this.LockSystem == nil {
		status, err = http.StatusInternalServerError, errNoLockSystem
	} else {
		switch r.Method {
		case "OPTIONS":
			status, err = this.handleOptions(w, r)
		case "GET", "HEAD", "POST":
			status, err = this.handleGetHeadPost(w, r)
		case "DELETE":
			status, err = this.handleDelete(w, r)
		case "PUT":
			status, err = this.handlePut(w, r)
		case "MKCOL":
			status, err = this.handleMkcol(w, r)
		case "COPY", "MOVE":
			status, err = this.handleCopyMove(w, r)
		case "LOCK":
			status, err = this.handleLock(w, r)
		case "UNLOCK":
			status, err = this.handleUnlock(w, r)
		case "PROPFIND":
			status, err = this.handlePropfind(w, r)
		case "PROPPATCH":
			status, err = this.handleProppatch(w, r)
		}
	}

	if status != 0 {
		w.WriteHeader(status)
		if status != http.StatusNoContent {
			w.Write([]byte(StatusText(status)))
		}
	}
	if this.Logger != nil {
		this.Logger(r, err)
	}
}
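
// A minimal wiring sketch (not part of the removed file): it assumes some
// FileSystem implementation named fs, that the log package is imported, and
// that the function name is hypothetical. Prefix, FileSystem, LockSystem and
// Logger are the Handler fields declared above.
func serveDAV(fs FileSystem) {
	h := &Handler{
		Prefix:     "/dav",
		FileSystem: fs,
		LockSystem: NewMemLS(),
		Logger: func(r *http.Request, err error) {
			if err != nil {
				log.Printf("webdav: %s %s: %v", r.Method, r.URL.Path, err)
			}
		},
	}
	http.Handle("/dav/", h)
	log.Fatal(http.ListenAndServe(":8080", nil))
}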

func (this *Handler) lock(now time.Time, root string) (token string, status int, err error) {
	token, err = this.LockSystem.Create(now, LockDetails{
		Root:      root,
		Duration:  infiniteTimeout,
		ZeroDepth: true,
	})
	if err != nil {
		if err == ErrLocked {
			return "", StatusLocked, err
		}
		return "", http.StatusInternalServerError, err
	}
	return token, 0, nil
}

func (this *Handler) confirmLocks(r *http.Request, src, dst string) (release func(), status int, err error) {
	hdr := r.Header.Get("If")
	if hdr == "" {
		// An empty If header means that the client hasn't previously created locks.
		// Even if this client doesn't care about locks, we still need to check that
		// the resources aren't locked by another client, so we create temporary
		// locks that would conflict with another client's locks. These temporary
		// locks are unlocked at the end of the HTTP request.
		now, srcToken, dstToken := time.Now(), "", ""
		if src != "" {
			srcToken, status, err = this.lock(now, src)
			if err != nil {
				return nil, status, err
			}
		}
		if dst != "" {
			dstToken, status, err = this.lock(now, dst)
			if err != nil {
				if srcToken != "" {
					this.LockSystem.Unlock(now, srcToken)
				}
				return nil, status, err
			}
		}

		return func() {
			if dstToken != "" {
				this.LockSystem.Unlock(now, dstToken)
			}
			if srcToken != "" {
				this.LockSystem.Unlock(now, srcToken)
			}
		}, 0, nil
	}

	ih, ok := parseIfHeader(hdr)
	if !ok {
		return nil, http.StatusBadRequest, errInvalidIfHeader
	}
	// ih is a disjunction (OR) of ifLists, so any ifList will do.
	for _, l := range ih.lists {
		lsrc := l.resourceTag
		if lsrc == "" {
			lsrc = src
		} else {
			u, err := url.Parse(lsrc)
			if err != nil {
				continue
			}
			if u.Host != r.Host {
				continue
			}
			lsrc, status, err = this.stripPrefix(u.Path)
			if err != nil {
				return nil, status, err
			}
		}
		release, err = this.LockSystem.Confirm(time.Now(), lsrc, dst, l.conditions...)
		if err == ErrConfirmationFailed {
			continue
		}
		if err != nil {
			return nil, http.StatusInternalServerError, err
		}
		return release, 0, nil
	}
	// Section 10.4.1 says that "If this header is evaluated and all state lists
	// fail, then the request must fail with a 412 (Precondition Failed) status."
	// We follow the spec even though the cond_put_corrupt_token test case from
	// the litmus test warns on seeing a 412 instead of a 423 (Locked).
	return nil, http.StatusPreconditionFailed, ErrLocked
}

func (this *Handler) handleOptions(w http.ResponseWriter, r *http.Request) (status int, err error) {
	reqPath, status, err := this.stripPrefix(r.URL.Path)
	if err != nil {
		return status, err
	}
	ctx := r.Context()
	allow := "OPTIONS, LOCK, PUT, MKCOL"
	if fi, err := this.FileSystem.Stat(ctx, reqPath); err == nil {
		if fi.IsDir() {
			allow = "OPTIONS, LOCK, DELETE, PROPPATCH, COPY, MOVE, UNLOCK, PROPFIND"
		} else {
			allow = "OPTIONS, LOCK, GET, HEAD, POST, DELETE, PROPPATCH, COPY, MOVE, UNLOCK, PROPFIND, PUT"
		}
	}
	w.Header().Set("Allow", allow)
	// http://www.webdav.org/specs/rfc4918.html#dav.compliance.classes
	w.Header().Set("DAV", "1, 2")
	// http://msdn.microsoft.com/en-au/library/cc250217.aspx
	w.Header().Set("MS-Author-Via", "DAV")
	return 0, nil
}

func (this *Handler) handleGetHeadPost(w http.ResponseWriter, r *http.Request) (status int, err error) {
	reqPath, status, err := this.stripPrefix(r.URL.Path)
	if err != nil {
		return status, err
	}
	// TODO: check locks for read-only access??
	ctx := r.Context()
	f, err := this.FileSystem.OpenFile(ctx, reqPath, os.O_RDONLY, 0)
	if err != nil {
		return http.StatusNotFound, err
	}
	defer f.Close()
	fi, err := f.Stat()
	if err != nil {
		return http.StatusNotFound, err
	}
	if fi.IsDir() {
		return http.StatusMethodNotAllowed, nil
	}
	etag, err := findETag(ctx, this.FileSystem, this.LockSystem, reqPath, fi)
	if err != nil {
		return http.StatusInternalServerError, err
	}
	w.Header().Set("ETag", etag)
	// Let ServeContent determine the Content-Type header.
	http.ServeContent(w, r, reqPath, fi.ModTime(), f)
	return 0, nil
}

func (this *Handler) handleDelete(w http.ResponseWriter, r *http.Request) (status int, err error) {
	reqPath, status, err := this.stripPrefix(r.URL.Path)
	if err != nil {
		return status, err
	}
	release, status, err := this.confirmLocks(r, reqPath, "")
	if err != nil {
		return status, err
	}
	defer release()

	ctx := r.Context()

	// TODO: return MultiStatus where appropriate.

	// "godoc os RemoveAll" says that "If the path does not exist, RemoveAll
	// returns nil (no error)." WebDAV semantics are that it should return a
	// "404 Not Found". We therefore have to Stat before we RemoveAll.
	if _, err := this.FileSystem.Stat(ctx, reqPath); err != nil {
		if os.IsNotExist(err) {
			return http.StatusNotFound, err
		}
		return http.StatusMethodNotAllowed, err
	}
	if err := this.FileSystem.RemoveAll(ctx, reqPath); err != nil {
		return http.StatusMethodNotAllowed, err
	}
	return http.StatusNoContent, nil
}

func (this *Handler) handlePut(w http.ResponseWriter, r *http.Request) (status int, err error) {
	reqPath, status, err := this.stripPrefix(r.URL.Path)
	if err != nil {
		return status, err
	}
	release, status, err := this.confirmLocks(r, reqPath, "")
	if err != nil {
		return status, err
	}
	defer release()
	// TODO(rost): Support the If-Match, If-None-Match headers? See bradfitz'
	// comments in http.checkEtag.
	ctx := r.Context()

	f, err := this.FileSystem.OpenFile(ctx, reqPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
	if err != nil {
		return http.StatusNotFound, err
	}
	_, copyErr := io.Copy(f, r.Body)
	fi, statErr := f.Stat()
	closeErr := f.Close()
	// TODO(rost): Returning 405 Method Not Allowed might not be appropriate.
	if copyErr != nil {
		return http.StatusMethodNotAllowed, copyErr
	}
	if statErr != nil {
		return http.StatusMethodNotAllowed, statErr
	}
	if closeErr != nil {
		return http.StatusMethodNotAllowed, closeErr
	}
	etag, err := findETag(ctx, this.FileSystem, this.LockSystem, reqPath, fi)
	if err != nil {
		return http.StatusInternalServerError, err
	}
	w.Header().Set("ETag", etag)
	return http.StatusCreated, nil
}

func (this *Handler) handleMkcol(w http.ResponseWriter, r *http.Request) (status int, err error) {
	reqPath, status, err := this.stripPrefix(r.URL.Path)
	if err != nil {
		return status, err
	}
	release, status, err := this.confirmLocks(r, reqPath, "")
	if err != nil {
		return status, err
	}
	defer release()

	ctx := r.Context()

	if r.ContentLength > 0 {
		return http.StatusUnsupportedMediaType, nil
	}
	if err := this.FileSystem.Mkdir(ctx, reqPath, 0777); err != nil {
		if os.IsNotExist(err) {
			return http.StatusConflict, err
		}
		return http.StatusMethodNotAllowed, err
	}
	return http.StatusCreated, nil
}

func (this *Handler) handleCopyMove(w http.ResponseWriter, r *http.Request) (status int, err error) {
	hdr := r.Header.Get("Destination")
	if hdr == "" {
		return http.StatusBadRequest, errInvalidDestination
	}
	u, err := url.Parse(hdr)
	if err != nil {
		return http.StatusBadRequest, errInvalidDestination
	}
	if u.Host != r.Host {
		return http.StatusBadGateway, errInvalidDestination
	}

	src, status, err := this.stripPrefix(r.URL.Path)
	if err != nil {
		return status, err
	}

	dst, status, err := this.stripPrefix(u.Path)
	if err != nil {
		return status, err
	}

	if dst == "" {
		return http.StatusBadGateway, errInvalidDestination
	}
	if dst == src {
		return http.StatusForbidden, errDestinationEqualsSource
	}

	ctx := r.Context()

	if r.Method == "COPY" {
		// Section 7.5.1 says that a COPY only needs to lock the destination,
		// not both destination and source. Strictly speaking, this is racy,
		// even though a COPY doesn't modify the source, if a concurrent
		// operation modifies the source. However, the litmus test explicitly
		// checks that COPYing a locked-by-another source is OK.
		release, status, err := this.confirmLocks(r, "", dst)
		if err != nil {
			return status, err
		}
		defer release()

		// Section 9.8.3 says that "The COPY method on a collection without a Depth
		// header must act as if a Depth header with value "infinity" was included".
		depth := infiniteDepth
		if hdr := r.Header.Get("Depth"); hdr != "" {
			depth = parseDepth(hdr)
			if depth != 0 && depth != infiniteDepth {
				// Section 9.8.3 says that "A client may submit a Depth header on a
				// COPY on a collection with a value of "0" or "infinity"."
				return http.StatusBadRequest, errInvalidDepth
			}
		}
		return copyFiles(ctx, this.FileSystem, src, dst, r.Header.Get("Overwrite") != "F", depth, 0)
	}

	release, status, err := this.confirmLocks(r, src, dst)
	if err != nil {
		return status, err
	}
	defer release()

	// Section 9.9.2 says that "The MOVE method on a collection must act as if
	// a "Depth: infinity" header was used on it. A client must not submit a
	// Depth header on a MOVE on a collection with any value but "infinity"."
	if hdr := r.Header.Get("Depth"); hdr != "" {
		if parseDepth(hdr) != infiniteDepth {
			return http.StatusBadRequest, errInvalidDepth
		}
	}
	return moveFiles(ctx, this.FileSystem, src, dst, r.Header.Get("Overwrite") == "T")
}
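
// A client-side sketch of the MOVE handling above (assumptions: a server
// reachable at baseURL with src and dst paths under the handler's Prefix;
// the function name is hypothetical and only net/http is used). The
// Overwrite header follows the "T"/"F" convention checked by handleCopyMove.
func moveResource(client *http.Client, baseURL, src, dst string, overwrite bool) (*http.Response, error) {
	req, err := http.NewRequest("MOVE", baseURL+src, nil)
	if err != nil {
		return nil, err
	}
	// Destination must be an absolute URL on the same host as the request.
	req.Header.Set("Destination", baseURL+dst)
	ow := "F"
	if overwrite {
		ow = "T"
	}
	req.Header.Set("Overwrite", ow)
	return client.Do(req)
}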

func (this *Handler) handleLock(w http.ResponseWriter, r *http.Request) (retStatus int, retErr error) {
	duration, err := parseTimeout(r.Header.Get("Timeout"))
	if err != nil {
		return http.StatusBadRequest, err
	}
	li, status, err := ReadLockInfo(r.Body)
	if err != nil {
		return status, err
	}

	ctx := r.Context()
	token, ld, now, created := "", LockDetails{}, time.Now(), false
	if li == (LockInfo{}) {
		// An empty lockInfo means to refresh the lock.
		ih, ok := parseIfHeader(r.Header.Get("If"))
		if !ok {
			return http.StatusBadRequest, errInvalidIfHeader
		}
		if len(ih.lists) == 1 && len(ih.lists[0].conditions) == 1 {
			token = ih.lists[0].conditions[0].Token
		}
		if token == "" {
			return http.StatusBadRequest, errInvalidLockToken
		}
		ld, err = this.LockSystem.Refresh(now, token, duration)
		if err != nil {
			if err == ErrNoSuchLock {
				return http.StatusPreconditionFailed, err
			}
			return http.StatusInternalServerError, err
		}

	} else {
		// Section 9.10.3 says that "If no Depth header is submitted on a LOCK request,
		// then the request MUST act as if a "Depth:infinity" had been submitted."
		depth := infiniteDepth
		if hdr := r.Header.Get("Depth"); hdr != "" {
			depth = parseDepth(hdr)
			if depth != 0 && depth != infiniteDepth {
				// Section 9.10.3 says that "Values other than 0 or infinity must not be
				// used with the Depth header on a LOCK method".
				return http.StatusBadRequest, errInvalidDepth
			}
		}
		reqPath, status, err := this.stripPrefix(r.URL.Path)
		if err != nil {
			return status, err
		}
		ld = LockDetails{
			Root:      reqPath,
			Duration:  duration,
			OwnerXML:  li.Owner.InnerXML,
			ZeroDepth: depth == 0,
		}
		token, err = this.LockSystem.Create(now, ld)
		if err != nil {
			if err == ErrLocked {
				return StatusLocked, err
			}
			return http.StatusInternalServerError, err
		}
		defer func() {
			if retErr != nil {
				this.LockSystem.Unlock(now, token)
			}
		}()

		// Create the resource if it didn't previously exist.
		if _, err := this.FileSystem.Stat(ctx, reqPath); err != nil {
			f, err := this.FileSystem.OpenFile(ctx, reqPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
			if err != nil {
				// TODO: detect missing intermediate dirs and return http.StatusConflict?
				return http.StatusInternalServerError, err
			}
			f.Close()
			created = true
		}

		// http://www.webdav.org/specs/rfc4918.html#HEADER_Lock-Token says that the
		// Lock-Token value is a Coded-URL. We add angle brackets.
		w.Header().Set("Lock-Token", "<"+token+">")
	}

	w.Header().Set("Content-Type", "application/xml; charset=utf-8")
	if created {
		// This is "w.WriteHeader(http.StatusCreated)" and not "return
		// http.StatusCreated, nil" because we write our own (XML) response to w
		// and Handler.ServeHTTP would otherwise write "Created".
		w.WriteHeader(http.StatusCreated)
	}
	WriteLockInfo(w, token, ld)
	return 0, nil
}
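
// A client-side sketch of the LOCK flow above (assumptions: lockBody is a
// DAV:lockinfo XML document supplied by the caller, the function name is
// hypothetical, and fmt, net/http and strings are imported). The Timeout
// value format matches TestParseTimeout, and the returned Lock-Token is a
// Coded-URL that handleLock wraps in angle brackets.
func lockResource(client *http.Client, url, lockBody string, seconds int) (token string, err error) {
	req, err := http.NewRequest("LOCK", url, strings.NewReader(lockBody))
	if err != nil {
		return "", err
	}
	req.Header.Set("Timeout", fmt.Sprintf("Second-%d", seconds))
	req.Header.Set("Depth", "0")
	req.Header.Set("Content-Type", "application/xml; charset=utf-8")
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	// Strip the angle brackets so the token can be fed back in an If header.
	return strings.Trim(resp.Header.Get("Lock-Token"), "<>"), nil
}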

func (this *Handler) handleUnlock(w http.ResponseWriter, r *http.Request) (status int, err error) {
	// http://www.webdav.org/specs/rfc4918.html#HEADER_Lock-Token says that the
	// Lock-Token value is a Coded-URL. We strip its angle brackets.
	t := r.Header.Get("Lock-Token")
	if len(t) < 2 || t[0] != '<' || t[len(t)-1] != '>' {
		return http.StatusBadRequest, errInvalidLockToken
	}
	t = t[1 : len(t)-1]

	switch err = this.LockSystem.Unlock(time.Now(), t); err {
	case nil:
		return http.StatusNoContent, err
	case ErrForbidden:
		return http.StatusForbidden, err
	case ErrLocked:
		return StatusLocked, err
	case ErrNoSuchLock:
		return http.StatusConflict, err
	default:
		return http.StatusInternalServerError, err
	}
}

func (this *Handler) handlePropfind(writer http.ResponseWriter, request *http.Request) (status int, err error) {
	reqPath, status, err := this.stripPrefix(request.URL.Path)
	if err != nil {
		return status, err
	}
	ctx := request.Context()
	fileInfo, err := this.FileSystem.Stat(ctx, reqPath)
	if err != nil {
		if os.IsNotExist(err) {
			return http.StatusNotFound, err
		}
		return http.StatusMethodNotAllowed, err
	}
	depth := infiniteDepth
	if hdr := request.Header.Get("Depth"); hdr != "" {
		depth = parseDepth(hdr)
		if depth == invalidDepth {
			return http.StatusBadRequest, errInvalidDepth
		}
	}
	// Read the file properties that the request wants to retrieve.
	pf, status, err := ReadPropfind(request.Body)
	if err != nil {
		return status, err
	}

	multiStatusWriter := MultiStatusWriter{Writer: writer}

	walkFn := func(reqPath string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}

		fmt.Printf("visiting: %s {name=%s,IsDir=%v,Mode=%v,ModTime=%v,Size=%v}\n",
			reqPath, info.Name(), info.IsDir(), info.Mode(), info.ModTime(), info.Size())
		var propstats []Propstat
		if pf.Propname != nil {
			pnames, err := Propnames(ctx, this.FileSystem, this.LockSystem, reqPath)
			if err != nil {
				return err
			}
			pstat := Propstat{Status: http.StatusOK}
			for _, xmlname := range pnames {
				pstat.Props = append(pstat.Props, Property{XMLName: xmlname})
			}
			propstats = append(propstats, pstat)
		} else if pf.Allprop != nil {
			propstats, err = allprop(ctx, this.FileSystem, this.LockSystem, reqPath, pf.Prop)
		} else {
			propstats, err = props(ctx, this.FileSystem, this.LockSystem, reqPath, pf.Prop)
		}
		if err != nil {
			return err
		}
		href := path.Join(this.Prefix, reqPath)
		if info.IsDir() {
			href += "/"
		}

		propstatResponse := makePropstatResponse(href, propstats)
		return multiStatusWriter.Write(propstatResponse)
	}

	walkErr := walkFS(ctx, this.FileSystem, depth, reqPath, fileInfo, walkFn)
	closeErr := multiStatusWriter.Close()
	if walkErr != nil {
		return http.StatusInternalServerError, walkErr
	}
	if closeErr != nil {
		return http.StatusInternalServerError, closeErr
	}
	return 0, nil
}
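
// A client-side sketch of a PROPFIND that exercises the handler above
// (assumptions: the function name is hypothetical and net/http and strings
// are imported). The body asks for allprop, which handlePropfind routes
// through allprop; Depth "1" limits walkFS to the resource and its immediate
// children, as parseDepth implements.
func propfindAllprop(client *http.Client, url string) (*http.Response, error) {
	body := `<?xml version="1.0" encoding="utf-8"?><D:propfind xmlns:D="DAV:"><D:allprop/></D:propfind>`
	req, err := http.NewRequest("PROPFIND", url, strings.NewReader(body))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Depth", "1")
	req.Header.Set("Content-Type", "application/xml; charset=utf-8")
	return client.Do(req)
}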
|
|
||||||
|
|
||||||
func (this *Handler) handleProppatch(w http.ResponseWriter, r *http.Request) (status int, err error) {
|
|
||||||
reqPath, status, err := this.stripPrefix(r.URL.Path)
|
|
||||||
if err != nil {
|
|
||||||
return status, err
|
|
||||||
}
|
|
||||||
release, status, err := this.confirmLocks(r, reqPath, "")
|
|
||||||
if err != nil {
|
|
||||||
return status, err
|
|
||||||
}
|
|
||||||
defer release()
|
|
||||||
|
|
||||||
ctx := r.Context()
|
|
||||||
|
|
||||||
if _, err := this.FileSystem.Stat(ctx, reqPath); err != nil {
|
|
||||||
if os.IsNotExist(err) {
|
|
||||||
return http.StatusNotFound, err
|
|
||||||
}
|
|
||||||
return http.StatusMethodNotAllowed, err
|
|
||||||
}
|
|
||||||
patches, status, err := ReadProppatch(r.Body)
|
|
||||||
if err != nil {
|
|
||||||
return status, err
|
|
||||||
}
|
|
||||||
pstats, err := patch(ctx, this.FileSystem, this.LockSystem, reqPath, patches)
|
|
||||||
if err != nil {
|
|
||||||
return http.StatusInternalServerError, err
|
|
||||||
}
|
|
||||||
mw := MultiStatusWriter{Writer: w}
|
|
||||||
writeErr := mw.Write(makePropstatResponse(r.URL.Path, pstats))
|
|
||||||
closeErr := mw.Close()
|
|
||||||
if writeErr != nil {
|
|
||||||
return http.StatusInternalServerError, writeErr
|
|
||||||
}
|
|
||||||
if closeErr != nil {
|
|
||||||
return http.StatusInternalServerError, closeErr
|
|
||||||
}
|
|
||||||
return 0, nil
|
|
||||||
}

func makePropstatResponse(href string, pstats []Propstat) *Response {
	resp := Response{
		Href:     []string{(&url.URL{Path: href}).EscapedPath()},
		Propstat: make([]SubPropstat, 0, len(pstats)),
	}
	for _, p := range pstats {
		var xmlErr *XmlError
		if p.XMLError != "" {
			xmlErr = &XmlError{InnerXML: []byte(p.XMLError)}
		}
		resp.Propstat = append(resp.Propstat, SubPropstat{
			Status:              fmt.Sprintf("HTTP/1.1 %d %s", p.Status, StatusText(p.Status)),
			Prop:                p.Props,
			ResponseDescription: p.ResponseDescription,
			Error:               xmlErr,
		})
	}
	return &resp
}
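makePropstatResponse percent-escapes the href and renders each Propstat's status code through StatusText. As a quick illustration only, not code from this diff, a single Propstat maps onto the response shape used by the multistatus fixtures later in this page:

// Not part of this diff: a tiny illustration of the mapping performed above.
func examplePropstatResponse() *Response {
	resp := makePropstatResponse("/foo bar", []Propstat{{
		Status: http.StatusOK,
		Props:  []Property{{XMLName: xml.Name{Space: "DAV:", Local: "displayname"}}},
	}})
	// resp.Href[0] is the escaped path "/foo%20bar" and resp.Propstat[0].Status
	// is "HTTP/1.1 200 OK"; MultiStatusWriter serializes it roughly as
	//   <response>
	//     <href>/foo%20bar</href>
	//     <propstat>
	//       <prop><displayname/></prop>
	//       <status>HTTP/1.1 200 OK</status>
	//     </propstat>
	//   </response>
	return resp
}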

const (
	infiniteDepth = -1
	invalidDepth  = -2
)

// parseDepth maps the strings "0", "1" and "infinity" to 0, 1 and
// infiniteDepth. Parsing any other string returns invalidDepth.
//
// Different WebDAV methods have further constraints on valid depths:
// - PROPFIND has no further restrictions, as per section 9.1.
// - COPY accepts only "0" or "infinity", as per section 9.8.3.
// - MOVE accepts only "infinity", as per section 9.9.2.
// - LOCK accepts only "0" or "infinity", as per section 9.10.3.
// These constraints are enforced by the handleXxx methods.
func parseDepth(s string) int {
	switch s {
	case "0":
		return 0
	case "1":
		return 1
	case "infinity":
		return infiniteDepth
	}
	return invalidDepth
}
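The comment above spells out which Depth values each method accepts. As an illustration only, not part of this diff, a COPY-style guard built on parseDepth could look like the sketch below; treating a missing Depth header as "infinity" is an assumption made here, not something this hunk states.

// Not part of this diff: a sketch of gating the Depth header for COPY.
func copyDepth(r *http.Request) (int, error) {
	hdr := r.Header.Get("Depth")
	if hdr == "" {
		// Assumption: a missing Depth header is treated as "infinity".
		return infiniteDepth, nil
	}
	switch d := parseDepth(hdr); d {
	case 0, infiniteDepth:
		return d, nil
	default:
		// COPY accepts only "0" or "infinity", as per section 9.8.3.
		return invalidDepth, errInvalidDepth
	}
}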

// http://www.webdav.org/specs/rfc4918.html#status.code.extensions.to.http11
const (
	StatusMulti               = 207
	StatusUnprocessableEntity = 422
	StatusLocked              = 423
	StatusFailedDependency    = 424
	StatusInsufficientStorage = 507
)

func StatusText(code int) string {
	switch code {
	case StatusMulti:
		return "Multi-Status"
	case StatusUnprocessableEntity:
		return "Unprocessable Entity"
	case StatusLocked:
		return "Locked"
	case StatusFailedDependency:
		return "Failed Dependency"
	case StatusInsufficientStorage:
		return "Insufficient Storage"
	}
	return http.StatusText(code)
}

var (
	errDestinationEqualsSource = errors.New("webdav: destination equals source")
	errDirectoryNotEmpty       = errors.New("webdav: directory not empty")
	errInvalidDepth            = errors.New("webdav: invalid depth")
	errInvalidDestination      = errors.New("webdav: invalid destination")
	errInvalidIfHeader         = errors.New("webdav: invalid If header")
	errInvalidLockInfo         = errors.New("webdav: invalid lock info")
	errInvalidLockToken        = errors.New("webdav: invalid lock token")
	errInvalidPropfind         = errors.New("webdav: invalid propfind")
	errInvalidProppatch        = errors.New("webdav: invalid proppatch")
	errInvalidResponse         = errors.New("webdav: invalid response")
	errInvalidTimeout          = errors.New("webdav: invalid timeout")
	errNoFileSystem            = errors.New("webdav: no file system")
	errNoLockSystem            = errors.New("webdav: no lock system")
	errNotADirectory           = errors.New("webdav: not a directory")
	errPrefixMismatch          = errors.New("webdav: prefix mismatch")
	errRecursionTooDeep        = errors.New("webdav: recursion too deep")
	errUnsupportedLockInfo     = errors.New("webdav: unsupported lock info")
	errUnsupportedMethod       = errors.New("webdav: unsupported method")
)
@ -1,343 +0,0 @@
|
|||||||
// Copyright 2015 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package dav
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"io/ioutil"
|
|
||||||
"net/http"
|
|
||||||
"net/http/httptest"
|
|
||||||
"net/url"
|
|
||||||
"os"
|
|
||||||
"reflect"
|
|
||||||
"regexp"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
// TODO: add tests to check XML responses with the expected prefix path
|
|
||||||
func TestPrefix(t *testing.T) {
|
|
||||||
const dst, blah = "Destination", "blah blah blah"
|
|
||||||
|
|
||||||
// createLockBody comes from the example in Section 9.10.7.
|
|
||||||
const createLockBody = `<?xml version="1.0" encoding="utf-8" ?>
|
|
||||||
<D:lockinfo xmlns:D='DAV:'>
|
|
||||||
<D:lockscope><D:exclusive/></D:lockscope>
|
|
||||||
<D:locktype><D:write/></D:locktype>
|
|
||||||
<D:owner>
|
|
||||||
<D:href>http://example.org/~ejw/contact.html</D:href>
|
|
||||||
</D:owner>
|
|
||||||
</D:lockinfo>
|
|
||||||
`
|
|
||||||
|
|
||||||
do := func(method, urlStr string, body string, wantStatusCode int, headers ...string) (http.Header, error) {
|
|
||||||
var bodyReader io.Reader
|
|
||||||
if body != "" {
|
|
||||||
bodyReader = strings.NewReader(body)
|
|
||||||
}
|
|
||||||
req, err := http.NewRequest(method, urlStr, bodyReader)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
for len(headers) >= 2 {
|
|
||||||
req.Header.Add(headers[0], headers[1])
|
|
||||||
headers = headers[2:]
|
|
||||||
}
|
|
||||||
res, err := http.DefaultTransport.RoundTrip(req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
defer res.Body.Close()
|
|
||||||
if res.StatusCode != wantStatusCode {
|
|
||||||
return nil, fmt.Errorf("got status code %d, want %d", res.StatusCode, wantStatusCode)
|
|
||||||
}
|
|
||||||
return res.Header, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
prefixes := []string{
|
|
||||||
"/",
|
|
||||||
"/a/",
|
|
||||||
"/a/b/",
|
|
||||||
"/a/b/c/",
|
|
||||||
}
|
|
||||||
ctx := context.Background()
|
|
||||||
for _, prefix := range prefixes {
|
|
||||||
fs := NewMemFS()
|
|
||||||
h := &Handler{
|
|
||||||
FileSystem: fs,
|
|
||||||
LockSystem: NewMemLS(),
|
|
||||||
}
|
|
||||||
mux := http.NewServeMux()
|
|
||||||
if prefix != "/" {
|
|
||||||
h.Prefix = prefix
|
|
||||||
}
|
|
||||||
mux.Handle(prefix, h)
|
|
||||||
srv := httptest.NewServer(mux)
|
|
||||||
defer srv.Close()
|
|
||||||
|
|
||||||
// The script is:
|
|
||||||
// MKCOL /a
|
|
||||||
// MKCOL /a/b
|
|
||||||
// PUT /a/b/c
|
|
||||||
// COPY /a/b/c /a/b/d
|
|
||||||
// MKCOL /a/b/e
|
|
||||||
// MOVE /a/b/d /a/b/e/f
|
|
||||||
// LOCK /a/b/e/g
|
|
||||||
// PUT /a/b/e/g
|
|
||||||
// which should yield the (possibly stripped) filenames /a/b/c,
|
|
||||||
// /a/b/e/f and /a/b/e/g, plus their parent directories.
|
|
||||||
|
|
||||||
wantA := map[string]int{
|
|
||||||
"/": http.StatusCreated,
|
|
||||||
"/a/": http.StatusMovedPermanently,
|
|
||||||
"/a/b/": http.StatusNotFound,
|
|
||||||
"/a/b/c/": http.StatusNotFound,
|
|
||||||
}[prefix]
|
|
||||||
if _, err := do("MKCOL", srv.URL+"/a", "", wantA); err != nil {
|
|
||||||
t.Errorf("prefix=%-9q MKCOL /a: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
wantB := map[string]int{
|
|
||||||
"/": http.StatusCreated,
|
|
||||||
"/a/": http.StatusCreated,
|
|
||||||
"/a/b/": http.StatusMovedPermanently,
|
|
||||||
"/a/b/c/": http.StatusNotFound,
|
|
||||||
}[prefix]
|
|
||||||
if _, err := do("MKCOL", srv.URL+"/a/b", "", wantB); err != nil {
|
|
||||||
t.Errorf("prefix=%-9q MKCOL /a/b: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
wantC := map[string]int{
|
|
||||||
"/": http.StatusCreated,
|
|
||||||
"/a/": http.StatusCreated,
|
|
||||||
"/a/b/": http.StatusCreated,
|
|
||||||
"/a/b/c/": http.StatusMovedPermanently,
|
|
||||||
}[prefix]
|
|
||||||
if _, err := do("PUT", srv.URL+"/a/b/c", blah, wantC); err != nil {
|
|
||||||
t.Errorf("prefix=%-9q PUT /a/b/c: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
wantD := map[string]int{
|
|
||||||
"/": http.StatusCreated,
|
|
||||||
"/a/": http.StatusCreated,
|
|
||||||
"/a/b/": http.StatusCreated,
|
|
||||||
"/a/b/c/": http.StatusMovedPermanently,
|
|
||||||
}[prefix]
|
|
||||||
if _, err := do("COPY", srv.URL+"/a/b/c", "", wantD, dst, srv.URL+"/a/b/d"); err != nil {
|
|
||||||
t.Errorf("prefix=%-9q COPY /a/b/c /a/b/d: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
wantE := map[string]int{
|
|
||||||
"/": http.StatusCreated,
|
|
||||||
"/a/": http.StatusCreated,
|
|
||||||
"/a/b/": http.StatusCreated,
|
|
||||||
"/a/b/c/": http.StatusNotFound,
|
|
||||||
}[prefix]
|
|
||||||
if _, err := do("MKCOL", srv.URL+"/a/b/e", "", wantE); err != nil {
|
|
||||||
t.Errorf("prefix=%-9q MKCOL /a/b/e: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
wantF := map[string]int{
|
|
||||||
"/": http.StatusCreated,
|
|
||||||
"/a/": http.StatusCreated,
|
|
||||||
"/a/b/": http.StatusCreated,
|
|
||||||
"/a/b/c/": http.StatusNotFound,
|
|
||||||
}[prefix]
|
|
||||||
if _, err := do("MOVE", srv.URL+"/a/b/d", "", wantF, dst, srv.URL+"/a/b/e/f"); err != nil {
|
|
||||||
t.Errorf("prefix=%-9q MOVE /a/b/d /a/b/e/f: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
var lockToken string
|
|
||||||
wantG := map[string]int{
|
|
||||||
"/": http.StatusCreated,
|
|
||||||
"/a/": http.StatusCreated,
|
|
||||||
"/a/b/": http.StatusCreated,
|
|
||||||
"/a/b/c/": http.StatusNotFound,
|
|
||||||
}[prefix]
|
|
||||||
if h, err := do("LOCK", srv.URL+"/a/b/e/g", createLockBody, wantG); err != nil {
|
|
||||||
t.Errorf("prefix=%-9q LOCK /a/b/e/g: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
} else {
|
|
||||||
lockToken = h.Get("Lock-Token")
|
|
||||||
}
|
|
||||||
|
|
||||||
ifHeader := fmt.Sprintf("<%s/a/b/e/g> (%s)", srv.URL, lockToken)
|
|
||||||
wantH := map[string]int{
|
|
||||||
"/": http.StatusCreated,
|
|
||||||
"/a/": http.StatusCreated,
|
|
||||||
"/a/b/": http.StatusCreated,
|
|
||||||
"/a/b/c/": http.StatusNotFound,
|
|
||||||
}[prefix]
|
|
||||||
if _, err := do("PUT", srv.URL+"/a/b/e/g", blah, wantH, "If", ifHeader); err != nil {
|
|
||||||
t.Errorf("prefix=%-9q PUT /a/b/e/g: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
got, err := find(ctx, nil, fs, "/")
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("prefix=%-9q find: %v", prefix, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
sort.Strings(got)
|
|
||||||
want := map[string][]string{
|
|
||||||
"/": {"/", "/a", "/a/b", "/a/b/c", "/a/b/e", "/a/b/e/f", "/a/b/e/g"},
|
|
||||||
"/a/": {"/", "/b", "/b/c", "/b/e", "/b/e/f", "/b/e/g"},
|
|
||||||
"/a/b/": {"/", "/c", "/e", "/e/f", "/e/g"},
|
|
||||||
"/a/b/c/": {"/"},
|
|
||||||
}[prefix]
|
|
||||||
if !reflect.DeepEqual(got, want) {
|
|
||||||
t.Errorf("prefix=%-9q find:\ngot %v\nwant %v", prefix, got, want)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestEscapeXML(t *testing.T) {
|
|
||||||
// These test cases aren't exhaustive, and there is more than one way to
|
|
||||||
// escape e.g. a quot (as """ or """) or an apos. We presume that
|
|
||||||
// the encoding/xml package tests xml.EscapeText more thoroughly. This test
|
|
||||||
// here is just a sanity check for this package's EscapeXML function, and
|
|
||||||
// its attempt to provide a fast path (and avoid a bytes.Buffer allocation)
|
|
||||||
// when escaping filenames is obviously a no-op.
|
|
||||||
testCases := map[string]string{
|
|
||||||
"": "",
|
|
||||||
" ": " ",
|
|
||||||
"&": "&",
|
|
||||||
"*": "*",
|
|
||||||
"+": "+",
|
|
||||||
",": ",",
|
|
||||||
"-": "-",
|
|
||||||
".": ".",
|
|
||||||
"/": "/",
|
|
||||||
"0": "0",
|
|
||||||
"9": "9",
|
|
||||||
":": ":",
|
|
||||||
"<": "<",
|
|
||||||
">": ">",
|
|
||||||
"A": "A",
|
|
||||||
"_": "_",
|
|
||||||
"a": "a",
|
|
||||||
"~": "~",
|
|
||||||
"\u0201": "\u0201",
|
|
||||||
"&": "&amp;",
|
|
||||||
"foo&<b/ar>baz": "foo&<b/ar>baz",
|
|
||||||
}
|
|
||||||
|
|
||||||
for in, want := range testCases {
|
|
||||||
if got := EscapeXML(in); got != want {
|
|
||||||
t.Errorf("in=%q: got %q, want %q", in, got, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestFilenameEscape(t *testing.T) {
|
|
||||||
hrefRe := regexp.MustCompile(`<D:href>([^<]*)</D:href>`)
|
|
||||||
displayNameRe := regexp.MustCompile(`<D:displayname>([^<]*)</D:displayname>`)
|
|
||||||
do := func(method, urlStr string) (string, string, error) {
|
|
||||||
req, err := http.NewRequest(method, urlStr, nil)
|
|
||||||
if err != nil {
|
|
||||||
return "", "", err
|
|
||||||
}
|
|
||||||
res, err := http.DefaultClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return "", "", err
|
|
||||||
}
|
|
||||||
defer res.Body.Close()
|
|
||||||
|
|
||||||
b, err := ioutil.ReadAll(res.Body)
|
|
||||||
if err != nil {
|
|
||||||
return "", "", err
|
|
||||||
}
|
|
||||||
hrefMatch := hrefRe.FindStringSubmatch(string(b))
|
|
||||||
if len(hrefMatch) != 2 {
|
|
||||||
return "", "", errors.New("D:href not found")
|
|
||||||
}
|
|
||||||
displayNameMatch := displayNameRe.FindStringSubmatch(string(b))
|
|
||||||
if len(displayNameMatch) != 2 {
|
|
||||||
return "", "", errors.New("D:displayname not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
return hrefMatch[1], displayNameMatch[1], nil
|
|
||||||
}
|
|
||||||
|
|
||||||
testCases := []struct {
|
|
||||||
name, wantHref, wantDisplayName string
|
|
||||||
}{{
|
|
||||||
name: `/foo%bar`,
|
|
||||||
wantHref: `/foo%25bar`,
|
|
||||||
wantDisplayName: `foo%bar`,
|
|
||||||
}, {
|
|
||||||
name: `/こんにちわ世界`,
|
|
||||||
wantHref: `/%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%82%8F%E4%B8%96%E7%95%8C`,
|
|
||||||
wantDisplayName: `こんにちわ世界`,
|
|
||||||
}, {
|
|
||||||
name: `/Program Files/`,
|
|
||||||
wantHref: `/Program%20Files/`,
|
|
||||||
wantDisplayName: `Program Files`,
|
|
||||||
}, {
|
|
||||||
name: `/go+lang`,
|
|
||||||
wantHref: `/go+lang`,
|
|
||||||
wantDisplayName: `go+lang`,
|
|
||||||
}, {
|
|
||||||
name: `/go&lang`,
|
|
||||||
wantHref: `/go&lang`,
|
|
||||||
wantDisplayName: `go&lang`,
|
|
||||||
}, {
|
|
||||||
name: `/go<lang`,
|
|
||||||
wantHref: `/go%3Clang`,
|
|
||||||
wantDisplayName: `go<lang`,
|
|
||||||
}}
|
|
||||||
ctx := context.Background()
|
|
||||||
fs := NewMemFS()
|
|
||||||
for _, tc := range testCases {
|
|
||||||
if strings.HasSuffix(tc.name, "/") {
|
|
||||||
if err := fs.Mkdir(ctx, tc.name, 0755); err != nil {
|
|
||||||
t.Fatalf("name=%q: Mkdir: %v", tc.name, err)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
f, err := fs.OpenFile(ctx, tc.name, os.O_CREATE, 0644)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("name=%q: OpenFile: %v", tc.name, err)
|
|
||||||
}
|
|
||||||
f.Close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
srv := httptest.NewServer(&Handler{
|
|
||||||
FileSystem: fs,
|
|
||||||
LockSystem: NewMemLS(),
|
|
||||||
})
|
|
||||||
defer srv.Close()
|
|
||||||
|
|
||||||
u, err := url.Parse(srv.URL)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tc := range testCases {
|
|
||||||
u.Path = tc.name
|
|
||||||
gotHref, gotDisplayName, err := do("PROPFIND", u.String())
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("name=%q: PROPFIND: %v", tc.name, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if gotHref != tc.wantHref {
|
|
||||||
t.Errorf("name=%q: got href %q, want %q", tc.name, gotHref, tc.wantHref)
|
|
||||||
}
|
|
||||||
if gotDisplayName != tc.wantDisplayName {
|
|
||||||
t.Errorf("name=%q: got dispayname %q, want %q", tc.name, gotDisplayName, tc.wantDisplayName)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -10,6 +10,7 @@ package dav
import (
	"bytes"
	"encoding/xml"
	"errors"
	"fmt"
	"io"
	"net/http"
@ -35,6 +36,56 @@ import (
	ixml "tank/rest/dav/internal/xml"
)

// http://www.webdav.org/specs/rfc4918.html#status.code.extensions.to.http11
const (
	StatusMulti               = 207
	StatusUnprocessableEntity = 422
	StatusLocked              = 423
	StatusFailedDependency    = 424
	StatusInsufficientStorage = 507
)

func StatusText(code int) string {
	switch code {
	case StatusMulti:
		return "Multi-Status"
	case StatusUnprocessableEntity:
		return "Unprocessable Entity"
	case StatusLocked:
		return "Locked"
	case StatusFailedDependency:
		return "Failed Dependency"
	case StatusInsufficientStorage:
		return "Insufficient Storage"
	}
	return http.StatusText(code)
}

var (
	errDestinationEqualsSource = errors.New("webdav: destination equals source")
	errDirectoryNotEmpty       = errors.New("webdav: directory not empty")
	errInvalidDepth            = errors.New("webdav: invalid depth")
	errInvalidDestination      = errors.New("webdav: invalid destination")
	errInvalidIfHeader         = errors.New("webdav: invalid If header")
	errInvalidLockInfo         = errors.New("webdav: invalid lock info")
	errInvalidLockToken        = errors.New("webdav: invalid lock token")
	errInvalidPropfind         = errors.New("webdav: invalid propfind")
	errInvalidProppatch        = errors.New("webdav: invalid proppatch")
	errInvalidResponse         = errors.New("webdav: invalid response")
	errInvalidTimeout          = errors.New("webdav: invalid timeout")
	errNoFileSystem            = errors.New("webdav: no file system")
	errNoLockSystem            = errors.New("webdav: no lock system")
	errNotADirectory           = errors.New("webdav: not a directory")
	errPrefixMismatch          = errors.New("webdav: prefix mismatch")
	errRecursionTooDeep        = errors.New("webdav: recursion too deep")
	errUnsupportedLockInfo     = errors.New("webdav: unsupported lock info")
	errUnsupportedMethod       = errors.New("webdav: unsupported method")
)

// http://www.webdav.org/specs/rfc4918.html#ELEMENT_lockinfo
type LockInfo struct {
	XMLName ixml.Name `xml:"lockinfo"`
@ -49,26 +100,6 @@ type Owner struct {
	InnerXML string `xml:",innerxml"`
}

func ReadLockInfo(r io.Reader) (li LockInfo, status int, err error) {
	c := &CountingReader{reader: r}
	if err = ixml.NewDecoder(c).Decode(&li); err != nil {
		if err == io.EOF {
			if c.n == 0 {
				// An empty body means to refresh the lock.
				// http://www.webdav.org/specs/rfc4918.html#refreshing-locks
				return LockInfo{}, 0, nil
			}
			err = errInvalidLockInfo
		}
		return LockInfo{}, http.StatusBadRequest, err
	}
	// We only support exclusive (non-shared) write locks. In practice, these are
	// the only types of locks that seem to matter.
	if li.Exclusive == nil || li.Shared != nil || li.Write == nil {
		return LockInfo{}, http.StatusNotImplemented, errUnsupportedLockInfo
	}
	return li, 0, nil
}
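ReadLockInfo distinguishes three outcomes: a lock refresh (empty body), a bad or unsupported body, and a well-formed exclusive write lock. Purely as an illustration, and not code from this commit, a caller could branch on those outcomes as sketched below; reflect is assumed to be imported.

// Not part of this diff: a rough sketch of consuming the ReadLockInfo helper shown above.
func readLockRequest(r *http.Request) (refresh bool, owner string, status int, err error) {
	li, status, err := ReadLockInfo(r.Body)
	if err != nil {
		return false, "", status, err
	}
	if reflect.DeepEqual(li, LockInfo{}) {
		// An empty body refreshes an existing lock; the token to refresh is
		// carried by the If header, not by the body.
		return true, "", 0, nil
	}
	// A new exclusive write lock: the owner element is kept verbatim.
	return false, li.Owner.InnerXML, 0, nil
}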

// CountingReader is a Reader with a byte counter, so the total number of bytes read is known.
type CountingReader struct {
@ -1,906 +0,0 @@
|
|||||||
// Copyright 2014 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package dav
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/xml"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"net/http/httptest"
|
|
||||||
"reflect"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
ixml "tank/rest/dav/internal/xml"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestReadLockInfo(t *testing.T) {
|
|
||||||
// The "section x.y.z" test cases come from section x.y.z of the spec at
|
|
||||||
// http://www.webdav.org/specs/rfc4918.html
|
|
||||||
testCases := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
wantLI LockInfo
|
|
||||||
wantStatus int
|
|
||||||
}{{
|
|
||||||
"bad: junk",
|
|
||||||
"xxx",
|
|
||||||
LockInfo{},
|
|
||||||
http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
"bad: invalid owner XML",
|
|
||||||
"" +
|
|
||||||
"<D:lockinfo xmlns:D='DAV:'>\n" +
|
|
||||||
" <D:lockscope><D:exclusive/></D:lockscope>\n" +
|
|
||||||
" <D:locktype><D:write/></D:locktype>\n" +
|
|
||||||
" <D:owner>\n" +
|
|
||||||
" <D:href> no end tag \n" +
|
|
||||||
" </D:owner>\n" +
|
|
||||||
"</D:lockinfo>",
|
|
||||||
LockInfo{},
|
|
||||||
http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
"bad: invalid UTF-8",
|
|
||||||
"" +
|
|
||||||
"<D:lockinfo xmlns:D='DAV:'>\n" +
|
|
||||||
" <D:lockscope><D:exclusive/></D:lockscope>\n" +
|
|
||||||
" <D:locktype><D:write/></D:locktype>\n" +
|
|
||||||
" <D:owner>\n" +
|
|
||||||
" <D:href> \xff </D:href>\n" +
|
|
||||||
" </D:owner>\n" +
|
|
||||||
"</D:lockinfo>",
|
|
||||||
LockInfo{},
|
|
||||||
http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
"bad: unfinished XML #1",
|
|
||||||
"" +
|
|
||||||
"<D:lockinfo xmlns:D='DAV:'>\n" +
|
|
||||||
" <D:lockscope><D:exclusive/></D:lockscope>\n" +
|
|
||||||
" <D:locktype><D:write/></D:locktype>\n",
|
|
||||||
LockInfo{},
|
|
||||||
http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
"bad: unfinished XML #2",
|
|
||||||
"" +
|
|
||||||
"<D:lockinfo xmlns:D='DAV:'>\n" +
|
|
||||||
" <D:lockscope><D:exclusive/></D:lockscope>\n" +
|
|
||||||
" <D:locktype><D:write/></D:locktype>\n" +
|
|
||||||
" <D:owner>\n",
|
|
||||||
LockInfo{},
|
|
||||||
http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
"good: empty",
|
|
||||||
"",
|
|
||||||
LockInfo{},
|
|
||||||
0,
|
|
||||||
}, {
|
|
||||||
"good: plain-text owner",
|
|
||||||
"" +
|
|
||||||
"<D:lockinfo xmlns:D='DAV:'>\n" +
|
|
||||||
" <D:lockscope><D:exclusive/></D:lockscope>\n" +
|
|
||||||
" <D:locktype><D:write/></D:locktype>\n" +
|
|
||||||
" <D:owner>gopher</D:owner>\n" +
|
|
||||||
"</D:lockinfo>",
|
|
||||||
LockInfo{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "lockinfo"},
|
|
||||||
Exclusive: new(struct{}),
|
|
||||||
Write: new(struct{}),
|
|
||||||
Owner: Owner{
|
|
||||||
InnerXML: "gopher",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
0,
|
|
||||||
}, {
|
|
||||||
"section 9.10.7",
|
|
||||||
"" +
|
|
||||||
"<D:lockinfo xmlns:D='DAV:'>\n" +
|
|
||||||
" <D:lockscope><D:exclusive/></D:lockscope>\n" +
|
|
||||||
" <D:locktype><D:write/></D:locktype>\n" +
|
|
||||||
" <D:owner>\n" +
|
|
||||||
" <D:href>http://example.org/~ejw/contact.html</D:href>\n" +
|
|
||||||
" </D:owner>\n" +
|
|
||||||
"</D:lockinfo>",
|
|
||||||
LockInfo{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "lockinfo"},
|
|
||||||
Exclusive: new(struct{}),
|
|
||||||
Write: new(struct{}),
|
|
||||||
Owner: Owner{
|
|
||||||
InnerXML: "\n <D:href>http://example.org/~ejw/contact.html</D:href>\n ",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
0,
|
|
||||||
}}
|
|
||||||
|
|
||||||
for _, tc := range testCases {
|
|
||||||
li, status, err := ReadLockInfo(strings.NewReader(tc.input))
|
|
||||||
if tc.wantStatus != 0 {
|
|
||||||
if err == nil {
|
|
||||||
t.Errorf("%s: got nil error, want non-nil", tc.desc)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
} else if err != nil {
|
|
||||||
t.Errorf("%s: %v", tc.desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(li, tc.wantLI) || status != tc.wantStatus {
|
|
||||||
t.Errorf("%s:\ngot lockInfo=%v, status=%v\nwant lockInfo=%v, status=%v",
|
|
||||||
tc.desc, li, status, tc.wantLI, tc.wantStatus)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestReadPropfind(t *testing.T) {
|
|
||||||
testCases := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
wantPF Propfind
|
|
||||||
wantStatus int
|
|
||||||
}{{
|
|
||||||
desc: "propfind: propname",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:propname/>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Propname: new(struct{}),
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: empty body means allprop",
|
|
||||||
input: "",
|
|
||||||
wantPF: Propfind{
|
|
||||||
Allprop: new(struct{}),
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: allprop",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:allprop/>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Allprop: new(struct{}),
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: allprop followed by include",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:allprop/>\n" +
|
|
||||||
" <A:include><A:displayname/></A:include>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Allprop: new(struct{}),
|
|
||||||
Include: PropfindProps{xml.Name{Space: "DAV:", Local: "displayname"}},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: include followed by allprop",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:include><A:displayname/></A:include>\n" +
|
|
||||||
" <A:allprop/>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Allprop: new(struct{}),
|
|
||||||
Include: PropfindProps{xml.Name{Space: "DAV:", Local: "displayname"}},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: propfind",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop><A:displayname/></A:prop>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Prop: PropfindProps{xml.Name{Space: "DAV:", Local: "displayname"}},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: prop with ignored comments",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop>\n" +
|
|
||||||
" <!-- ignore -->\n" +
|
|
||||||
" <A:displayname><!-- ignore --></A:displayname>\n" +
|
|
||||||
" </A:prop>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Prop: PropfindProps{xml.Name{Space: "DAV:", Local: "displayname"}},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: propfind with ignored whitespace",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop> <A:displayname/></A:prop>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Prop: PropfindProps{xml.Name{Space: "DAV:", Local: "displayname"}},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: propfind with ignored mixed-content",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop>foo<A:displayname/>bar</A:prop>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Prop: PropfindProps{xml.Name{Space: "DAV:", Local: "displayname"}},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: propname with ignored element (section A.4)",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:propname/>\n" +
|
|
||||||
" <E:leave-out xmlns:E='E:'>*boss*</E:leave-out>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantPF: Propfind{
|
|
||||||
XMLName: ixml.Name{Space: "DAV:", Local: "propfind"},
|
|
||||||
Propname: new(struct{}),
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: junk",
|
|
||||||
input: "xxx",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: propname and allprop (section A.3)",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:propname/>" +
|
|
||||||
" <A:allprop/>" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: propname and prop",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop><A:displayname/></A:prop>\n" +
|
|
||||||
" <A:propname/>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: allprop and prop",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:allprop/>\n" +
|
|
||||||
" <A:prop><A:foo/><A:/prop>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: empty propfind with ignored element (section A.4)",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <E:expired-props/>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: empty prop",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop/>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: prop with just chardata",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop>foo</A:prop>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "bad: interrupted prop",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop><A:foo></A:prop>\n",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "bad: malformed end element prop",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop><A:foo/></A:bar></A:prop>\n",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: property with chardata value",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop><A:foo>bar</A:foo></A:prop>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: property with whitespace value",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:prop><A:foo> </A:foo></A:prop>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "propfind: bad: include without allprop",
|
|
||||||
input: "" +
|
|
||||||
"<A:propfind xmlns:A='DAV:'>\n" +
|
|
||||||
" <A:include><A:foo/></A:include>\n" +
|
|
||||||
"</A:propfind>",
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}}
|
|
||||||
|
|
||||||
for _, tc := range testCases {
|
|
||||||
pf, status, err := ReadPropfind(strings.NewReader(tc.input))
|
|
||||||
if tc.wantStatus != 0 {
|
|
||||||
if err == nil {
|
|
||||||
t.Errorf("%s: got nil error, want non-nil", tc.desc)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
} else if err != nil {
|
|
||||||
t.Errorf("%s: %v", tc.desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(pf, tc.wantPF) || status != tc.wantStatus {
|
|
||||||
t.Errorf("%s:\ngot propfind=%v, status=%v\nwant propfind=%v, status=%v",
|
|
||||||
tc.desc, pf, status, tc.wantPF, tc.wantStatus)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMultistatusWriter(t *testing.T) {
|
|
||||||
///The "section x.y.z" test cases come from section x.y.z of the spec at
|
|
||||||
// http://www.webdav.org/specs/rfc4918.html
|
|
||||||
testCases := []struct {
|
|
||||||
desc string
|
|
||||||
responses []Response
|
|
||||||
respdesc string
|
|
||||||
writeHeader bool
|
|
||||||
wantXML string
|
|
||||||
wantCode int
|
|
||||||
wantErr error
|
|
||||||
}{{
|
|
||||||
desc: "section 9.2.2 (failed dependency)",
|
|
||||||
responses: []Response{{
|
|
||||||
Href: []string{"http://example.com/foo"},
|
|
||||||
Propstat: []SubPropstat{{
|
|
||||||
Prop: []Property{{
|
|
||||||
XMLName: xml.Name{
|
|
||||||
Space: "http://ns.example.com/",
|
|
||||||
Local: "Authors",
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
Status: "HTTP/1.1 424 Failed Dependency",
|
|
||||||
}, {
|
|
||||||
Prop: []Property{{
|
|
||||||
XMLName: xml.Name{
|
|
||||||
Space: "http://ns.example.com/",
|
|
||||||
Local: "Copyright-Owner",
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
Status: "HTTP/1.1 409 Conflict",
|
|
||||||
}},
|
|
||||||
ResponseDescription: "Copyright Owner cannot be deleted or altered.",
|
|
||||||
}},
|
|
||||||
wantXML: `` +
|
|
||||||
`<?xml version="1.0" encoding="UTF-8"?>` +
|
|
||||||
`<multistatus xmlns="DAV:">` +
|
|
||||||
` <response>` +
|
|
||||||
` <href>http://example.com/foo</href>` +
|
|
||||||
` <propstat>` +
|
|
||||||
` <prop>` +
|
|
||||||
` <Authors xmlns="http://ns.example.com/"></Authors>` +
|
|
||||||
` </prop>` +
|
|
||||||
` <status>HTTP/1.1 424 Failed Dependency</status>` +
|
|
||||||
` </propstat>` +
|
|
||||||
` <propstat xmlns="DAV:">` +
|
|
||||||
` <prop>` +
|
|
||||||
` <Copyright-Owner xmlns="http://ns.example.com/"></Copyright-Owner>` +
|
|
||||||
` </prop>` +
|
|
||||||
` <status>HTTP/1.1 409 Conflict</status>` +
|
|
||||||
` </propstat>` +
|
|
||||||
` <responsedescription>Copyright Owner cannot be deleted or altered.</responsedescription>` +
|
|
||||||
`</response>` +
|
|
||||||
`</multistatus>`,
|
|
||||||
wantCode: StatusMulti,
|
|
||||||
}, {
|
|
||||||
desc: "section 9.6.2 (lock-token-submitted)",
|
|
||||||
responses: []Response{{
|
|
||||||
Href: []string{"http://example.com/foo"},
|
|
||||||
Status: "HTTP/1.1 423 Locked",
|
|
||||||
Error: &XmlError{
|
|
||||||
InnerXML: []byte(`<lock-token-submitted xmlns="DAV:"/>`),
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
wantXML: `` +
|
|
||||||
`<?xml version="1.0" encoding="UTF-8"?>` +
|
|
||||||
`<multistatus xmlns="DAV:">` +
|
|
||||||
` <response>` +
|
|
||||||
` <href>http://example.com/foo</href>` +
|
|
||||||
` <status>HTTP/1.1 423 Locked</status>` +
|
|
||||||
` <error><lock-token-submitted xmlns="DAV:"/></error>` +
|
|
||||||
` </response>` +
|
|
||||||
`</multistatus>`,
|
|
||||||
wantCode: StatusMulti,
|
|
||||||
}, {
|
|
||||||
desc: "section 9.1.3",
|
|
||||||
responses: []Response{{
|
|
||||||
Href: []string{"http://example.com/foo"},
|
|
||||||
Propstat: []SubPropstat{{
|
|
||||||
Prop: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "http://ns.example.com/boxschema/", Local: "bigbox"},
|
|
||||||
InnerXML: []byte(`` +
|
|
||||||
`<BoxType xmlns="http://ns.example.com/boxschema/">` +
|
|
||||||
`Box type A` +
|
|
||||||
`</BoxType>`),
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "http://ns.example.com/boxschema/", Local: "author"},
|
|
||||||
InnerXML: []byte(`` +
|
|
||||||
`<Name xmlns="http://ns.example.com/boxschema/">` +
|
|
||||||
`J.J. Johnson` +
|
|
||||||
`</Name>`),
|
|
||||||
}},
|
|
||||||
Status: "HTTP/1.1 200 OK",
|
|
||||||
}, {
|
|
||||||
Prop: []Property{{
|
|
||||||
XMLName: xml.Name{Space: "http://ns.example.com/boxschema/", Local: "DingALing"},
|
|
||||||
}, {
|
|
||||||
XMLName: xml.Name{Space: "http://ns.example.com/boxschema/", Local: "Random"},
|
|
||||||
}},
|
|
||||||
Status: "HTTP/1.1 403 Forbidden",
|
|
||||||
ResponseDescription: "The user does not have access to the DingALing property.",
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
respdesc: "There has been an access violation error.",
|
|
||||||
wantXML: `` +
|
|
||||||
`<?xml version="1.0" encoding="UTF-8"?>` +
|
|
||||||
`<multistatus xmlns="DAV:" xmlns:B="http://ns.example.com/boxschema/">` +
|
|
||||||
` <response>` +
|
|
||||||
` <href>http://example.com/foo</href>` +
|
|
||||||
` <propstat>` +
|
|
||||||
` <prop>` +
|
|
||||||
` <B:bigbox><B:BoxType>Box type A</B:BoxType></B:bigbox>` +
|
|
||||||
` <B:author><B:Name>J.J. Johnson</B:Name></B:author>` +
|
|
||||||
` </prop>` +
|
|
||||||
` <status>HTTP/1.1 200 OK</status>` +
|
|
||||||
` </propstat>` +
|
|
||||||
` <propstat>` +
|
|
||||||
` <prop>` +
|
|
||||||
` <B:DingALing/>` +
|
|
||||||
` <B:Random/>` +
|
|
||||||
` </prop>` +
|
|
||||||
` <status>HTTP/1.1 403 Forbidden</status>` +
|
|
||||||
` <responsedescription>The user does not have access to the DingALing property.</responsedescription>` +
|
|
||||||
` </propstat>` +
|
|
||||||
` </response>` +
|
|
||||||
` <responsedescription>There has been an access violation error.</responsedescription>` +
|
|
||||||
`</multistatus>`,
|
|
||||||
wantCode: StatusMulti,
|
|
||||||
}, {
|
|
||||||
desc: "no response written",
|
|
||||||
// default of http.responseWriter
|
|
||||||
wantCode: http.StatusOK,
|
|
||||||
}, {
|
|
||||||
desc: "no response written (with description)",
|
|
||||||
respdesc: "too bad",
|
|
||||||
// default of http.responseWriter
|
|
||||||
wantCode: http.StatusOK,
|
|
||||||
}, {
|
|
||||||
desc: "empty multistatus with header",
|
|
||||||
writeHeader: true,
|
|
||||||
wantXML: `<multistatus xmlns="DAV:"></multistatus>`,
|
|
||||||
wantCode: StatusMulti,
|
|
||||||
}, {
|
|
||||||
desc: "bad: no href",
|
|
||||||
responses: []Response{{
|
|
||||||
Propstat: []SubPropstat{{
|
|
||||||
Prop: []Property{{
|
|
||||||
XMLName: xml.Name{
|
|
||||||
Space: "http://example.com/",
|
|
||||||
Local: "foo",
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
Status: "HTTP/1.1 200 OK",
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantErr: errInvalidResponse,
|
|
||||||
// default of http.responseWriter
|
|
||||||
wantCode: http.StatusOK,
|
|
||||||
}, {
|
|
||||||
desc: "bad: multiple hrefs and no status",
|
|
||||||
responses: []Response{{
|
|
||||||
Href: []string{"http://example.com/foo", "http://example.com/bar"},
|
|
||||||
}},
|
|
||||||
wantErr: errInvalidResponse,
|
|
||||||
// default of http.responseWriter
|
|
||||||
wantCode: http.StatusOK,
|
|
||||||
}, {
|
|
||||||
desc: "bad: one href and no propstat",
|
|
||||||
responses: []Response{{
|
|
||||||
Href: []string{"http://example.com/foo"},
|
|
||||||
}},
|
|
||||||
wantErr: errInvalidResponse,
|
|
||||||
// default of http.responseWriter
|
|
||||||
wantCode: http.StatusOK,
|
|
||||||
}, {
|
|
||||||
desc: "bad: status with one href and propstat",
|
|
||||||
responses: []Response{{
|
|
||||||
Href: []string{"http://example.com/foo"},
|
|
||||||
Propstat: []SubPropstat{{
|
|
||||||
Prop: []Property{{
|
|
||||||
XMLName: xml.Name{
|
|
||||||
Space: "http://example.com/",
|
|
||||||
Local: "foo",
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
Status: "HTTP/1.1 200 OK",
|
|
||||||
}},
|
|
||||||
Status: "HTTP/1.1 200 OK",
|
|
||||||
}},
|
|
||||||
wantErr: errInvalidResponse,
|
|
||||||
// default of http.responseWriter
|
|
||||||
wantCode: http.StatusOK,
|
|
||||||
}, {
|
|
||||||
desc: "bad: multiple hrefs and propstat",
|
|
||||||
responses: []Response{{
|
|
||||||
Href: []string{
|
|
||||||
"http://example.com/foo",
|
|
||||||
"http://example.com/bar",
|
|
||||||
},
|
|
||||||
Propstat: []SubPropstat{{
|
|
||||||
Prop: []Property{{
|
|
||||||
XMLName: xml.Name{
|
|
||||||
Space: "http://example.com/",
|
|
||||||
Local: "foo",
|
|
||||||
},
|
|
||||||
}},
|
|
||||||
Status: "HTTP/1.1 200 OK",
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
wantErr: errInvalidResponse,
|
|
||||||
// default of http.responseWriter
|
|
||||||
wantCode: http.StatusOK,
|
|
||||||
}}
|
|
||||||
|
|
||||||
n := xmlNormalizer{omitWhitespace: true}
|
|
||||||
loop:
|
|
||||||
for _, tc := range testCases {
|
|
||||||
rec := httptest.NewRecorder()
|
|
||||||
w := MultiStatusWriter{Writer: rec, ResponseDescription: tc.respdesc}
|
|
||||||
if tc.writeHeader {
|
|
||||||
if err := w.writeHeader(); err != nil {
|
|
||||||
t.Errorf("%s: got writeHeader error %v, want nil", tc.desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, r := range tc.responses {
|
|
||||||
if err := w.Write(&r); err != nil {
|
|
||||||
if err != tc.wantErr {
|
|
||||||
t.Errorf("%s: got write error %v, want %v",
|
|
||||||
tc.desc, err, tc.wantErr)
|
|
||||||
}
|
|
||||||
continue loop
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err := w.Close(); err != tc.wantErr {
|
|
||||||
t.Errorf("%s: got close error %v, want %v",
|
|
||||||
tc.desc, err, tc.wantErr)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if rec.Code != tc.wantCode {
|
|
||||||
t.Errorf("%s: got HTTP status code %d, want %d\n",
|
|
||||||
tc.desc, rec.Code, tc.wantCode)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
gotXML := rec.Body.String()
|
|
||||||
eq, err := n.equalXML(strings.NewReader(gotXML), strings.NewReader(tc.wantXML))
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("%s: equalXML: %v", tc.desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !eq {
|
|
||||||
t.Errorf("%s: XML body\ngot %s\nwant %s", tc.desc, gotXML, tc.wantXML)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestReadProppatch(t *testing.T) {
|
|
||||||
ppStr := func(pps []Proppatch) string {
|
|
||||||
var outer []string
|
|
||||||
for _, pp := range pps {
|
|
||||||
var inner []string
|
|
||||||
for _, p := range pp.Props {
|
|
||||||
inner = append(inner, fmt.Sprintf("{XMLName: %q, Lang: %q, InnerXML: %q}",
|
|
||||||
p.XMLName, p.Lang, p.InnerXML))
|
|
||||||
}
|
|
||||||
outer = append(outer, fmt.Sprintf("{Remove: %t, Props: [%s]}",
|
|
||||||
pp.Remove, strings.Join(inner, ", ")))
|
|
||||||
}
|
|
||||||
return "[" + strings.Join(outer, ", ") + "]"
|
|
||||||
}
|
|
||||||
|
|
||||||
testCases := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
wantPP []Proppatch
|
|
||||||
wantStatus int
|
|
||||||
}{{
|
|
||||||
desc: "proppatch: section 9.2 (with simple property value)",
|
|
||||||
input: `` +
|
|
||||||
`<?xml version="1.0" encoding="utf-8" ?>` +
|
|
||||||
`<D:propertyupdate xmlns:D="DAV:"` +
|
|
||||||
` xmlns:Z="http://ns.example.com/z/">` +
|
|
||||||
` <D:set>` +
|
|
||||||
` <D:prop><Z:Authors>somevalue</Z:Authors></D:prop>` +
|
|
||||||
` </D:set>` +
|
|
||||||
` <D:remove>` +
|
|
||||||
` <D:prop><Z:Copyright-Owner/></D:prop>` +
|
|
||||||
` </D:remove>` +
|
|
||||||
`</D:propertyupdate>`,
|
|
||||||
wantPP: []Proppatch{{
|
|
||||||
Props: []Property{{
|
|
||||||
xml.Name{Space: "http://ns.example.com/z/", Local: "Authors"},
|
|
||||||
"",
|
|
||||||
[]byte(`somevalue`),
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
Remove: true,
|
|
||||||
Props: []Property{{
|
|
||||||
xml.Name{Space: "http://ns.example.com/z/", Local: "Copyright-Owner"},
|
|
||||||
"",
|
|
||||||
nil,
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "proppatch: lang attribute on prop",
|
|
||||||
input: `` +
|
|
||||||
`<?xml version="1.0" encoding="utf-8" ?>` +
|
|
||||||
`<D:propertyupdate xmlns:D="DAV:">` +
|
|
||||||
` <D:set>` +
|
|
||||||
` <D:prop xml:lang="en">` +
|
|
||||||
` <foo xmlns="http://example.com/ns"/>` +
|
|
||||||
` </D:prop>` +
|
|
||||||
` </D:set>` +
|
|
||||||
`</D:propertyupdate>`,
|
|
||||||
wantPP: []Proppatch{{
|
|
||||||
Props: []Property{{
|
|
||||||
xml.Name{Space: "http://example.com/ns", Local: "foo"},
|
|
||||||
"en",
|
|
||||||
nil,
|
|
||||||
}},
|
|
||||||
}},
|
|
||||||
}, {
|
|
||||||
desc: "bad: remove with value",
|
|
||||||
input: `` +
|
|
||||||
`<?xml version="1.0" encoding="utf-8" ?>` +
|
|
||||||
`<D:propertyupdate xmlns:D="DAV:"` +
|
|
||||||
` xmlns:Z="http://ns.example.com/z/">` +
|
|
||||||
` <D:remove>` +
|
|
||||||
` <D:prop>` +
|
|
||||||
` <Z:Authors>` +
|
|
||||||
` <Z:Author>Jim Whitehead</Z:Author>` +
|
|
||||||
` </Z:Authors>` +
|
|
||||||
` </D:prop>` +
|
|
||||||
` </D:remove>` +
|
|
||||||
`</D:propertyupdate>`,
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "bad: empty propertyupdate",
|
|
||||||
input: `` +
|
|
||||||
`<?xml version="1.0" encoding="utf-8" ?>` +
|
|
||||||
`<D:propertyupdate xmlns:D="DAV:"` +
|
|
||||||
`</D:propertyupdate>`,
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}, {
|
|
||||||
desc: "bad: empty prop",
|
|
||||||
input: `` +
|
|
||||||
`<?xml version="1.0" encoding="utf-8" ?>` +
|
|
||||||
`<D:propertyupdate xmlns:D="DAV:"` +
|
|
||||||
` xmlns:Z="http://ns.example.com/z/">` +
|
|
||||||
` <D:remove>` +
|
|
||||||
` <D:prop/>` +
|
|
||||||
` </D:remove>` +
|
|
||||||
`</D:propertyupdate>`,
|
|
||||||
wantStatus: http.StatusBadRequest,
|
|
||||||
}}
|
|
||||||
|
|
||||||
for _, tc := range testCases {
|
|
||||||
pp, status, err := ReadProppatch(strings.NewReader(tc.input))
|
|
||||||
if tc.wantStatus != 0 {
|
|
||||||
if err == nil {
|
|
||||||
t.Errorf("%s: got nil error, want non-nil", tc.desc)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
} else if err != nil {
|
|
||||||
t.Errorf("%s: %v", tc.desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if status != tc.wantStatus {
|
|
||||||
t.Errorf("%s: got status %d, want %d", tc.desc, status, tc.wantStatus)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(pp, tc.wantPP) || status != tc.wantStatus {
|
|
||||||
t.Errorf("%s: proppatch\ngot %v\nwant %v", tc.desc, ppStr(pp), ppStr(tc.wantPP))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnmarshalXMLValue(t *testing.T) {
|
|
||||||
testCases := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
wantVal string
|
|
||||||
}{{
|
|
||||||
desc: "simple char data",
|
|
||||||
input: "<root>foo</root>",
|
|
||||||
wantVal: "foo",
|
|
||||||
}, {
|
|
||||||
desc: "empty element",
|
|
||||||
input: "<root><foo/></root>",
|
|
||||||
wantVal: "<foo/>",
|
|
||||||
}, {
|
|
||||||
desc: "preserve namespace",
|
|
||||||
input: `<root><foo xmlns="bar"/></root>`,
|
|
||||||
wantVal: `<foo xmlns="bar"/>`,
|
|
||||||
}, {
|
|
||||||
desc: "preserve root element namespace",
|
|
||||||
input: `<root xmlns:bar="bar"><bar:foo/></root>`,
|
|
||||||
wantVal: `<foo xmlns="bar"/>`,
|
|
||||||
}, {
|
|
||||||
desc: "preserve whitespace",
|
|
||||||
input: "<root> \t </root>",
|
|
||||||
wantVal: " \t ",
|
|
||||||
}, {
|
|
||||||
desc: "preserve mixed content",
|
|
||||||
input: `<root xmlns="bar"> <foo>a<bam xmlns="baz"/> </foo> </root>`,
|
|
||||||
wantVal: ` <foo xmlns="bar">a<bam xmlns="baz"/> </foo> `,
|
|
||||||
}, {
|
|
||||||
desc: "section 9.2",
|
|
||||||
input: `` +
|
|
||||||
`<Z:Authors xmlns:Z="http://ns.example.com/z/">` +
|
|
||||||
` <Z:Author>Jim Whitehead</Z:Author>` +
|
|
||||||
` <Z:Author>Roy Fielding</Z:Author>` +
|
|
||||||
`</Z:Authors>`,
|
|
||||||
wantVal: `` +
|
|
||||||
` <Author xmlns="http://ns.example.com/z/">Jim Whitehead</Author>` +
|
|
||||||
` <Author xmlns="http://ns.example.com/z/">Roy Fielding</Author>`,
|
|
||||||
}, {
|
|
||||||
desc: "section 4.3.1 (mixed content)",
|
|
||||||
input: `` +
|
|
||||||
`<x:author ` +
|
|
||||||
` xmlns:x='http://example.com/ns' ` +
|
|
||||||
` xmlns:D="DAV:">` +
|
|
||||||
` <x:name>Jane Doe</x:name>` +
|
|
||||||
` <!-- Jane's contact info -->` +
|
|
||||||
` <x:uri type='email'` +
|
|
||||||
` added='2005-11-26'>mailto:jane.doe@example.com</x:uri>` +
|
|
||||||
` <x:uri type='web'` +
|
|
||||||
` added='2005-11-27'>http://www.example.com</x:uri>` +
|
|
||||||
` <x:notes xmlns:h='http://www.w3.org/1999/xhtml'>` +
|
|
||||||
` Jane has been working way <h:em>too</h:em> long on the` +
|
|
||||||
` long-awaited revision of <![CDATA[<RFC2518>]]>.` +
|
|
||||||
` </x:notes>` +
|
|
||||||
`</x:author>`,
|
|
||||||
wantVal: `` +
|
|
||||||
` <name xmlns="http://example.com/ns">Jane Doe</name>` +
|
|
||||||
` ` +
|
|
||||||
` <uri type='email'` +
|
|
||||||
` xmlns="http://example.com/ns" ` +
|
|
||||||
` added='2005-11-26'>mailto:jane.doe@example.com</uri>` +
|
|
||||||
` <uri added='2005-11-27'` +
|
|
||||||
` type='web'` +
|
|
||||||
` xmlns="http://example.com/ns">http://www.example.com</uri>` +
|
|
||||||
` <notes xmlns="http://example.com/ns" ` +
|
|
||||||
` xmlns:h="http://www.w3.org/1999/xhtml">` +
|
|
||||||
` Jane has been working way <h:em>too</h:em> long on the` +
|
|
||||||
` long-awaited revision of <RFC2518>.` +
|
|
||||||
` </notes>`,
|
|
||||||
}}
|
|
||||||
|
|
||||||
var n xmlNormalizer
|
|
||||||
for _, tc := range testCases {
|
|
||||||
d := ixml.NewDecoder(strings.NewReader(tc.input))
|
|
||||||
var v XmlValue
|
|
||||||
if err := d.Decode(&v); err != nil {
|
|
||||||
t.Errorf("%s: got error %v, want nil", tc.desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
eq, err := n.equalXML(bytes.NewReader(v), strings.NewReader(tc.wantVal))
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("%s: equalXML: %v", tc.desc, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !eq {
|
|
||||||
t.Errorf("%s:\ngot %s\nwant %s", tc.desc, string(v), tc.wantVal)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// xmlNormalizer normalizes XML.
|
|
||||||
type xmlNormalizer struct {
|
|
||||||
// omitWhitespace instructs to ignore whitespace between element tags.
|
|
||||||
omitWhitespace bool
|
|
||||||
// omitComments instructs to ignore XML comments.
|
|
||||||
omitComments bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// normalize writes the normalized XML content of r to w. It applies the
|
|
||||||
// following rules
|
|
||||||
//
|
|
||||||
// * Rename namespace prefixes according to an internal heuristic.
|
|
||||||
// * Remove unnecessary namespace declarations.
|
|
||||||
// * Sort attributes in XML start elements in lexical order of their
|
|
||||||
// fully qualified name.
|
|
||||||
// * Remove XML directives and processing instructions.
|
|
||||||
// * Remove CDATA between XML tags that only contains whitespace, if
|
|
||||||
// instructed to do so.
|
|
||||||
// * Remove comments, if instructed to do so.
|
|
||||||
//
|
|
||||||
func (n *xmlNormalizer) normalize(w io.Writer, r io.Reader) error {
|
|
||||||
d := ixml.NewDecoder(r)
|
|
||||||
e := ixml.NewEncoder(w)
|
|
||||||
for {
|
|
||||||
t, err := d.Token()
|
|
||||||
if err != nil {
|
|
||||||
if t == nil && err == io.EOF {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
switch val := t.(type) {
|
|
||||||
case ixml.Directive, ixml.ProcInst:
|
|
||||||
continue
|
|
||||||
case ixml.Comment:
|
|
||||||
if n.omitComments {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
case ixml.CharData:
|
|
||||||
if n.omitWhitespace && len(bytes.TrimSpace(val)) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
case ixml.StartElement:
|
|
||||||
start, _ := ixml.CopyToken(val).(ixml.StartElement)
|
|
||||||
attr := start.Attr[:0]
|
|
||||||
for _, a := range start.Attr {
|
|
||||||
if a.Name.Space == "xmlns" || a.Name.Local == "xmlns" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
attr = append(attr, a)
|
|
||||||
}
|
|
||||||
sort.Sort(byName(attr))
|
|
||||||
start.Attr = attr
|
|
||||||
t = start
|
|
||||||
}
|
|
||||||
err = e.EncodeToken(t)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return e.Flush()
|
|
||||||
}
|
|
||||||
|
|
||||||
// equalXML tests for equality of the normalized XML contents of a and b.
|
|
||||||
func (n *xmlNormalizer) equalXML(a, b io.Reader) (bool, error) {
|
|
||||||
var buf bytes.Buffer
|
|
||||||
if err := n.normalize(&buf, a); err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
normA := buf.String()
|
|
||||||
buf.Reset()
|
|
||||||
if err := n.normalize(&buf, b); err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
normB := buf.String()
|
|
||||||
return normA == normB, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type byName []ixml.Attr
|
|
||||||
|
|
||||||
func (a byName) Len() int { return len(a) }
|
|
||||||
func (a byName) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
|
||||||
func (a byName) Less(i, j int) bool {
|
|
||||||
if a[i].Name.Space != a[j].Name.Space {
|
|
||||||
return a[i].Name.Space < a[j].Name.Space
|
|
||||||
}
|
|
||||||
return a[i].Name.Local < a[j].Name.Local
|
|
||||||
}
|
|
@ -612,7 +612,7 @@ func (this *MatterService) CreateDirectories(user *User, dirPath string) *Matter
func (this *MatterService) WrapDetail(matter *Matter) *Matter {

	if matter == nil {
		this.PanicBadRequest("matter cannot be nil.")
		panic(result.BadRequest("matter cannot be nil."))
	}

	// Assemble the file's content and show its parent component.