From 2fa7c464f019f67c5c0494aaf5ac0f5ecc1ee7a7 Mon Sep 17 00:00:00 2001
From: Harrison Healey
Date: Tue, 16 Jan 2018 12:03:31 -0500
Subject: Updated dependencies and added avct/uasurfer (#8089)

* Updated dependencies and added avct/uasurfer

* Added uasurfer to NOTICE.txt
---
 vendor/github.com/miekg/dns/scan.go | 66 ++++++++++++++++++------------------
 1 file changed, 33 insertions(+), 33 deletions(-)

(limited to 'vendor/github.com/miekg/dns/scan.go')

diff --git a/vendor/github.com/miekg/dns/scan.go b/vendor/github.com/miekg/dns/scan.go
index 243b9cf18..f9cd47401 100644
--- a/vendor/github.com/miekg/dns/scan.go
+++ b/vendor/github.com/miekg/dns/scan.go
@@ -1,23 +1,14 @@
 package dns
 
 import (
+	"fmt"
 	"io"
-	"log"
 	"os"
+	"path/filepath"
 	"strconv"
 	"strings"
 )
 
-type debugging bool
-
-const debug debugging = false
-
-func (d debugging) Printf(format string, args ...interface{}) {
-	if d {
-		log.Printf(format, args...)
-	}
-}
-
 const maxTok = 2048 // Largest token we can return.
 const maxUint16 = 1<<16 - 1
 
@@ -127,7 +118,7 @@ func NewRR(s string) (RR, error) {
 // See NewRR for more documentation.
 func ReadRR(q io.Reader, filename string) (RR, error) {
 	defttl := &ttlState{defaultTtl, false}
-	r := <-parseZoneHelper(q, ".", defttl, filename, 1)
+	r := <-parseZoneHelper(q, ".", filename, defttl, 1)
 	if r == nil {
 		return nil, nil
 	}
@@ -164,25 +155,37 @@ func ReadRR(q io.Reader, filename string) (RR, error) {
 // The text "; this is comment" is returned in Token.Comment. Comments inside the
 // RR are discarded. Comments on a line by themselves are discarded too.
 func ParseZone(r io.Reader, origin, file string) chan *Token {
-	return parseZoneHelper(r, origin, nil, file, 10000)
+	return parseZoneHelper(r, origin, file, nil, 10000)
 }
 
-func parseZoneHelper(r io.Reader, origin string, defttl *ttlState, file string, chansize int) chan *Token {
+func parseZoneHelper(r io.Reader, origin, file string, defttl *ttlState, chansize int) chan *Token {
 	t := make(chan *Token, chansize)
-	go parseZone(r, origin, defttl, file, t, 0)
+	go parseZone(r, origin, file, defttl, t, 0)
 	return t
 }
 
-func parseZone(r io.Reader, origin string, defttl *ttlState, f string, t chan *Token, include int) {
+func parseZone(r io.Reader, origin, f string, defttl *ttlState, t chan *Token, include int) {
 	defer func() {
 		if include == 0 {
 			close(t)
 		}
 	}()
-	s := scanInit(r)
+	s, cancel := scanInit(r)
 	c := make(chan lex)
 	// Start the lexer
 	go zlexer(s, c)
+
+	defer func() {
+		cancel()
+		// zlexer can send up to three tokens, the next one and possibly 2 remainders.
+		// Do a non-blocking read.
+		_, ok := <-c
+		_, ok = <-c
+		_, ok = <-c
+		if !ok {
+			// too bad
+		}
+	}()
 	// 6 possible beginnings of a line, _ is a space
 	// 0. zRRTYPE -> all omitted until the rrtype
 	// 1. zOwner _ zRrtype -> class/ttl omitted
@@ -296,16 +299,24 @@ func parseZone(r io.Reader, origin, f string, defttl *ttlState, t chan *T
 				return
 			}
 			// Start with the new file
-			r1, e1 := os.Open(l.token)
+			includePath := l.token
+			if !filepath.IsAbs(includePath) {
+				includePath = filepath.Join(filepath.Dir(f), includePath)
+			}
+			r1, e1 := os.Open(includePath)
 			if e1 != nil {
-				t <- &Token{Error: &ParseError{f, "failed to open `" + l.token + "'", l}}
+				msg := fmt.Sprintf("failed to open `%s'", l.token)
+				if !filepath.IsAbs(l.token) {
+					msg += fmt.Sprintf(" as `%s'", includePath)
+				}
+				t <- &Token{Error: &ParseError{f, msg, l}}
 				return
 			}
 			if include+1 > 7 {
 				t <- &Token{Error: &ParseError{f, "too deeply nested $INCLUDE", l}}
 				return
 			}
-			parseZone(r1, neworigin, defttl, l.token, t, include+1)
+			parseZone(r1, neworigin, includePath, defttl, t, include+1)
 			st = zExpectOwnerDir
 		case zExpectDirTTLBl:
 			if l.value != zBlank {
@@ -497,14 +508,12 @@ func zlexer(s *scan, c chan lex) {
 			if stri >= maxTok {
 				l.token = "token length insufficient for parsing"
 				l.err = true
-				debug.Printf("[%+v]", l.token)
 				c <- l
 				return
 			}
 			if comi >= maxTok {
 				l.token = "comment length insufficient for parsing"
 				l.err = true
-				debug.Printf("[%+v]", l.token)
 				c <- l
 				return
 			}
@@ -547,7 +556,6 @@ func zlexer(s *scan, c chan lex) {
 				case "$GENERATE":
 					l.value = zDirGenerate
 				}
-				debug.Printf("[7 %+v]", l.token)
 				c <- l
 			} else {
 				l.value = zString
@@ -569,6 +577,7 @@ func zlexer(s *scan, c chan lex) {
 						return
 					}
 					l.value = zRrtpe
+					rrtype = true
 					l.torc = t
 				}
 			}
@@ -589,16 +598,14 @@ func zlexer(s *scan, c chan lex) {
 					}
 				}
 			}
-			debug.Printf("[6 %+v]", l.token)
 			c <- l
 		}
 		stri = 0
-		// I reverse space stuff here
+
 		if !space && !commt {
 			l.value = zBlank
 			l.token = " "
 			l.length = 1
-			debug.Printf("[5 %+v]", l.token)
 			c <- l
 		}
 		owner = false
@@ -621,7 +628,6 @@ func zlexer(s *scan, c chan lex) {
 				l.token = string(str[:stri])
 				l.tokenUpper = strings.ToUpper(l.token)
 				l.length = stri
-				debug.Printf("[4 %+v]", l.token)
 				c <- l
 				stri = 0
 			}
@@ -659,7 +665,6 @@ func zlexer(s *scan, c chan lex) {
 				l.tokenUpper = l.token
 				l.length = 1
 				l.comment = string(com[:comi])
-				debug.Printf("[3 %+v %+v]", l.token, l.comment)
 				c <- l
 				l.comment = ""
 				comi = 0
@@ -685,14 +690,12 @@ func zlexer(s *scan, c chan lex) {
 						rrtype = true
 					}
 				}
-				debug.Printf("[2 %+v]", l.token)
 				c <- l
 			}
 			l.value = zNewline
 			l.token = "\n"
 			l.tokenUpper = l.token
 			l.length = 1
-			debug.Printf("[1 %+v]", l.token)
 			c <- l
 			stri = 0
 			commt = false
@@ -738,7 +741,6 @@ func zlexer(s *scan, c chan lex) {
 				l.tokenUpper = strings.ToUpper(l.token)
 				l.length = stri
 
-				debug.Printf("[%+v]", l.token)
 				c <- l
 				stri = 0
 			}
@@ -774,7 +776,6 @@ func zlexer(s *scan, c chan lex) {
 				l.token = "extra closing brace"
 				l.tokenUpper = l.token
 				l.err = true
-				debug.Printf("[%+v]", l.token)
 				c <- l
 				return
 			}
@@ -800,7 +801,6 @@ func zlexer(s *scan, c chan lex) {
 		l.tokenUpper = strings.ToUpper(l.token)
 		l.length = stri
 		l.value = zString
-		debug.Printf("[%+v]", l.token)
 		c <- l
 	}
 	if brace != 0 {
--
cgit v1.2.3-1-g7c22