From b9fd4df60fca2edb3e759a40b0f8e743e62e81de Mon Sep 17 00:00:00 2001
From: sawka
Date: Tue, 15 Nov 2022 00:39:53 -0800
Subject: [PATCH] words -> str. test roundtrip

---
 pkg/shparse/shparse_test.go |  5 +++++
 pkg/shparse/tokenize.go     | 12 ++++++++++++
 2 files changed, 17 insertions(+)

diff --git a/pkg/shparse/shparse_test.go b/pkg/shparse/shparse_test.go
index c81ccd801..206b225e2 100644
--- a/pkg/shparse/shparse_test.go
+++ b/pkg/shparse/shparse_test.go
@@ -18,6 +18,11 @@ func testParse(t *testing.T, s string) {
 	fmt.Printf("%s\n", s)
 	dumpWords(words, " ")
 	fmt.Printf("\n")
+
+	outStr := wordsToStr(words)
+	if outStr != s {
+		t.Errorf("tokenization output does not match input: %q => %q", s, outStr)
+	}
 }
 
 func Test1(t *testing.T) {
diff --git a/pkg/shparse/tokenize.go b/pkg/shparse/tokenize.go
index dd6e0d265..9d447b89f 100644
--- a/pkg/shparse/tokenize.go
+++ b/pkg/shparse/tokenize.go
@@ -1,6 +1,7 @@
 package shparse
 
 import (
+	"bytes"
 	"unicode"
 )
 
@@ -108,3 +109,14 @@ func Tokenize(cmd string) []*wordType {
 	state.finish(c)
 	return state.Rtn
 }
+
+func wordsToStr(words []*wordType) string {
+	var buf bytes.Buffer
+	for _, word := range words {
+		if len(word.Prefix) > 0 {
+			buf.WriteString(string(word.Prefix))
+		}
+		buf.WriteString(string(word.Raw))
+	}
+	return buf.String()
+}
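
Note (reviewer sketch, not part of the patch): the round-trip property the new test asserts is that concatenating each word's Prefix and Raw bytes reproduces the original command string exactly. Below is a minimal illustration, assuming Tokenize and wordsToStr keep the signatures shown above and that the snippet lives inside package shparse (the function name roundTripDemo and the input string are hypothetical, chosen only for the example):

    package shparse

    import "fmt"

    // roundTripDemo tokenizes a command and rebuilds it with wordsToStr,
    // printing whether the rebuilt string matches the input byte-for-byte.
    func roundTripDemo() {
        input := `ls -l "my file.txt"`
        words := Tokenize(input)    // split the command into words
        output := wordsToStr(words) // each word's Prefix (if any) is written before its Raw bytes
        fmt.Printf("lossless: %v\n", output == input)
    }

The test added in shparse_test.go applies the same check to every input passed through testParse, so any tokenizer change that drops or reorders bytes now fails the suite.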