Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
89 changes: 39 additions & 50 deletions parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ import (
"text/scanner"
)

//go:generate stringer -type=tokenType

// tokenType identifies the lexical category of a scanned token
// (block delimiter, separator, selector prefix, or plain value).
type tokenType int

const (
Expand All @@ -23,41 +25,54 @@ const (
tokenStatementEnd
)

// Rule is a string type that represents a CSS rule.
type Rule string

// tokenEntry is a single scanned token: the raw text returned by the
// scanner together with the position at which it was read.
type tokenEntry struct {
value string
pos scanner.Position
}

// tokenizer wraps a text/scanner.Scanner and yields tokenEntry values.
type tokenizer struct {
s *scanner.Scanner
}

// Type returns the rule type, which can be a class, id or a tag.
func (rule Rule) Type() string {
if strings.HasPrefix(string(rule), ".") {
return "class"
func newTokenType(typ string) tokenType {
types := map[string]tokenType{
"{": tokenBlockStart,
"}": tokenBlockEnd,
":": tokenStyleSeparator,
";": tokenStatementEnd,
".": tokenSelector,
"#": tokenSelector,
}
if strings.HasPrefix(string(rule), "#") {
return "id"

result, ok := types[typ]
if ok {
return result
}
return "tag"

return tokenValue
}

// typ classifies the entry's raw text via newTokenType.
func (e tokenEntry) typ() tokenType {
return newTokenType(e.value)
}

type tokenizer struct {
s *scanner.Scanner
}

// newTokenizer returns a tokenizer that reads tokens from r using a
// freshly initialized text/scanner.Scanner.
func newTokenizer(r io.Reader) *tokenizer {
	var sc scanner.Scanner
	sc.Init(r)
	return &tokenizer{s: &sc}
}

func (t *tokenizer) next() (tokenEntry, error) {
token := t.s.Scan()
if token == scanner.EOF {
return tokenEntry{}, errors.New("EOF")
}
value := t.s.TokenText()
pos := t.s.Pos()
if newTokenType(value).String() == "STYLE_SEPARATOR" {
if newTokenType(value) == tokenStyleSeparator {
t.s.IsIdentRune = func(ch rune, i int) bool { // property value can contain spaces
if ch == -1 || ch == '\n' || ch == '\r' || ch == '\t' || ch == ':' || ch == ';' {
return false
Expand All @@ -78,44 +93,18 @@ func (t *tokenizer) next() (tokenEntry, error) {
}, nil
}

// String returns a human-readable name for the token type; any value
// without a dedicated name is reported as "VALUE".
func (t tokenType) String() string {
	names := map[tokenType]string{
		tokenBlockStart:     "BLOCK_START",
		tokenBlockEnd:       "BLOCK_END",
		tokenStyleSeparator: "STYLE_SEPARATOR",
		tokenStatementEnd:   "STATEMENT_END",
		tokenSelector:       "SELECTOR",
	}
	if name, ok := names[t]; ok {
		return name
	}
	return "VALUE"
}
// Rule is a string type that represents a CSS rule.
type Rule string

// Type returns the rule type derived from the selector's first
// character: "class" for selectors starting with ".", "id" for
// selectors starting with "#", and "tag" for everything else
// (including the empty string).
func (rule Rule) Type() string {
	if strings.HasPrefix(string(rule), ".") {
		return "class"
	}
	if strings.HasPrefix(string(rule), "#") {
		return "id"
	}
	return "tag"
}

func buildList(r io.Reader) *list.List {
Expand Down
2 changes: 1 addition & 1 deletion parser_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ rule1 {
}

for _, tt := range cases {
t.Run("GoodCSS", func(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
css, err := Unmarshal([]byte(tt.CSS))
if err != nil {
t.Fatal(err)
Expand Down
31 changes: 31 additions & 0 deletions tokentype_string.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading