...

Source file src/github.com/coreos/go-systemd/v22/unit/deserialize.go

Documentation: github.com/coreos/go-systemd/v22/unit
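
For orientation, here is a minimal usage sketch of the exported entry points defined in this file (DeserializeOptions, DeserializeSections, and the deprecated Deserialize). The sample unit text, program scaffolding, and error handling below are illustrative only and are not part of this file:

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/coreos/go-systemd/v22/unit"
)

func main() {
	contents := "[Service]\nExecStart=/usr/bin/sleep infinity\nRestart=always\n"

	// Flat view: every option, tagged with its section.
	opts, err := unit.DeserializeOptions(strings.NewReader(contents))
	if err != nil {
		log.Fatal(err)
	}
	for _, o := range opts {
		fmt.Printf("%s/%s=%s\n", o.Section, o.Name, o.Value)
	}

	// Structured view: sections with their entries.
	sections, err := unit.DeserializeSections(strings.NewReader(contents))
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range sections {
		fmt.Printf("[%s] has %d entries\n", s.Section, len(s.Entries))
	}
}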

// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package unit

import (
	"bufio"
	"bytes"
	"errors"
	"fmt"
	"io"
	"strings"
	"unicode"
)

const (
	// SYSTEMD_LINE_MAX mimics the maximum line length that systemd can use.
	// On typical systemd platforms (i.e. modern Linux), this will most
	// commonly be 2048, so let's use that as a sanity check.
	// Technically, we should probably pull this at runtime:
	//    SYSTEMD_LINE_MAX = int(C.sysconf(C.__SC_LINE_MAX))
	// but this would introduce an (unfortunate) dependency on cgo
	SYSTEMD_LINE_MAX = 2048

	// SYSTEMD_NEWLINE defines characters that systemd considers indicators
	// for a newline.
	SYSTEMD_NEWLINE = "\r\n"
)

var (
	// ErrLineTooLong gets returned when a line is too long for systemd to handle.
	ErrLineTooLong = fmt.Errorf("line too long (max %d bytes)", SYSTEMD_LINE_MAX)
)

// DeserializeOptions parses a systemd unit file into a list of UnitOptions.
func DeserializeOptions(f io.Reader) (opts []*UnitOption, err error) {
	_, options, err := deserializeAll(f)
	return options, err
}

// DeserializeSections parses a systemd unit file into a list of UnitSections.
func DeserializeSections(f io.Reader) ([]*UnitSection, error) {
	sections, _, err := deserializeAll(f)
	return sections, err
}

// Deserialize parses a systemd unit file into a list of UnitOptions.
//
// Deprecated: use DeserializeOptions instead; this function will be removed
// at a future date.
func Deserialize(f io.Reader) (opts []*UnitOption, err error) {
	return DeserializeOptions(f)
}

type lexDataType int

const (
	sectionKind lexDataType = iota
	optionKind
)

// lexData supports either data type in the lex channel.
// Poor man's union data type.
type lexData struct {
	Type    lexDataType
	Option  *UnitOption
	Section *UnitSection
}

// deserializeAll deserializes a unit file into both UnitSections and UnitOptions.
func deserializeAll(f io.Reader) ([]*UnitSection, []*UnitOption, error) {

	lexer, lexchan, errchan := newLexer(f)

	go lexer.lex()

	sections := []*UnitSection{}
	options := []*UnitOption{}

	for ld := range lexchan {
		switch ld.Type {
		case optionKind:
			if ld.Option != nil {
				// add to options
				opt := ld.Option
				options = append(options, &(*opt))

				// sanity check. "should not happen" as sectionKind is first in code flow.
				if len(sections) == 0 {
					return nil, nil, fmt.Errorf(
						"Unit file misparse: option before section")
				}

				// add to newest section entries.
				s := len(sections) - 1
				sections[s].Entries = append(sections[s].Entries,
					&UnitEntry{Name: opt.Name, Value: opt.Value})
			}
		case sectionKind:
			if ld.Section != nil {
				sections = append(sections, ld.Section)
			}
		}
	}

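	// The lexer sends at most one error on the buffered errchan and closes
	// both channels when it finishes, so once lexchan is drained the read
	// below yields either that error or nil.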
	err := <-errchan

	return sections, options, err
}

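// newLexer wraps f in a buffered reader and returns a lexer along with the
// channel on which it delivers parsed data and the buffered channel carrying
// a terminal error, if any.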
func newLexer(f io.Reader) (*lexer, <-chan *lexData, <-chan error) {
	lexchan := make(chan *lexData)
	errchan := make(chan error, 1)
	buf := bufio.NewReader(f)

	return &lexer{buf, lexchan, errchan, ""}, lexchan, errchan
}

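// lexer incrementally tokenizes a unit file, emitting parsed sections and
// options on lexchan and reporting any terminal error on errchan.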
type lexer struct {
	buf     *bufio.Reader
	lexchan chan *lexData
	errchan chan error
	section string
}

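// lex drives the lexer state machine until it completes or fails, guarding
// against over-long lines, and closes both output channels when done.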
func (l *lexer) lex() {
	defer func() {
		close(l.lexchan)
		close(l.errchan)
	}()
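	// Each lexStep returns the next step to run; lexing stops when a step
	// returns nil or reports an error.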
	next := l.lexNextSection
	for next != nil {
		if l.buf.Buffered() >= SYSTEMD_LINE_MAX {
			// systemd truncates lines longer than LINE_MAX
			// https://bugs.freedesktop.org/show_bug.cgi?id=85308
			// Rather than allowing this to pass silently, explicitly
			// reject over-long lines with ErrLineTooLong.
			line, err := l.buf.Peek(SYSTEMD_LINE_MAX)
			if err != nil {
				l.errchan <- err
				return
			}
			if !bytes.ContainsAny(line, SYSTEMD_NEWLINE) {
				l.errchan <- ErrLineTooLong
				return
			}
		}

		var err error
		next, err = next()
		if err != nil {
			l.errchan <- err
			return
		}
	}
}

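// lexStep is a single state of the lexer state machine. It returns the next
// step to run, or nil when lexing is complete.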
type lexStep func() (lexStep, error)

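// lexSectionName reads up to the ']' closing a section header and hands the
// section name to lexSectionSuffixFunc.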
func (l *lexer) lexSectionName() (lexStep, error) {
	sec, err := l.buf.ReadBytes(']')
	if err != nil {
		return nil, errors.New("unable to find end of section")
	}

	return l.lexSectionSuffixFunc(string(sec[:len(sec)-1])), nil
}

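// lexSectionSuffixFunc checks that only whitespace follows the closing ']' of
// a section header, emits the new section on lexchan, and continues with the
// section's options or the next section.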
func (l *lexer) lexSectionSuffixFunc(section string) lexStep {
	return func() (lexStep, error) {
		garbage, _, err := l.toEOL()
		if err != nil {
			return nil, err
		}

		garbage = bytes.TrimSpace(garbage)
		if len(garbage) > 0 {
			return nil, fmt.Errorf("found garbage after section name %s: %q", section, garbage)
		}

		l.lexchan <- &lexData{
			Type:    sectionKind,
			Section: &UnitSection{Section: section, Entries: []*UnitEntry{}},
			Option:  nil,
		}

		return l.lexNextSectionOrOptionFunc(section), nil
	}
}

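// ignoreLineFunc discards the rest of the current line, following any
// backslash continuations onto subsequent lines, and then resumes at next.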
func (l *lexer) ignoreLineFunc(next lexStep) lexStep {
	return func() (lexStep, error) {
		for {
			line, _, err := l.toEOL()
			if err != nil {
				return nil, err
			}

			line = bytes.TrimSuffix(line, []byte{' '})

			// lack of continuation means this line has been exhausted
			if !bytes.HasSuffix(line, []byte{'\\'}) {
				break
			}
		}

		// the continued line has been fully consumed; safe to move on
		return next, nil
	}
}

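// lexNextSection scans for the '[' opening the next section header, skipping
// comment lines and any other leading content.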
func (l *lexer) lexNextSection() (lexStep, error) {
	r, _, err := l.buf.ReadRune()
	if err != nil {
		if err == io.EOF {
			err = nil
		}
		return nil, err
	}

	if r == '[' {
		return l.lexSectionName, nil
	} else if isComment(r) {
		return l.ignoreLineFunc(l.lexNextSection), nil
	}

	return l.lexNextSection, nil
}

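// lexNextSectionOrOptionFunc skips whitespace and comment lines within
// section, then dispatches to either a new section header or an option name.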
func (l *lexer) lexNextSectionOrOptionFunc(section string) lexStep {
	return func() (lexStep, error) {
		r, _, err := l.buf.ReadRune()
		if err != nil {
			if err == io.EOF {
				err = nil
			}
			return nil, err
		}

		if unicode.IsSpace(r) {
			return l.lexNextSectionOrOptionFunc(section), nil
		} else if r == '[' {
			return l.lexSectionName, nil
		} else if isComment(r) {
			return l.ignoreLineFunc(l.lexNextSectionOrOptionFunc(section)), nil
		}

		l.buf.UnreadRune()
		return l.lexOptionNameFunc(section), nil
	}
}

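// lexOptionNameFunc accumulates runes up to the '=' separator and passes the
// trimmed option name on to lexOptionValueFunc.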
func (l *lexer) lexOptionNameFunc(section string) lexStep {
	return func() (lexStep, error) {
		var partial bytes.Buffer
		for {
			r, _, err := l.buf.ReadRune()
			if err != nil {
				return nil, err
			}

			if r == '\n' || r == '\r' {
				return nil, errors.New("unexpected newline encountered while parsing option name")
			}

			if r == '=' {
				break
			}

			partial.WriteRune(r)
		}

		name := strings.TrimSpace(partial.String())
		return l.lexOptionValueFunc(section, name, bytes.Buffer{}), nil
	}
}

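// lexOptionValueFunc reads the option's value, concatenating any
// backslash-continued lines, emits the completed option on lexchan, and then
// returns to lexing the next option or section.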
func (l *lexer) lexOptionValueFunc(section, name string, partial bytes.Buffer) lexStep {
	return func() (lexStep, error) {
		for {
			line, eof, err := l.toEOL()
			if err != nil {
				return nil, err
			}

			if len(bytes.TrimSpace(line)) == 0 {
				break
			}

			partial.Write(line)

			// lack of continuation means this value has been exhausted
			idx := bytes.LastIndex(line, []byte{'\\'})
			if idx == -1 || idx != (len(line)-1) {
				break
			}

			if !eof {
				partial.WriteRune('\n')
			}

			return l.lexOptionValueFunc(section, name, partial), nil
		}

		val := partial.String()
		if strings.HasSuffix(val, "\n") {
			// A newline was added to the end, so the file didn't end with a backslash.
			// => Keep the newline
			val = strings.TrimSpace(val) + "\n"
		} else {
			val = strings.TrimSpace(val)
		}
		l.lexchan <- &lexData{
			Type:    optionKind,
			Section: nil,
			Option:  &UnitOption{Section: section, Name: name, Value: val},
		}

		return l.lexNextSectionOrOptionFunc(section), nil
	}
}

// toEOL reads until the end of line or end of file.
// It returns (data, EOF found, error).
func (l *lexer) toEOL() ([]byte, bool, error) {
	line, err := l.buf.ReadBytes('\n')
	// ignore EOF here since it's roughly equivalent to EOL
	if err != nil && err != io.EOF {
		return nil, false, err
	}

	line = bytes.TrimSuffix(line, []byte{'\r'})
	line = bytes.TrimSuffix(line, []byte{'\n'})

	return line, err == io.EOF, nil
}

func isComment(r rune) bool {
	return r == '#' || r == ';'
}
