...

Source file src/cuelang.org/go/cue/load/loader_common.go

Documentation: cuelang.org/go/cue/load

     1  // Copyright 2018 The CUE Authors
     2  //
     3  // Licensed under the Apache License, Version 2.0 (the "License");
     4  // you may not use this file except in compliance with the License.
     5  // You may obtain a copy of the License at
     6  //
     7  //     http://www.apache.org/licenses/LICENSE-2.0
     8  //
     9  // Unless required by applicable law or agreed to in writing, software
    10  // distributed under the License is distributed on an "AS IS" BASIS,
    11  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    12  // See the License for the specific language governing permissions and
    13  // limitations under the License.
    14  
    15  package load
    16  
    17  import (
    18  	"bytes"
    19  	pathpkg "path"
    20  	"path/filepath"
    21  	"sort"
    22  	"strconv"
    23  	"strings"
    24  	"unicode"
    25  	"unicode/utf8"
    26  
    27  	"cuelang.org/go/cue/ast"
    28  	"cuelang.org/go/cue/build"
    29  	"cuelang.org/go/cue/errors"
    30  	"cuelang.org/go/cue/parser"
    31  	"cuelang.org/go/cue/token"
    32  	"cuelang.org/go/internal"
    33  )
    34  
    35  // An importMode controls the behavior of the Import method.
    36  type importMode uint
    37  
    38  const (
    39  	// If importComment is set, parse import comments on package statements.
    40  	// Import returns an error if it finds a comment it cannot understand
    41  	// or finds conflicting comments in multiple source files.
    42  	// See golang.org/s/go14customimport for more information.
    43  	importComment importMode = 1 << iota
    44  
    45  	allowAnonymous
    46  )
    47  
    48  func rewriteFiles(p *build.Instance, root string, isLocal bool) {
    49  	p.Root = root
    50  
    51  	normalizeFiles(p.BuildFiles)
    52  	normalizeFiles(p.IgnoredFiles)
    53  	normalizeFiles(p.OrphanedFiles)
    54  	normalizeFiles(p.InvalidFiles)
    55  	normalizeFiles(p.UnknownFiles)
    56  }
    57  
    58  // normalizeFiles sorts the files so that files contained by a parent directory
    59  // always come before files contained in sub-directories, and that filenames in
    60  // the same directory are sorted lexically byte-wise, like Go's `<` operator.
    61  func normalizeFiles(a []*build.File) {
    62  	sort.Slice(a, func(i, j int) bool {
    63  		fi := a[i].Filename
    64  		fj := a[j].Filename
    65  		ci := strings.Count(fi, string(filepath.Separator))
    66  		cj := strings.Count(fj, string(filepath.Separator))
    67  		if ci != cj {
    68  			return ci < cj
    69  		}
    70  		return fi < fj
    71  	})
    72  }
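
The sort key above is the separator count first and a plain byte-wise string comparison second, which is what keeps a directory's own files ahead of anything in its sub-directories. A minimal standalone sketch of the same ordering, using invented file names:

    package main

    import (
        "fmt"
        "path/filepath"
        "sort"
        "strings"
    )

    func main() {
        // Invented paths; the comparator mirrors the one in normalizeFiles.
        files := []string{
            filepath.Join("pkg", "sub", "x.cue"),
            filepath.Join("pkg", "b.cue"),
            filepath.Join("pkg", "a.cue"),
        }
        sort.Slice(files, func(i, j int) bool {
            ci := strings.Count(files[i], string(filepath.Separator))
            cj := strings.Count(files[j], string(filepath.Separator))
            if ci != cj {
                return ci < cj // shallower paths first
            }
            return files[i] < files[j] // same depth: lexical byte-wise order
        })
        fmt.Println(files) // [pkg/a.cue pkg/b.cue pkg/sub/x.cue] on Unix-like systems
    }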
    73  
    74  func cleanImport(path string) string {
    75  	orig := path
    76  	path = pathpkg.Clean(path)
    77  	if strings.HasPrefix(orig, "./") && path != ".." && !strings.HasPrefix(path, "../") {
    78  		path = "./" + path
    79  	}
    80  	return path
    81  }
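
cleanImport canonicalizes an import path with path.Clean, but restores the leading "./" when the original had one and the cleaned result does not escape the current directory, so explicitly relative imports stay recognizably relative. A sketch of a table test that could sit next to this file in package load; the cases are invented:

    package load

    import "testing"

    func TestCleanImportSketch(t *testing.T) {
        cases := map[string]string{
            "./foo/../bar": "./bar",   // path.Clean drops the "./"; cleanImport puts it back
            "./..":         "..",      // parent references are left alone
            "foo//bar/.":   "foo/bar", // plain paths are simply cleaned
        }
        for in, want := range cases {
            if got := cleanImport(in); got != want {
                t.Errorf("cleanImport(%q) = %q, want %q", in, got, want)
            }
        }
    }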
    82  
    83  // An importStack is a stack of import paths, possibly with the suffix " (test)" appended.
    84  // The import path of a test package is the import path of the corresponding
    85  // non-test package with the suffix "_test" added.
    86  type importStack []string
    87  
    88  func (s *importStack) Push(p string) {
    89  	*s = append(*s, p)
    90  }
    91  
    92  func (s *importStack) Pop() {
    93  	*s = (*s)[0 : len(*s)-1]
    94  }
    95  
    96  func (s *importStack) Copy() []string {
    97  	return append([]string{}, *s...)
    98  }
    99  
   100  type fileProcessor struct {
   101  	firstFile        string
   102  	firstCommentFile string
   103  	imported         map[string][]token.Pos
   104  	allTags          map[string]bool
   105  	allFiles         bool
   106  	ignoreOther      bool // ignore files from other packages
   107  	allPackages      bool
   108  
   109  	c      *fileProcessorConfig
   110  	tagger *tagger
   111  	pkgs   map[string]*build.Instance
   112  	pkg    *build.Instance
   113  
   114  	err errors.Error
   115  }
   116  
   117  type fileProcessorConfig = Config
   118  
   119  func newFileProcessor(c *fileProcessorConfig, p *build.Instance, tg *tagger) *fileProcessor {
   120  	return &fileProcessor{
   121  		imported: make(map[string][]token.Pos),
   122  		allTags:  make(map[string]bool),
   123  		c:        c,
   124  		pkgs:     map[string]*build.Instance{"_": p},
   125  		pkg:      p,
   126  		tagger:   tg,
   127  	}
   128  }
   129  
   130  func countCUEFiles(c *fileProcessorConfig, p *build.Instance) int {
   131  	count := len(p.BuildFiles)
   132  	for _, f := range p.IgnoredFiles {
   133  		if c.Tools && strings.HasSuffix(f.Filename, "_tool.cue") {
   134  			count++
   135  		}
   136  		if c.Tests && strings.HasSuffix(f.Filename, "_test.cue") {
   137  			count++
   138  		}
   139  	}
   140  	return count
   141  }
   142  
   143  func (fp *fileProcessor) finalize(p *build.Instance) errors.Error {
   144  	if fp.err != nil {
   145  		return fp.err
   146  	}
   147  	if countCUEFiles(fp.c, p) == 0 &&
   148  		!fp.c.DataFiles &&
   149  		(p.PkgName != "_" || !fp.allPackages) {
   150  		fp.err = errors.Append(fp.err, &NoFilesError{Package: p, ignored: len(p.IgnoredFiles) > 0})
   151  		return fp.err
   152  	}
   153  
   154  	for tag := range fp.allTags {
   155  		p.AllTags = append(p.AllTags, tag)
   156  	}
   157  	sort.Strings(p.AllTags)
   158  
   159  	p.ImportPaths, _ = cleanImports(fp.imported)
   160  
   161  	return nil
   162  }
   163  
   164  func (fp *fileProcessor) add(root string, file *build.File, mode importMode) (added bool) {
   165  	fullPath := file.Filename
   166  	if fullPath != "-" {
   167  		if !filepath.IsAbs(fullPath) {
   168  			fullPath = filepath.Join(root, fullPath)
   169  		}
   170  	}
   171  	file.Filename = fullPath
   172  
   173  	base := filepath.Base(fullPath)
   174  
   175  	// special * and _
   176  	p := fp.pkg // default package
   177  
   178  	// badFile := func(p *build.Instance, err errors.Error) bool {
   179  	badFile := func(err errors.Error) bool {
   180  		fp.err = errors.Append(fp.err, err)
   181  		file.ExcludeReason = fp.err
   182  		p.InvalidFiles = append(p.InvalidFiles, file)
   183  		return true
   184  	}
   185  
   186  	match, data, err := matchFile(fp.c, file, true, fp.allFiles, fp.allTags)
   187  	switch {
   188  	case match:
   189  
   190  	case err == nil:
   191  		// Not a CUE file.
   192  		p.OrphanedFiles = append(p.OrphanedFiles, file)
   193  		return false
   194  
   195  	case !errors.Is(err, errExclude):
   196  		return badFile(err)
   197  
   198  	default:
   199  		file.ExcludeReason = err
   200  		if file.Interpretation == "" {
   201  			p.IgnoredFiles = append(p.IgnoredFiles, file)
   202  		} else {
   203  			p.OrphanedFiles = append(p.OrphanedFiles, file)
   204  		}
   205  		return false
   206  	}
   207  
   208  	pf, perr := parser.ParseFile(fullPath, data, parser.ImportsOnly, parser.ParseComments)
   209  	if perr != nil {
   210  		badFile(errors.Promote(perr, "add failed"))
   211  		return true
   212  	}
   213  
   214  	_, pkg, pos := internal.PackageInfo(pf)
   215  	if pkg == "" {
   216  		pkg = "_"
   217  	}
   218  
   219  	switch {
   220  	case pkg == p.PkgName, mode&allowAnonymous != 0:
   221  	case fp.allPackages && pkg != "_":
   222  		q := fp.pkgs[pkg]
   223  		if q == nil {
   224  			q = &build.Instance{
   225  				PkgName: pkg,
   226  
   227  				Dir:         p.Dir,
   228  				DisplayPath: p.DisplayPath,
   229  				ImportPath:  p.ImportPath + ":" + pkg,
   230  				Root:        p.Root,
   231  				Module:      p.Module,
   232  			}
   233  			fp.pkgs[pkg] = q
   234  		}
   235  		p = q
   236  
   237  	case pkg != "_":
   238  
   239  	default:
   240  		file.ExcludeReason = excludeError{errors.Newf(pos, "no package name")}
   241  		p.IgnoredFiles = append(p.IgnoredFiles, file)
   242  		return false // don't mark as added
   243  	}
   244  
   245  	if !fp.c.AllCUEFiles {
   246  		if err := shouldBuildFile(pf, fp); err != nil {
   247  			if !errors.Is(err, errExclude) {
   248  				fp.err = errors.Append(fp.err, err)
   249  			}
   250  			file.ExcludeReason = err
   251  			p.IgnoredFiles = append(p.IgnoredFiles, file)
   252  			return false
   253  		}
   254  	}
   255  
   256  	if pkg != "" && pkg != "_" {
   257  		if p.PkgName == "" {
   258  			p.PkgName = pkg
   259  			fp.firstFile = base
   260  		} else if pkg != p.PkgName {
   261  			if fp.ignoreOther {
   262  				file.ExcludeReason = excludeError{errors.Newf(pos,
   263  					"package is %s, want %s", pkg, p.PkgName)}
   264  				p.IgnoredFiles = append(p.IgnoredFiles, file)
   265  				return false
   266  			}
   267  			return badFile(&MultiplePackageError{
   268  				Dir:      p.Dir,
   269  				Packages: []string{p.PkgName, pkg},
   270  				Files:    []string{fp.firstFile, base},
   271  			})
   272  		}
   273  	}
   274  
   275  	isTest := strings.HasSuffix(base, "_test"+cueSuffix)
   276  	isTool := strings.HasSuffix(base, "_tool"+cueSuffix)
   277  
   278  	if mode&importComment != 0 {
   279  		qcom, line := findimportComment(data)
   280  		if line != 0 {
   281  			com, err := strconv.Unquote(qcom)
   282  			if err != nil {
   283  				badFile(errors.Newf(pos, "%s:%d: cannot parse import comment", fullPath, line))
   284  			} else if p.ImportComment == "" {
   285  				p.ImportComment = com
   286  				fp.firstCommentFile = base
   287  			} else if p.ImportComment != com {
   288  				badFile(errors.Newf(pos, "found import comments %q (%s) and %q (%s) in %s", p.ImportComment, fp.firstCommentFile, com, base, p.Dir))
   289  			}
   290  		}
   291  	}
   292  
   293  	for _, decl := range pf.Decls {
   294  		d, ok := decl.(*ast.ImportDecl)
   295  		if !ok {
   296  			continue
   297  		}
   298  		for _, spec := range d.Specs {
   299  			quoted := spec.Path.Value
   300  			path, err := strconv.Unquote(quoted)
   301  			if err != nil {
   302  				badFile(errors.Newf(
   303  					spec.Path.Pos(),
   304  					"%s: parser returned invalid quoted string: <%s>", fullPath, quoted,
   305  				))
   306  			}
   307  			if !isTest || fp.c.Tests {
   308  				fp.imported[path] = append(fp.imported[path], spec.Pos())
   309  			}
   310  		}
   311  	}
   312  	switch {
   313  	case isTest:
   314  		if fp.c.Tests {
   315  			p.BuildFiles = append(p.BuildFiles, file)
   316  		} else {
   317  			file.ExcludeReason = excludeError{errors.Newf(pos,
   318  				"_test.cue files excluded in non-test mode")}
   319  			p.IgnoredFiles = append(p.IgnoredFiles, file)
   320  		}
   321  	case isTool:
   322  		if fp.c.Tools {
   323  			p.BuildFiles = append(p.BuildFiles, file)
   324  		} else {
   325  			file.ExcludeReason = excludeError{errors.Newf(pos,
   326  				"_tool.cue files excluded in non-cmd mode")}
   327  			p.IgnoredFiles = append(p.IgnoredFiles, file)
   328  		}
   329  	default:
   330  		p.BuildFiles = append(p.BuildFiles, file)
   331  	}
   332  	return true
   333  }
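
The classification above is what callers of the public API eventually observe: files that pass matching, package, and build-tag checks land in BuildFiles, while _test.cue and _tool.cue files are only built when Config.Tests or Config.Tools is set and otherwise end up in IgnoredFiles with an ExcludeReason attached. A hedged sketch of inspecting that split through cuelang.org/go/cue/load; the directory name is invented:

    package main

    import (
        "fmt"

        "cuelang.org/go/cue/load"
    )

    func main() {
        // Load the package in ./example (an invented directory) without
        // enabling tool or test files, so any *_tool.cue or *_test.cue
        // files should be reported as ignored rather than built.
        insts := load.Instances([]string{"."}, &load.Config{
            Dir:   "./example",
            Tools: false,
            Tests: false,
        })
        for _, inst := range insts {
            for _, f := range inst.BuildFiles {
                fmt.Println("build:", f.Filename)
            }
            for _, f := range inst.IgnoredFiles {
                fmt.Println("ignored:", f.Filename, "-", f.ExcludeReason)
            }
        }
    }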
   334  
   335  func findimportComment(data []byte) (s string, line int) {
   336  	// expect keyword package
   337  	word, data := parseWord(data)
   338  	if string(word) != "package" {
   339  		return "", 0
   340  	}
   341  
   342  	// expect package name
   343  	_, data = parseWord(data)
   344  
   345  	// now ready for import comment, a // comment
   346  	// beginning and ending on the current line.
   347  	for len(data) > 0 && (data[0] == ' ' || data[0] == '\t' || data[0] == '\r') {
   348  		data = data[1:]
   349  	}
   350  
   351  	var comment []byte
   352  	switch {
   353  	case bytes.HasPrefix(data, slashSlash):
   354  		i := bytes.Index(data, newline)
   355  		if i < 0 {
   356  			i = len(data)
   357  		}
   358  		comment = data[2:i]
   359  	}
   360  	comment = bytes.TrimSpace(comment)
   361  
   362  	// split comment into `import`, `"pkg"`
   363  	word, arg := parseWord(comment)
   364  	if string(word) != "import" {
   365  		return "", 0
   366  	}
   367  
   368  	line = 1 + bytes.Count(data[:cap(data)-cap(arg)], newline)
   369  	return strings.TrimSpace(string(arg)), line
   370  }
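
findimportComment looks only at the package clause: it expects the keyword package, then a package name, then a same-line // comment of the form import "path", and it returns the still-quoted path together with the 1-based line on which it was found. A sketch of a test in package load, with an invented source line:

    package load

    import "testing"

    func TestFindImportCommentSketch(t *testing.T) {
        src := []byte("package foo // import \"example.com/foo\"\n")
        quoted, line := findimportComment(src)
        if quoted != `"example.com/foo"` || line != 1 {
            t.Fatalf(`got (%s, %d), want ("example.com/foo", 1)`, quoted, line)
        }
    }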
   371  
   372  var (
   373  	slashSlash = []byte("//")
   374  	newline    = []byte("\n")
   375  )
   376  
   377  // skipSpaceOrComment returns data with any leading spaces or comments removed.
   378  func skipSpaceOrComment(data []byte) []byte {
   379  	for len(data) > 0 {
   380  		switch data[0] {
   381  		case ' ', '\t', '\r', '\n':
   382  			data = data[1:]
   383  			continue
   384  		case '/':
   385  			if bytes.HasPrefix(data, slashSlash) {
   386  				i := bytes.Index(data, newline)
   387  				if i < 0 {
   388  					return nil
   389  				}
   390  				data = data[i+1:]
   391  				continue
   392  			}
   393  		}
   394  		break
   395  	}
   396  	return data
   397  }
   398  
   399  // parseWord skips any leading spaces or comments in data
   400  // and then parses the beginning of data as an identifier or keyword,
   401  // returning that word and what remains after the word.
   402  func parseWord(data []byte) (word, rest []byte) {
   403  	data = skipSpaceOrComment(data)
   404  
   405  	// Parse past leading word characters.
   406  	rest = data
   407  	for {
   408  		r, size := utf8.DecodeRune(rest)
   409  		if unicode.IsLetter(r) || '0' <= r && r <= '9' || r == '_' {
   410  			rest = rest[size:]
   411  			continue
   412  		}
   413  		break
   414  	}
   415  
   416  	word = data[:len(data)-len(rest)]
   417  	if len(word) == 0 {
   418  		return nil, nil
   419  	}
   420  
   421  	return word, rest
   422  }
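
skipSpaceOrComment and parseWord form a tiny scanner that is just strong enough for the package clause: whitespace and // comments are skipped, then a run of letters, digits, and underscores is taken as the next word. A small sketch in package load, with an invented input:

    package load

    import "testing"

    func TestParseWordSketch(t *testing.T) {
        // A leading comment and surrounding whitespace are skipped before the word.
        word, rest := parseWord([]byte("  // header comment\n\tpackage foo"))
        if string(word) != "package" || string(rest) != " foo" {
            t.Fatalf("got (%q, %q), want (\"package\", \" foo\")", word, rest)
        }
    }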
   423  
   424  func cleanImports(m map[string][]token.Pos) ([]string, map[string][]token.Pos) {
   425  	all := make([]string, 0, len(m))
   426  	for path := range m {
   427  		all = append(all, path)
   428  	}
   429  	sort.Strings(all)
   430  	return all, m
   431  }
   432  
   433  // isLocalImport reports whether the import path is
   434  // a local import path, like ".", "..", "./foo", or "../foo".
   435  func isLocalImport(path string) bool {
   436  	return path == "." || path == ".." ||
   437  		strings.HasPrefix(path, "./") || strings.HasPrefix(path, "../")
   438  }
   439  
   440  // warnUnmatched warns about patterns that didn't match any packages.
   441  func warnUnmatched(matches []*match) {
   442  	for _, m := range matches {
   443  		if len(m.Pkgs) == 0 {
   444  			m.Err =
   445  				errors.Newf(token.NoPos, "cue: %q matched no packages\n", m.Pattern)
   446  		}
   447  	}
   448  }
   449  
   450  // cleanPatterns returns the patterns to use for the given
   451  // command line. It canonicalizes the patterns but does not
   452  // evaluate any matches.
   453  func cleanPatterns(patterns []string) []string {
   454  	if len(patterns) == 0 {
   455  		return []string{"."}
   456  	}
   457  	var out []string
   458  	for _, a := range patterns {
   459  		// Arguments are supposed to be import paths, but
   460  		// as a courtesy to Windows developers, rewrite \ to /
   461  		// in command-line arguments. Handles .\... and so on.
   462  		if filepath.Separator == '\\' {
   463  			a = strings.Replace(a, `\`, `/`, -1)
   464  		}
   465  
   466  		// Put argument in canonical form, but preserve leading ./.
   467  		if strings.HasPrefix(a, "./") {
   468  			a = "./" + pathpkg.Clean(a)
   469  			if a == "./." {
   470  				a = "."
   471  			}
   472  		} else {
   473  			a = pathpkg.Clean(a)
   474  		}
   475  		out = append(out, a)
   476  	}
   477  	return out
   478  }
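
cleanPatterns normalizes command-line patterns before any matching happens: an empty list becomes ".", backslashes are rewritten to forward slashes on Windows, and each pattern is cleaned with path.Clean while a leading "./" is preserved. A sketch of the expected results; the patterns are invented and avoid the Windows-only backslash rewrite:

    package load

    import (
        "reflect"
        "testing"
    )

    func TestCleanPatternsSketch(t *testing.T) {
        if got := cleanPatterns(nil); !reflect.DeepEqual(got, []string{"."}) {
            t.Fatalf("cleanPatterns(nil) = %v, want [.]", got)
        }
        got := cleanPatterns([]string{"./foo/../bar", "x//y/.", "./."})
        want := []string{"./bar", "x/y", "."}
        if !reflect.DeepEqual(got, want) {
            t.Fatalf("got %v, want %v", got, want)
        }
    }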
   479  
   480  // isMetaPackage checks if name is a reserved package name that expands to multiple packages.
   481  func isMetaPackage(name string) bool {
   482  	return name == "std" || name == "cmd" || name == "all"
   483  }
   484  
   485  // hasFilepathPrefix reports whether the path s begins with the
   486  // elements in prefix.
   487  func hasFilepathPrefix(s, prefix string) bool {
   488  	switch {
   489  	default:
   490  		return false
   491  	case len(s) == len(prefix):
   492  		return s == prefix
   493  	case len(s) > len(prefix):
   494  		if prefix != "" && prefix[len(prefix)-1] == filepath.Separator {
   495  			return strings.HasPrefix(s, prefix)
   496  		}
   497  		return s[len(prefix)] == filepath.Separator && s[:len(prefix)] == prefix
   498  	}
   499  }
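
hasFilepathPrefix is a path-aware prefix test: the prefix has to end exactly at a separator boundary (or already end in one), so root/mod is a prefix of root/mod/pkg but not of root/module. A small sketch that builds its paths with filepath.Join so the check holds on any OS; the names are invented:

    package load

    import (
        "path/filepath"
        "testing"
    )

    func TestHasFilepathPrefixSketch(t *testing.T) {
        base := filepath.Join("root", "mod")
        if !hasFilepathPrefix(filepath.Join("root", "mod", "pkg"), base) {
            t.Error("expected root/mod to prefix root/mod/pkg")
        }
        if hasFilepathPrefix(filepath.Join("root", "module"), base) {
            t.Error("root/mod must not prefix root/module: no separator boundary")
        }
    }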
   500  
