CVS commit: pkgsrc/pkgtools/pkglint
Module Name: pkgsrc
Committed By: rillig
Date: Wed Nov 27 22:10:08 UTC 2019
Modified Files:
pkgsrc/pkgtools/pkglint: Makefile PLIST
pkgsrc/pkgtools/pkglint/files: autofix.go autofix_test.go check_test.go
licenses.go licenses_test.go line.go line_test.go logging.go
logging_test.go mkline.go mkline_test.go mklinechecker.go
mklinechecker_test.go mklineparser.go mklines.go mkparser.go
mkparser_test.go mkshparser.go mkshparser_test.go mkshtypes_test.go
mkshwalker_test.go mktypes.go mktypes_test.go package.go
package_test.go path.go path_test.go pkglint.1 pkglint.go
pkglint_test.go pkgsrc.go pkgsrc_test.go plist_test.go shell.go
shell_test.go shtokenizer.go shtokenizer_test.go shtypes_test.go
substcontext.go testnames_test.go util.go util_test.go
varalignblock.go vardefs.go vardefs_test.go vartypecheck.go
vartypecheck_test.go
pkgsrc/pkgtools/pkglint/files/getopt: getopt_test.go
pkgsrc/pkgtools/pkglint/files/histogram: histogram_test.go
pkgsrc/pkgtools/pkglint/files/intqa: ideas.go
pkgsrc/pkgtools/pkglint/files/licenses: licenses_test.go
pkgsrc/pkgtools/pkglint/files/pkgver: vercmp_test.go
pkgsrc/pkgtools/pkglint/files/textproc: lexer_test.go
pkgsrc/pkgtools/pkglint/files/trace: tracing_test.go
Added Files:
pkgsrc/pkgtools/pkglint/files: mklexer.go mklexer_test.go
pkgsrc/pkgtools/pkglint/files/intqa: qa.go qa_test.go
Removed Files:
pkgsrc/pkgtools/pkglint/files/intqa: testnames.go testnames_test.go
Log Message:
pkgtools/pkglint: update to 19.3.11
Changes since 19.3.10:
The check for buildlink3.mk files that are included conditionally in one
place and unconditionally in another place has been refined. It now
also works in cases that do not involve any variables, such as when the
condition is a mere exists(filename).
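To illustrate the refined check (the package path below is only an
example and not taken from this commit), pkglint now also reports a
pattern like the following, where the same buildlink3.mk file is
included unconditionally in one file and guarded by a mere exists()
condition in another:

# unconditional include in one place
.include "../../x11/libX11/buildlink3.mk"

# conditional include of the same file elsewhere; the condition is a
# plain exists(filename) and involves no variables at all
.if exists(../../x11/libX11/buildlink3.mk)
.include "../../x11/libX11/buildlink3.mk"
.endif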
References to variables that use parentheses instead of the usual braces
now produce a warning, even if pkglint cannot fix them automatically.
This affects only a few instances where more than one such variable
reference appears in a single line.
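As an illustrative sketch (the line below is made up for this example),
a line containing several parenthesized references now gets the
"Please use curly braces {} instead of round parentheses ()" warning
shown in the parser code in the diff below, even though pkglint does
not rewrite it automatically:

# both references should be written as ${PREFIX} and ${VARBASE}
MESSAGE_SUBST+=	prefix=$(PREFIX) varbase=$(VARBASE)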
The --log-verbose command line option has been removed since it had no
practical use other than improving the performance during pkglint
development itself. Because of that, it had not even been mentioned in
the manual page.
Warnings for missing license files now report the path to the license
file relative to the line where the warning occurs, like everywhere
else.
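For example, the updated expectations in licenses_test.go in the diff
below show how the reported path changes:

before: WARN: Makefile:1: License file ~/licenses/gpl-v2 does not exist.
after:  WARN: ~/Makefile:1: License file licenses/gpl-v2 does not exist.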
To generate a diff of this commit:
cvs rdiff -u -r1.610 -r1.611 pkgsrc/pkgtools/pkglint/Makefile
cvs rdiff -u -r1.17 -r1.18 pkgsrc/pkgtools/pkglint/PLIST
cvs rdiff -u -r1.30 -r1.31 pkgsrc/pkgtools/pkglint/files/autofix.go \
pkgsrc/pkgtools/pkglint/files/logging.go
cvs rdiff -u -r1.31 -r1.32 pkgsrc/pkgtools/pkglint/files/autofix_test.go
cvs rdiff -u -r1.55 -r1.56 pkgsrc/pkgtools/pkglint/files/check_test.go
cvs rdiff -u -r1.27 -r1.28 pkgsrc/pkgtools/pkglint/files/licenses.go
cvs rdiff -u -r1.25 -r1.26 pkgsrc/pkgtools/pkglint/files/licenses_test.go
cvs rdiff -u -r1.39 -r1.40 pkgsrc/pkgtools/pkglint/files/line.go \
pkgsrc/pkgtools/pkglint/files/plist_test.go
cvs rdiff -u -r1.19 -r1.20 pkgsrc/pkgtools/pkglint/files/line_test.go \
pkgsrc/pkgtools/pkglint/files/shtokenizer_test.go
cvs rdiff -u -r1.22 -r1.23 pkgsrc/pkgtools/pkglint/files/logging_test.go
cvs rdiff -u -r0 -r1.1 pkgsrc/pkgtools/pkglint/files/mklexer.go \
pkgsrc/pkgtools/pkglint/files/mklexer_test.go
cvs rdiff -u -r1.64 -r1.65 pkgsrc/pkgtools/pkglint/files/mkline.go
cvs rdiff -u -r1.72 -r1.73 pkgsrc/pkgtools/pkglint/files/mkline_test.go
cvs rdiff -u -r1.54 -r1.55 pkgsrc/pkgtools/pkglint/files/mklinechecker.go
cvs rdiff -u -r1.49 -r1.50 \
pkgsrc/pkgtools/pkglint/files/mklinechecker_test.go \
pkgsrc/pkgtools/pkglint/files/shell.go
cvs rdiff -u -r1.4 -r1.5 pkgsrc/pkgtools/pkglint/files/mklineparser.go
cvs rdiff -u -r1.60 -r1.61 pkgsrc/pkgtools/pkglint/files/mklines.go \
pkgsrc/pkgtools/pkglint/files/vartypecheck_test.go
cvs rdiff -u -r1.36 -r1.37 pkgsrc/pkgtools/pkglint/files/mkparser.go \
pkgsrc/pkgtools/pkglint/files/pkgsrc_test.go \
pkgsrc/pkgtools/pkglint/files/util_test.go
cvs rdiff -u -r1.35 -r1.36 pkgsrc/pkgtools/pkglint/files/mkparser_test.go
cvs rdiff -u -r1.17 -r1.18 pkgsrc/pkgtools/pkglint/files/mkshparser.go \
pkgsrc/pkgtools/pkglint/files/mktypes_test.go
cvs rdiff -u -r1.20 -r1.21 pkgsrc/pkgtools/pkglint/files/mkshparser_test.go \
pkgsrc/pkgtools/pkglint/files/mktypes.go \
pkgsrc/pkgtools/pkglint/files/shtokenizer.go
cvs rdiff -u -r1.2 -r1.3 pkgsrc/pkgtools/pkglint/files/mkshtypes_test.go
cvs rdiff -u -r1.10 -r1.11 pkgsrc/pkgtools/pkglint/files/mkshwalker_test.go
cvs rdiff -u -r1.69 -r1.70 pkgsrc/pkgtools/pkglint/files/package.go
cvs rdiff -u -r1.59 -r1.60 pkgsrc/pkgtools/pkglint/files/package_test.go \
pkgsrc/pkgtools/pkglint/files/util.go
cvs rdiff -u -r1.1 -r1.2 pkgsrc/pkgtools/pkglint/files/path.go \
pkgsrc/pkgtools/pkglint/files/path_test.go
cvs rdiff -u -r1.56 -r1.57 pkgsrc/pkgtools/pkglint/files/pkglint.1
cvs rdiff -u -r1.65 -r1.66 pkgsrc/pkgtools/pkglint/files/pkglint.go
cvs rdiff -u -r1.50 -r1.51 pkgsrc/pkgtools/pkglint/files/pkglint_test.go
cvs rdiff -u -r1.42 -r1.43 pkgsrc/pkgtools/pkglint/files/pkgsrc.go
cvs rdiff -u -r1.57 -r1.58 pkgsrc/pkgtools/pkglint/files/shell_test.go
cvs rdiff -u -r1.9 -r1.10 pkgsrc/pkgtools/pkglint/files/shtypes_test.go \
pkgsrc/pkgtools/pkglint/files/testnames_test.go
cvs rdiff -u -r1.29 -r1.30 pkgsrc/pkgtools/pkglint/files/substcontext.go
cvs rdiff -u -r1.8 -r1.9 pkgsrc/pkgtools/pkglint/files/varalignblock.go
cvs rdiff -u -r1.78 -r1.79 pkgsrc/pkgtools/pkglint/files/vardefs.go
cvs rdiff -u -r1.24 -r1.25 pkgsrc/pkgtools/pkglint/files/vardefs_test.go
cvs rdiff -u -r1.67 -r1.68 pkgsrc/pkgtools/pkglint/files/vartypecheck.go
cvs rdiff -u -r1.12 -r1.13 \
pkgsrc/pkgtools/pkglint/files/getopt/getopt_test.go
cvs rdiff -u -r1.3 -r1.4 \
pkgsrc/pkgtools/pkglint/files/histogram/histogram_test.go
cvs rdiff -u -r1.2 -r1.3 pkgsrc/pkgtools/pkglint/files/intqa/ideas.go
cvs rdiff -u -r0 -r1.1 pkgsrc/pkgtools/pkglint/files/intqa/qa.go \
pkgsrc/pkgtools/pkglint/files/intqa/qa_test.go
cvs rdiff -u -r1.6 -r0 pkgsrc/pkgtools/pkglint/files/intqa/testnames.go
cvs rdiff -u -r1.3 -r0 pkgsrc/pkgtools/pkglint/files/intqa/testnames_test.go
cvs rdiff -u -r1.7 -r1.8 \
pkgsrc/pkgtools/pkglint/files/licenses/licenses_test.go
cvs rdiff -u -r1.7 -r1.8 pkgsrc/pkgtools/pkglint/files/pkgver/vercmp_test.go
cvs rdiff -u -r1.8 -r1.9 pkgsrc/pkgtools/pkglint/files/textproc/lexer_test.go
cvs rdiff -u -r1.5 -r1.6 pkgsrc/pkgtools/pkglint/files/trace/tracing_test.go
Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.
Modified files:
Index: pkgsrc/pkgtools/pkglint/Makefile
diff -u pkgsrc/pkgtools/pkglint/Makefile:1.610 pkgsrc/pkgtools/pkglint/Makefile:1.611
--- pkgsrc/pkgtools/pkglint/Makefile:1.610 Sat Nov 23 23:35:55 2019
+++ pkgsrc/pkgtools/pkglint/Makefile Wed Nov 27 22:10:06 2019
@@ -1,6 +1,6 @@
-# $NetBSD: Makefile,v 1.610 2019/11/23 23:35:55 rillig Exp $
+# $NetBSD: Makefile,v 1.611 2019/11/27 22:10:06 rillig Exp $
-PKGNAME= pkglint-19.3.10
+PKGNAME= pkglint-19.3.11
CATEGORIES= pkgtools
DISTNAME= tools
MASTER_SITES= ${MASTER_SITE_GITHUB:=golang/}
Index: pkgsrc/pkgtools/pkglint/PLIST
diff -u pkgsrc/pkgtools/pkglint/PLIST:1.17 pkgsrc/pkgtools/pkglint/PLIST:1.18
--- pkgsrc/pkgtools/pkglint/PLIST:1.17 Sat Nov 23 23:35:55 2019
+++ pkgsrc/pkgtools/pkglint/PLIST Wed Nov 27 22:10:06 2019
@@ -1,4 +1,4 @@
-@comment $NetBSD: PLIST,v 1.17 2019/11/23 23:35:55 rillig Exp $
+@comment $NetBSD: PLIST,v 1.18 2019/11/27 22:10:06 rillig Exp $
bin/pkglint
gopkg/pkg/${GO_PLATFORM}/netbsd.org/pkglint.a
gopkg/pkg/${GO_PLATFORM}/netbsd.org/pkglint/getopt.a
@@ -30,8 +30,8 @@ gopkg/src/netbsd.org/pkglint/getopt/geto
gopkg/src/netbsd.org/pkglint/histogram/histogram.go
gopkg/src/netbsd.org/pkglint/histogram/histogram_test.go
gopkg/src/netbsd.org/pkglint/intqa/ideas.go
-gopkg/src/netbsd.org/pkglint/intqa/testnames.go
-gopkg/src/netbsd.org/pkglint/intqa/testnames_test.go
+gopkg/src/netbsd.org/pkglint/intqa/qa.go
+gopkg/src/netbsd.org/pkglint/intqa/qa_test.go
gopkg/src/netbsd.org/pkglint/licenses.go
gopkg/src/netbsd.org/pkglint/licenses/licenses.go
gopkg/src/netbsd.org/pkglint/licenses/licenses.y
@@ -49,6 +49,8 @@ gopkg/src/netbsd.org/pkglint/lines.go
gopkg/src/netbsd.org/pkglint/lines_test.go
gopkg/src/netbsd.org/pkglint/logging.go
gopkg/src/netbsd.org/pkglint/logging_test.go
+gopkg/src/netbsd.org/pkglint/mklexer.go
+gopkg/src/netbsd.org/pkglint/mklexer_test.go
gopkg/src/netbsd.org/pkglint/mkline.go
gopkg/src/netbsd.org/pkglint/mkline_test.go
gopkg/src/netbsd.org/pkglint/mklinechecker.go
Index: pkgsrc/pkgtools/pkglint/files/autofix.go
diff -u pkgsrc/pkgtools/pkglint/files/autofix.go:1.30 pkgsrc/pkgtools/pkglint/files/autofix.go:1.31
--- pkgsrc/pkgtools/pkglint/files/autofix.go:1.30 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/autofix.go Wed Nov 27 22:10:07 2019
@@ -29,7 +29,6 @@ type autofixShortTerm struct {
diagFormat string // Is logged only if it couldn't be fixed automatically
diagArgs []interface{} //
explanation []string // Is printed together with the diagnostic
- anyway bool // Print the diagnostic even if it cannot be autofixed
}
type autofixAction struct {
@@ -297,15 +296,6 @@ func (fix *Autofix) Describef(lineno int
fix.actions = append(fix.actions, autofixAction{sprintf(format, args...), lineno})
}
-// Anyway has the effect of showing the diagnostic even when nothing can
-// be fixed automatically.
-//
-// As usual, the diagnostic is only shown if neither --show-autofix nor
-// --autofix mode is given.
-func (fix *Autofix) Anyway() {
- fix.anyway = !G.Logger.IsAutofix()
-}
-
// Apply does the actual work.
// Depending on the pkglint mode, it either:
//
@@ -330,7 +320,7 @@ func (fix *Autofix) Apply() {
fix.autofixShortTerm = autofixShortTerm{}
}
- if !(G.Logger.Relevant(fix.diagFormat) && (len(fix.actions) > 0 || fix.anyway)) {
+ if !(G.Logger.Relevant(fix.diagFormat) && (len(fix.actions) > 0 || !G.Logger.IsAutofix())) {
reset()
return
}
@@ -488,12 +478,12 @@ func SaveAutofixChanges(lines *Lines) (a
}
err := tmpName.WriteString(text.String())
if err != nil {
- G.Logger.Errorf(tmpName, "Cannot write: %s", err)
+ G.Logger.TechErrorf(tmpName, "Cannot write: %s", err)
continue
}
err = tmpName.Rename(filename)
if err != nil {
- G.Logger.Errorf(tmpName, "Cannot overwrite with autofixed content: %s", err)
+ G.Logger.TechErrorf(tmpName, "Cannot overwrite with autofixed content: %s", err)
continue
}
autofixed = true
Index: pkgsrc/pkgtools/pkglint/files/logging.go
diff -u pkgsrc/pkgtools/pkglint/files/logging.go:1.30 pkgsrc/pkgtools/pkglint/files/logging.go:1.31
--- pkgsrc/pkgtools/pkglint/files/logging.go:1.30 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/logging.go Wed Nov 27 22:10:07 2019
@@ -18,6 +18,7 @@ type Logger struct {
suppressExpl bool
prevLine *Line
+ verbose bool // allow duplicate diagnostics, even in the same line
logged Once
explained Once
histo *histogram.Histogram
@@ -34,7 +35,6 @@ type LoggerOpts struct {
Autofix,
Explain,
ShowSource,
- LogVerbose,
GccOutput,
Quiet bool
}
@@ -52,8 +52,6 @@ var (
AutofixLogLevel = &LogLevel{"AUTOFIX", "autofix"}
)
-var dummyLine = NewLineMulti("", 0, 0, "", nil)
-
// Explain outputs an explanation for the preceding diagnostic
// if the --explain option is given. Otherwise it just records
// that an explanation is available.
@@ -126,7 +124,7 @@ func (l *Logger) Diag(line *Line, level
}
func (l *Logger) FirstTime(filename Path, linenos, msg string) bool {
- if l.Opts.LogVerbose {
+ if l.verbose {
return true
}
@@ -283,13 +281,13 @@ func (l *Logger) Logf(level *LogLevel, f
}
}
-// Errorf logs a technical error on the error output.
+// TechErrorf logs a technical error on the error output.
//
// location must be a slash-separated filename, such as the one in
// Location.Filename. It may be followed by the usual ":123" for line numbers.
//
// For diagnostics, use Logf instead.
-func (l *Logger) Errorf(location Path, format string, args ...interface{}) {
+func (l *Logger) TechErrorf(location Path, format string, args ...interface{}) {
msg := sprintf(format, args...)
var diag string
if l.Opts.GccOutput {
@@ -359,6 +357,7 @@ type SeparatorWriter struct {
}
func NewSeparatorWriter(out io.Writer) *SeparatorWriter {
+ assertNotNil(out)
return &SeparatorWriter{out, 3, bytes.Buffer{}}
}
Index: pkgsrc/pkgtools/pkglint/files/autofix_test.go
diff -u pkgsrc/pkgtools/pkglint/files/autofix_test.go:1.31 pkgsrc/pkgtools/pkglint/files/autofix_test.go:1.32
--- pkgsrc/pkgtools/pkglint/files/autofix_test.go:1.31 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/autofix_test.go Wed Nov 27 22:10:07 2019
@@ -140,7 +140,7 @@ func (s *Suite) Test_Autofix__lonely_sou
t := s.Init(c)
t.SetUpCommandLine("-Wall", "--source")
- G.Logger.Opts.LogVerbose = false // For realistic conditions; otherwise all diagnostics are logged.
+ G.Logger.verbose = false // For realistic conditions; otherwise all diagnostics are logged.
t.SetUpPackage("x11/xorg-cf-files",
".include \"../../x11/xorgproto/buildlink3.mk\"")
@@ -173,7 +173,7 @@ func (s *Suite) Test_Autofix__lonely_sou
t := s.Init(c)
t.SetUpCommandLine("-Wall", "--source", "--explain")
- G.Logger.Opts.LogVerbose = false // For realistic conditions; otherwise all diagnostics are logged.
+ G.Logger.verbose = false // For realistic conditions; otherwise all diagnostics are logged.
t.SetUpPackage("print/tex-bibtex8",
"MAKE_FLAGS+=\tCFLAGS=${CFLAGS.${PKGSRC_COMPILER}}")
@@ -245,7 +245,8 @@ func (s *Suite) Test_Autofix__show_autof
// shown even when they cannot be autofixed.
//
// This is typical when an autofix is provided for simple scenarios,
-// but the code actually found is a little more complicated.
+// but the code actually found is a little more complicated, like needing
+// special escaping for some of the characters or containing linebreaks.
func (s *Suite) Test_Autofix__show_unfixable_diagnostics_in_default_mode(c *check.C) {
t := s.Init(c)
@@ -258,17 +259,11 @@ func (s *Suite) Test_Autofix__show_unfix
lines.Lines[0].Warnf("This warning is shown since the --show-autofix option is not given.")
fix := lines.Lines[1].Autofix()
- fix.Warnf("This warning cannot be fixed and is therefore not shown.")
+ fix.Warnf("This warning cannot be fixed, nevertheless it is shown (1).")
fix.Replace("XXX", "TODO")
fix.Apply()
- fix.Warnf("This warning cannot be fixed automatically but should be shown anyway.")
- fix.Replace("XXX", "TODO")
- fix.Anyway()
- fix.Apply()
-
- // If this warning should ever appear it is probably because fix.anyway is not reset properly.
- fix.Warnf("This warning cannot be fixed and is therefore not shown.")
+ fix.Warnf("This warning cannot be fixed, nevertheless it is shown (2).")
fix.Replace("XXX", "TODO")
fix.Apply()
@@ -279,7 +274,8 @@ func (s *Suite) Test_Autofix__show_unfix
"WARN: Makefile:1: This warning is shown since the --show-autofix option is not given.",
"",
">\tline2",
- "WARN: Makefile:2: This warning cannot be fixed automatically but should be shown anyway.",
+ "WARN: Makefile:2: This warning cannot be fixed, nevertheless it is shown (1).",
+ "WARN: Makefile:2: This warning cannot be fixed, nevertheless it is shown (2).",
"",
">\tline3",
"WARN: Makefile:3: This warning is also shown.")
@@ -321,7 +317,7 @@ func (s *Suite) Test_Autofix__suppress_u
"+\tTODO")
}
-// If an Autofix doesn't do anything, it must not log any diagnostics.
+// If an Autofix doesn't do anything, it nevertheless logs the diagnostics.
func (s *Suite) Test_Autofix__noop_replace(c *check.C) {
t := s.Init(c)
@@ -332,8 +328,14 @@ func (s *Suite) Test_Autofix__noop_repla
fix.ReplaceRegex(`\b[A-Z]{3,}\b`, "---censored---", -1)
fix.Apply()
- // No output since there was no all-uppercase word in the text.
- t.CheckOutputEmpty()
+ // This warning is wrong. This test therefore demonstrates that each
+ // autofix must be properly guarded to only apply when it actually
+ // does something.
+ //
+ // As of November 2019 there is no Rollback method since it was not
+ // needed yet.
+ t.CheckOutputLines(
+ "WARN: Makefile:14: All-uppercase words should not be used at all.")
}
func (s *Suite) Test_Autofix_Warnf__duplicate(c *check.C) {
@@ -550,9 +552,7 @@ func (s *Suite) Test_Autofix_ReplaceAt(c
t := s.Init(c)
lines := func(lines ...string) []string { return lines }
- diagnostics := lines
- autofixes := lines
- test := func(texts []string, rawIndex int, column int, from, to string, diagnostics []string, autofixes []string) {
+ test := func(texts []string, rawIndex int, column int, from, to string, diagnostics ...string) {
mainPart := func() {
mklines := t.NewMkLines("filename.mk", texts...)
@@ -562,17 +562,10 @@ func (s *Suite) Test_Autofix_ReplaceAt(c
fix := mkline.Autofix()
fix.Notef("Should be appended instead of assigned.")
fix.ReplaceAt(rawIndex, column, from, to)
- fix.Anyway()
fix.Apply()
}
- t.SetUpCommandLine("-Wall")
- mainPart()
- t.CheckOutput(diagnostics)
-
- t.SetUpCommandLine("-Wall", "--autofix")
- mainPart()
- t.CheckOutput(autofixes)
+ t.ExpectDiagnosticsAutofix(mainPart, diagnostics...)
}
test(
@@ -580,10 +573,9 @@ func (s *Suite) Test_Autofix_ReplaceAt(c
"VAR=value1 \\",
"\tvalue2"),
0, 3, "=", "+=",
- diagnostics(
- "NOTE: filename.mk:1: Should be appended instead of assigned."),
- autofixes(
- "AUTOFIX: filename.mk:1: Replacing \"=\" with \"+=\"."))
+
+ "NOTE: filename.mk:1: Should be appended instead of assigned.",
+ "AUTOFIX: filename.mk:1: Replacing \"=\" with \"+=\".")
// If the text at the precisely given position does not match,
// the note is still printed because of the fix.Anyway(), but
@@ -593,21 +585,19 @@ func (s *Suite) Test_Autofix_ReplaceAt(c
"VAR=value1 \\",
"\tvalue2"),
0, 3, "?", "+=",
- diagnostics(
- "NOTE: filename.mk:1--2: Should be appended instead of assigned."),
- autofixes(
- nil...))
+
+ "NOTE: filename.mk:1--2: Should be appended instead of assigned.")
// Getting the line number wrong is a strange programming error, and
// there does not need to be any code checking for this in the main code.
t.ExpectPanicMatches(
- func() { test(lines("VAR=value"), 10, 3, "from", "to", nil, nil) },
+ func() { test(lines("VAR=value"), 10, 3, "from", "to", nil...) },
`runtime error: index out of range.*`)
// It is a programming error to replace a string with itself, since that
// would produce confusing diagnostics.
t.ExpectAssert(
- func() { test(lines("VAR=value"), 0, 4, "value", "value", nil, nil) })
+ func() { test(lines("VAR=value"), 0, 4, "value", "value", nil...) })
// Getting the column number wrong may happen when a previous replacement
// has made the string shorter than before, therefore no panic in this case.
@@ -616,10 +606,8 @@ func (s *Suite) Test_Autofix_ReplaceAt(c
"VAR=value1 \\",
"\tvalue2"),
0, 20, "?", "+=",
- diagnostics(
- "NOTE: filename.mk:1--2: Should be appended instead of assigned."),
- autofixes(
- nil...))
+
+ "NOTE: filename.mk:1--2: Should be appended instead of assigned.")
}
func (s *Suite) Test_Autofix_ReplaceRegex__show_autofix(c *check.C) {
@@ -860,7 +848,7 @@ func (s *Suite) Test_Autofix_Custom__in_
// With the default command line options, this warning is printed.
// With the --show-autofix option this warning is NOT printed since it
// cannot be fixed automatically.
-func (s *Suite) Test_Autofix_Anyway__options(c *check.C) {
+func (s *Suite) Test_Autofix_Apply__show_autofix_option(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--show-autofix")
@@ -870,7 +858,6 @@ func (s *Suite) Test_Autofix_Anyway__opt
fix.Warnf("This autofix doesn't match.")
fix.Replace("doesn't match", "")
- fix.Anyway()
fix.Apply()
t.CheckOutputEmpty()
@@ -879,14 +866,13 @@ func (s *Suite) Test_Autofix_Anyway__opt
fix.Warnf("This autofix doesn't match.")
fix.Replace("doesn't match", "")
- fix.Anyway()
fix.Apply()
t.CheckOutputLines(
"WARN: filename:3: This autofix doesn't match.")
}
-func (s *Suite) Test_Autofix_Anyway__autofix_option(c *check.C) {
+func (s *Suite) Test_Autofix_Apply__autofix_option(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--autofix")
@@ -895,17 +881,13 @@ func (s *Suite) Test_Autofix_Anyway__aut
fix := line.Autofix()
fix.Notef("This line is quite short.")
fix.Replace("not found", "needle")
- fix.Anyway()
fix.Apply()
- // The replacement text is not found, therefore the note should not be logged.
- // Because of fix.Anyway, the note should be logged anyway.
- // But because of the --autofix option, the note should not be logged.
- // Therefore, in the end nothing is shown in this case.
+ // Because of the --autofix option, the note is not logged.
t.CheckOutputEmpty()
}
-func (s *Suite) Test_Autofix_Anyway__autofix_and_show_autofix_options(c *check.C) {
+func (s *Suite) Test_Autofix_Apply__autofix_and_show_autofix_options_no_match(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--autofix", "--show-autofix")
@@ -914,27 +896,11 @@ func (s *Suite) Test_Autofix_Anyway__aut
fix := line.Autofix()
fix.Notef("This line is quite short.")
fix.Replace("not found", "needle")
- fix.Anyway()
fix.Apply()
- // The text to be replaced is not found. Because nothing is fixed here,
- // there's no need to log anything.
- //
- // But fix.Anyway is set, therefore the diagnostic should be logged even
- // though it cannot be fixed automatically. This comes handy in situations
- // where simple cases can be fixed automatically and more complex cases
- // (often involving special characters that need to be escaped properly)
- // should nevertheless result in a diagnostics.
- //
- // The --autofix option is set, which means that the diagnostics don't
- // get logged, only the actual fixes do.
- //
- // But then there's also the --show-autofix option, which logs the
- // corresponding diagnostic for each autofix that actually changes
- // something. But this autofix doesn't change anything, therefore even
- // the --show-autofix doesn't have an effect.
- //
- // Therefore, in the end nothing is logged in this case.
+ // Since at least one of the --show-autofix or --autofix options is given,
+ // only those autofixes that actually change something are logged.
+ // This one doesn't find the "not found" text, therefore it is not logged.
t.CheckOutputEmpty()
}
@@ -1045,7 +1011,6 @@ func (s *Suite) Test_Autofix_Apply__anyw
fix := mklines.mklines[1].Autofix()
fix.Errorf("From must be To.")
fix.Replace("from", "to")
- fix.Anyway()
fix.Apply()
mklines.SaveAutofixChanges()
@@ -1064,7 +1029,6 @@ func (s *Suite) Test_Autofix_Apply__sour
fix := lines.Lines[0].Autofix()
fix.Notef("Word should be replaced, but pkglint is not sure which one.")
fix.Replace("word", "replacement")
- fix.Anyway()
fix.Apply()
lines.SaveAutofixChanges()
@@ -1169,7 +1133,8 @@ func (s *Suite) Test_Autofix_setDiag__no
fix.Replace("from", "to")
fix.Apply()
- t.CheckOutputEmpty()
+ t.CheckOutputLines(
+ "NOTE: file.mk:123: Note.")
}
func (s *Suite) Test_Autofix_setDiag__bad_call_sequence(c *check.C) {
Index: pkgsrc/pkgtools/pkglint/files/check_test.go
diff -u pkgsrc/pkgtools/pkglint/files/check_test.go:1.55 pkgsrc/pkgtools/pkglint/files/check_test.go:1.56
--- pkgsrc/pkgtools/pkglint/files/check_test.go:1.55 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/check_test.go Wed Nov 27 22:10:07 2019
@@ -88,23 +88,10 @@ func (s *Suite) TearDownTest(c *check.C)
assertNil(err, "Cannot chdir back to previous dir: %s", err)
if t.seenSetupPkgsrc > 0 && !t.seenFinish && !t.seenMain {
- t.Errorf("After t.SetupPkgsrc(), either t.FinishSetUp() or t.Main() must be called.")
- }
-
- if out := t.Output(); out != "" {
- var msg strings.Builder
- msg.WriteString("\n")
- _, _ = fmt.Fprintf(&msg, "Unchecked output in %s; check with:\n", c.TestName())
- msg.WriteString("\n")
- msg.WriteString("t.CheckOutputLines(\n")
- lines := strings.Split(strings.TrimSpace(out), "\n")
- for i, line := range lines {
- _, _ = fmt.Fprintf(&msg, "\t%q%s\n", line, condStr(i == len(lines)-1, ")", ","))
- }
- _, _ = fmt.Fprintf(&msg, "\n")
- _, _ = os.Stderr.WriteString(msg.String())
+ t.InternalErrorf("After t.SetupPkgsrc(), either t.FinishSetUp() or t.Main() must be called.")
}
+ t.ReportUncheckedOutput()
t.tmpdir = ""
t.DisableTracing()
@@ -160,7 +147,9 @@ func (t *Tester) SetUpCommandLine(args .
//
// It also reveals diagnostics that are logged multiple times per
// line and thus can easily get annoying to the pkgsrc developers.
- G.Logger.Opts.LogVerbose = true
+ //
+ // To avoid running a check multiple times, see Line.once or MkLines.once.
+ G.Logger.verbose = true
t.seenSetUpCommandLine = true
}
@@ -623,7 +612,10 @@ func (t *Tester) SetUpHierarchy() (
case string:
addLine(arg)
case *MkLines:
- text := sprintf(".include %q", relpath(filename.Dir(), arg.lines.Filename))
+ fromDir := G.Pkgsrc.File(filename.Dir())
+ to := G.Pkgsrc.File(arg.lines.Filename)
+ rel := G.Pkgsrc.Relpath(fromDir, to)
+ text := sprintf(".include %q", rel)
addLine(text)
lines = append(lines, arg.lines.Lines...)
default:
@@ -678,14 +670,14 @@ func (s *Suite) Test_Tester_SetUpHierarc
func (t *Tester) FinishSetUp() {
if t.seenSetupPkgsrc == 0 {
- t.Errorf("Unnecessary t.FinishSetUp() since t.SetUpPkgsrc() has not been called.")
+ t.InternalErrorf("Unnecessary t.FinishSetUp() since t.SetUpPkgsrc() has not been called.")
}
if !t.seenFinish {
t.seenFinish = true
G.Pkgsrc.LoadInfrastructure()
} else {
- t.Errorf("Redundant t.FinishSetup() since it was called multiple times.")
+ t.InternalErrorf("Redundant t.FinishSetup() since it was called multiple times.")
}
}
@@ -698,11 +690,11 @@ func (t *Tester) FinishSetUp() {
// Does not work in combination with SetUpOption.
func (t *Tester) Main(args ...string) int {
if t.seenFinish && !t.seenMain {
- t.Errorf("Calling t.FinishSetup() before t.Main() is redundant " +
+ t.InternalErrorf("Calling t.FinishSetup() before t.Main() is redundant " +
"since t.Main() loads the pkgsrc infrastructure.")
}
if t.seenSetUpCommandLine {
- t.Errorf("Calling t.SetupCommandLine() before t.Main() is redundant " +
+ t.InternalErrorf("Calling t.SetupCommandLine() before t.Main() is redundant " +
"since t.Main() accepts the command line options directly.")
}
@@ -741,7 +733,10 @@ func (t *Tester) CheckDeepEquals(actual
return t.c.Check(actual, check.DeepEquals, expected)
}
-func (t *Tester) Errorf(format string, args ...interface{}) {
+// InternalErrorf reports a consistency error in the tests.
+func (t *Tester) InternalErrorf(format string, args ...interface{}) {
+ // It is not possible to panic here since check.v1 would then
+ // ignore all subsequent tests.
_, _ = fmt.Fprintf(os.Stderr, "In %s: %s\n", t.testName, sprintf(format, args...))
}
@@ -817,6 +812,18 @@ func (t *Tester) ExpectAssert(action fun
t.Check(action, check.Panics, "Pkglint internal error")
}
+// ExpectDiagnosticsAutofix first runs the given action with -Wall, and
+// then another time with -Wall --autofix.
+func (t *Tester) ExpectDiagnosticsAutofix(action func(), diagnostics ...string) {
+ t.SetUpCommandLine("-Wall")
+ action()
+
+ t.SetUpCommandLine("-Wall", "--autofix")
+ action()
+
+ t.CheckOutput(diagnostics)
+}
+
// NewRawLines creates lines from line numbers and raw text, including newlines.
//
// Arguments are sequences of either (lineno, orignl) or (lineno, orignl, textnl).
@@ -1174,3 +1181,22 @@ func (t *Tester) Shquote(format string,
}
return sprintf(format, subs...)
}
+
+func (t *Tester) ReportUncheckedOutput() {
+ out := t.Output()
+ if out == "" {
+ return
+ }
+
+ var msg strings.Builder
+ msg.WriteString("\n")
+ _, _ = fmt.Fprintf(&msg, "Unchecked output in %s; check with:\n", t.testName)
+ msg.WriteString("\n")
+ msg.WriteString("t.CheckOutputLines(\n")
+ lines := strings.Split(strings.TrimSpace(out), "\n")
+ for i, line := range lines {
+ _, _ = fmt.Fprintf(&msg, "\t%q%s\n", line, condStr(i == len(lines)-1, ")", ","))
+ }
+ _, _ = fmt.Fprintf(&msg, "\n")
+ _, _ = os.Stderr.WriteString(msg.String())
+}
Index: pkgsrc/pkgtools/pkglint/files/licenses.go
diff -u pkgsrc/pkgtools/pkglint/files/licenses.go:1.27 pkgsrc/pkgtools/pkglint/files/licenses.go:1.28
--- pkgsrc/pkgtools/pkglint/files/licenses.go:1.27 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/licenses.go Wed Nov 27 22:10:07 2019
@@ -36,7 +36,8 @@ func (lc *LicenseChecker) checkName(lice
}
if !licenseFile.IsFile() {
- lc.MkLine.Warnf("License file %s does not exist.", cleanpath(licenseFile))
+ lc.MkLine.Warnf("License file %s does not exist.",
+ lc.MkLine.PathToFile(licenseFile))
}
switch license {
Index: pkgsrc/pkgtools/pkglint/files/licenses_test.go
diff -u pkgsrc/pkgtools/pkglint/files/licenses_test.go:1.25 pkgsrc/pkgtools/pkglint/files/licenses_test.go:1.26
--- pkgsrc/pkgtools/pkglint/files/licenses_test.go:1.25 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/licenses_test.go Wed Nov 27 22:10:07 2019
@@ -11,7 +11,7 @@ func (s *Suite) Test_LicenseChecker_Chec
"The licenses for most software are designed to take away ...")
test := func(licenseValue string, diagnostics ...string) {
- mklines := t.NewMkLines("Makefile",
+ mklines := t.SetUpFileMkLines("Makefile",
"LICENSE=\t"+licenseValue)
mklines.ForEach(func(mkline *MkLine) {
@@ -22,25 +22,33 @@ func (s *Suite) Test_LicenseChecker_Chec
}
test("gpl-v2",
- "WARN: Makefile:1: License file ~/licenses/gpl-v2 does not exist.")
+ "WARN: ~/Makefile:1: License file licenses/gpl-v2 does not exist.")
test("no-profit shareware",
- "ERROR: Makefile:1: Parse error for license condition \"no-profit shareware\".")
+ "ERROR: ~/Makefile:1: Parse error for license condition \"no-profit shareware\".")
test("no-profit AND shareware",
- "WARN: Makefile:1: License file ~/licenses/no-profit does not exist.",
- "ERROR: Makefile:1: License \"no-profit\" must not be used.",
- "WARN: Makefile:1: License file ~/licenses/shareware does not exist.",
- "ERROR: Makefile:1: License \"shareware\" must not be used.")
+ "WARN: ~/Makefile:1: License file licenses/no-profit does not exist.",
+ "ERROR: ~/Makefile:1: License \"no-profit\" must not be used.",
+ "WARN: ~/Makefile:1: License file licenses/shareware does not exist.",
+ "ERROR: ~/Makefile:1: License \"shareware\" must not be used.")
test("gnu-gpl-v2",
nil...)
test("gnu-gpl-v2 AND gnu-gpl-v2 OR gnu-gpl-v2",
- "ERROR: Makefile:1: AND and OR operators in license conditions can only be combined using parentheses.")
+ "ERROR: ~/Makefile:1: AND and OR operators in license conditions "+
+ "can only be combined using parentheses.")
+
+ test("gnu-gpl-v2 AND (gnu-gpl-v2) OR gnu-gpl-v2",
+ "ERROR: ~/Makefile:1: AND and OR operators in license conditions "+
+ "can only be combined using parentheses.")
test("(gnu-gpl-v2 OR gnu-gpl-v2) AND gnu-gpl-v2",
nil...)
+
+ test("gnu-gpl-v2 OR (gnu-gpl-v2 AND gnu-gpl-v2)",
+ nil...)
}
func (s *Suite) Test_LicenseChecker_checkName__LICENSE_FILE(c *check.C) {
Index: pkgsrc/pkgtools/pkglint/files/line.go
diff -u pkgsrc/pkgtools/pkglint/files/line.go:1.39 pkgsrc/pkgtools/pkglint/files/line.go:1.40
--- pkgsrc/pkgtools/pkglint/files/line.go:1.39 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/line.go Wed Nov 27 22:10:07 2019
@@ -119,7 +119,7 @@ func (line *Line) RefToLocation(other Lo
// This is typically used for arguments in diagnostics, which should always be
// relative to the line with which the diagnostic is associated.
func (line *Line) PathToFile(filePath Path) Path {
- return relpath(line.Filename.Dir(), filePath)
+ return G.Pkgsrc.Relpath(line.Filename.Dir(), filePath)
}
func (line *Line) IsMultiline() bool {
@@ -182,6 +182,10 @@ func (line *Line) String() string {
func (line *Line) Autofix() *Autofix {
if line.autofix == nil {
line.autofix = NewAutofix(line)
+ } else {
+ // This assertion fails if an Autofix is reused before
+ // its Apply method is called.
+ assert(line.autofix.autofixShortTerm.diagFormat == "")
}
return line.autofix
}
Index: pkgsrc/pkgtools/pkglint/files/plist_test.go
diff -u pkgsrc/pkgtools/pkglint/files/plist_test.go:1.39 pkgsrc/pkgtools/pkglint/files/plist_test.go:1.40
--- pkgsrc/pkgtools/pkglint/files/plist_test.go:1.39 Sun Nov 17 01:26:25 2019
+++ pkgsrc/pkgtools/pkglint/files/plist_test.go Wed Nov 27 22:10:07 2019
@@ -707,7 +707,7 @@ func (s *Suite) Test_PlistChecker_checkP
// This variant is typical for recursive runs of pkglint.
G.Check("./graphics/gnome-icon-theme-extras")
- // Up to March 2019, a bug in relpath produced different behavior
+ // Up to March 2019, a bug in Relpath produced different behavior
// depending on the leading dot.
t.CheckOutputEmpty()
}
Index: pkgsrc/pkgtools/pkglint/files/line_test.go
diff -u pkgsrc/pkgtools/pkglint/files/line_test.go:1.19 pkgsrc/pkgtools/pkglint/files/line_test.go:1.20
--- pkgsrc/pkgtools/pkglint/files/line_test.go:1.19 Sun Jul 14 21:25:47 2019
+++ pkgsrc/pkgtools/pkglint/files/line_test.go Wed Nov 27 22:10:07 2019
@@ -53,3 +53,21 @@ func (s *Suite) Test_Line_Fatalf__trace(
"TRACE: - (*Suite).Test_Line_Fatalf__trace.func2()",
"FATAL: filename:123: Cannot continue because of \"reason 1\" and \"reason 2\".")
}
+
+func (s *Suite) Test_Line_Autofix__reuse_incomplete(c *check.C) {
+ t := s.Init(c)
+
+ line := t.NewLine("filename.mk", 1, "")
+
+ fix := line.Autofix()
+ fix.Warnf("Warning.")
+ // For some reason, the other required calls are left out.
+
+ t.ExpectAssert(func() { _ = line.Autofix() })
+
+ // Properly finish the standard call sequence for an Autofix.
+ fix.Apply()
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:1: Warning.")
+}
Index: pkgsrc/pkgtools/pkglint/files/shtokenizer_test.go
diff -u pkgsrc/pkgtools/pkglint/files/shtokenizer_test.go:1.19 pkgsrc/pkgtools/pkglint/files/shtokenizer_test.go:1.20
--- pkgsrc/pkgtools/pkglint/files/shtokenizer_test.go:1.19 Sun Nov 17 01:26:25 2019
+++ pkgsrc/pkgtools/pkglint/files/shtokenizer_test.go Wed Nov 27 22:10:07 2019
@@ -90,9 +90,13 @@ func (s *Suite) Test_ShTokenizer__fuzzin
fuzzer.Char("\"'`$();|_#", 10)
fuzzer.Range('a', 'z', 5)
+ // This "real" line is necessary because the autofix
+ // in MkParser.varUseBrace checks this.
+ line := t.NewLine("Makefile", 1, "\t:")
+
defer fuzzer.CheckOk()
for i := 0; i < 1000; i++ {
- tokenizer := NewShTokenizer(dummyLine, fuzzer.Generate(50), false)
+ tokenizer := NewShTokenizer(line, fuzzer.Generate(50), false)
tokenizer.ShAtoms()
t.Output() // Discard the output, only react on panics.
}
@@ -105,7 +109,8 @@ func (s *Suite) Test_ShTokenizer_ShAtom(
// testRest ensures that the given string is parsed to the expected
// atoms, and returns the remaining text.
testRest := func(s string, expectedAtoms []*ShAtom, expectedRest string) {
- p := NewShTokenizer(dummyLine, s, false)
+ line := t.NewLine("filename.mk", 1, "")
+ p := NewShTokenizer(line, s, false)
actualAtoms := p.ShAtoms()
@@ -512,7 +517,8 @@ func (s *Suite) Test_ShTokenizer_ShAtom_
t := s.Init(c)
test := func(input, expectedOutput string) {
- p := NewShTokenizer(dummyLine, input, false)
+ line := t.NewLine("filename.mk", 1, "")
+ p := NewShTokenizer(line, input, false)
q := shqPlain
result := ""
for {
@@ -601,7 +607,8 @@ func (s *Suite) Test_ShTokenizer_ShToken
// testRest ensures that the given string is parsed to the expected
// tokens, and returns the remaining text.
testRest := func(str string, expected ...string) string {
- p := NewShTokenizer(dummyLine, str, false)
+ line := t.NewLine("testRest.mk", 1, "")
+ p := NewShTokenizer(line, str, false)
for _, exp := range expected {
t.CheckEquals(p.ShToken().MkText, exp)
}
@@ -609,7 +616,8 @@ func (s *Suite) Test_ShTokenizer_ShToken
}
test := func(str string, expected ...string) {
- p := NewShTokenizer(dummyLine, str, false)
+ line := t.NewLine("test.mk", 1, "")
+ p := NewShTokenizer(line, str, false)
for _, exp := range expected {
t.CheckEquals(p.ShToken().MkText, exp)
}
@@ -618,7 +626,8 @@ func (s *Suite) Test_ShTokenizer_ShToken
}
testNil := func(str string) {
- p := NewShTokenizer(dummyLine, str, false)
+ line := t.NewLine("testNil.mk", 1, "")
+ p := NewShTokenizer(line, str, false)
c.Check(p.ShToken(), check.IsNil)
t.CheckEquals(p.Rest(), "")
t.CheckOutputEmpty()
Index: pkgsrc/pkgtools/pkglint/files/logging_test.go
diff -u pkgsrc/pkgtools/pkglint/files/logging_test.go:1.22 pkgsrc/pkgtools/pkglint/files/logging_test.go:1.23
--- pkgsrc/pkgtools/pkglint/files/logging_test.go:1.22 Sun Nov 17 01:26:25 2019
+++ pkgsrc/pkgtools/pkglint/files/logging_test.go Wed Nov 27 22:10:07 2019
@@ -248,6 +248,10 @@ func (s *Suite) Test_Logger_Diag__show_s
func (s *Suite) Test_Logger_Diag__source_duplicates(c *check.C) {
t := s.Init(c)
+ // Up to pkglint 19.3.10, this variable had been reset during
+ // command line parsing. In 19.3.11 the command line option has
+ // been removed, therefore it must be reset manually.
+ G.Logger.verbose = false
t.SetUpPkgsrc()
t.CreateFileLines("category/dependency/patches/patch-aa",
CvsID,
@@ -682,7 +686,7 @@ func (s *Suite) Test_Logger_Logf__duplic
t := s.Init(c)
t.SetUpCommandLine("--explain")
- G.Logger.Opts.LogVerbose = false
+ G.Logger.verbose = false
line := t.NewLine("README.txt", 123, "text")
// Is logged because it is the first appearance of this warning.
@@ -780,7 +784,7 @@ func (s *Suite) Test_Logger_Logf__duplic
t := s.Init(c)
t.SetUpCommandLine("--explain", "--autofix")
- G.Logger.Opts.LogVerbose = false // See SetUpTest
+ G.Logger.verbose = false // See SetUpTest
line := t.NewLine("README.txt", 123, "text")
fix := line.Autofix()
@@ -801,12 +805,12 @@ func (s *Suite) Test_Logger_Logf__panic(
"Pkglint internal error: Diagnostic format \"No period\" must end in a period.")
}
-func (s *Suite) Test_Logger_Errorf__gcc_format(c *check.C) {
+func (s *Suite) Test_Logger_TechErrorf__gcc_format(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--gcc-output-format")
- G.Logger.Errorf("filename", "Cannot be opened for %s.", "reading")
+ G.Logger.TechErrorf("filename", "Cannot be opened for %s.", "reading")
t.CheckOutputLines(
"filename: error: Cannot be opened for reading.")
Index: pkgsrc/pkgtools/pkglint/files/mkline.go
diff -u pkgsrc/pkgtools/pkglint/files/mkline.go:1.64 pkgsrc/pkgtools/pkglint/files/mkline.go:1.65
--- pkgsrc/pkgtools/pkglint/files/mkline.go:1.64 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/mkline.go Wed Nov 27 22:10:07 2019
@@ -330,9 +330,8 @@ func (mkline *MkLine) Tokenize(text stri
if warn {
line = mkline.Line
}
- p := NewMkParser(line, text)
- tokens = p.MkTokens()
- rest = p.Rest()
+ p := NewMkLexer(text, line)
+ tokens, rest = p.MkTokens()
}
if warn && rest != "" {
@@ -502,9 +501,8 @@ func (mkline *MkLine) ValueTokens() ([]*
// No error checking here since all this has already been done when the
// whole line was parsed in MkLineParser.Parse.
- p := NewMkParser(nil, value)
- assign.valueMk = p.MkTokens()
- assign.valueMkRest = p.Rest()
+ p := NewMkLexer(value, nil)
+ assign.valueMk, assign.valueMkRest = p.MkTokens()
return assign.valueMk, assign.valueMkRest
}
@@ -545,7 +543,8 @@ func (mkline *MkLine) Fields() []string
func (*MkLine) WithoutMakeVariables(value string) string {
var valueNovar strings.Builder
- for _, token := range NewMkParser(nil, value).MkTokens() {
+ tokens, _ := NewMkLexer(value, nil).MkTokens()
+ for _, token := range tokens {
if token.Varuse == nil {
valueNovar.WriteString(token.Text)
}
@@ -567,7 +566,7 @@ func (mkline *MkLine) ResolveVarsInRelat
tmp := relativePath
if tmp.ContainsText("PKGSRCDIR") {
- pkgsrcdir := relpath(basedir, G.Pkgsrc.File("."))
+ pkgsrcdir := G.Pkgsrc.Relpath(basedir, G.Pkgsrc.File("."))
if G.Testing {
// Relative pkgsrc paths usually only contain two or three levels.
@@ -789,7 +788,8 @@ func (mkline *MkLine) ForEachUsed(action
return
}
- for _, token := range NewMkParser(nil, text).MkTokens() {
+ tokens, _ := NewMkLexer(text, nil).MkTokens()
+ for _, token := range tokens {
if token.Varuse != nil {
searchInVarUse(token.Varuse, time)
}
Index: pkgsrc/pkgtools/pkglint/files/mkline_test.go
diff -u pkgsrc/pkgtools/pkglint/files/mkline_test.go:1.72 pkgsrc/pkgtools/pkglint/files/mkline_test.go:1.73
--- pkgsrc/pkgtools/pkglint/files/mkline_test.go:1.72 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/mkline_test.go Wed Nov 27 22:10:07 2019
@@ -282,7 +282,9 @@ func (s *Suite) Test_MkLine_ValueFields_
url := "http://registry.gimp.org/file/fix-ca.c?action=download&id=9884&file="
mkline := t.NewMkLine("filename.mk", 123, "MASTER_SITES=\t"+url)
- words, rest := splitIntoShellTokens(dummyLine, url) // Doesn't really make sense
+ // Splitting a URL into shell tokens doesn't really make sense,
+ // but is used here anyway.
+ words, rest := splitIntoShellTokens(mkline.Line, url)
t.CheckDeepEquals(words, []string{
"http://registry.gimp.org/file/fix-ca.c?action=download",
Index: pkgsrc/pkgtools/pkglint/files/mklinechecker.go
diff -u pkgsrc/pkgtools/pkglint/files/mklinechecker.go:1.54 pkgsrc/pkgtools/pkglint/files/mklinechecker.go:1.55
--- pkgsrc/pkgtools/pkglint/files/mklinechecker.go:1.54 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/mklinechecker.go Wed Nov 27 22:10:07 2019
@@ -387,7 +387,7 @@ func (ck MkLineChecker) checkTextVarUse(
defer trace.Call(vartype, time)()
}
- tokens := NewMkParser(nil, text).MkTokens()
+ tokens, _ := NewMkLexer(text, nil).MkTokens()
for i, token := range tokens {
if token.Varuse != nil {
spaceLeft := i-1 < 0 || matches(tokens[i-1].Text, `[\t ]$`)
@@ -814,7 +814,6 @@ func (ck MkLineChecker) checkVarUseQuoti
fix.Explain(
seeGuide("Echoing a string exactly as-is", "echo-literal"))
fix.Replace("${"+varname+mod+"}", "${"+varname+correctMod+"}")
- fix.Anyway()
fix.Apply()
} else {
mkline.Warnf("Please use ${%s%s} instead of ${%s%s} and make sure"+
@@ -1103,7 +1102,6 @@ func (ck MkLineChecker) checkVarassignRi
if len(categories) > 1 && categories[1] == expected {
fix.Replace(categories[0]+" "+categories[1], categories[1]+" "+categories[0])
}
- fix.Anyway()
fix.Apply()
}
@@ -1341,7 +1339,7 @@ func (ck MkLineChecker) checkInclude() {
case includedFile.HasSuffixPath("intltool/buildlink3.mk"):
mkline.Warnf("Please write \"USE_TOOLS+= intltool\" instead of this line.")
- case includedFile.HasSuffixText("/builtin.mk"): // TODO: maybe HasSuffixPath
+ case includedFile != "builtin.mk" && includedFile.HasSuffixPath("builtin.mk"):
if mkline.Basename != "hacks.mk" && !mkline.HasRationale() {
fix := mkline.Autofix()
fix.Errorf("%s must not be included directly. Include \"%s/buildlink3.mk\" instead.", includedFile, includedFile.Dir())
@@ -1671,7 +1669,6 @@ func (ck MkLineChecker) simplifyConditio
"An entirely different case is when the pattern contains wildcards like ^, *, $.",
"In such a case, using the :M or :N modifiers is useful and preferred.")
fix.Replace(replace(varname, positive, pattern))
- fix.Anyway()
fix.Apply()
}
}
@@ -1741,7 +1738,6 @@ func (ck MkLineChecker) checkCompareVarS
"Therefore, comparing it using == or != leads to wrong results in these cases.")
fix.Replace("${PKGSRC_COMPILER} "+op+" "+value, "${PKGSRC_COMPILER:"+matchOp+value+"}")
fix.Replace("${PKGSRC_COMPILER} "+op+" \""+value+"\"", "${PKGSRC_COMPILER:"+matchOp+value+"}")
- fix.Anyway()
fix.Apply()
}
@@ -1807,10 +1803,10 @@ func (ck MkLineChecker) checkDependencyR
}
func (ck MkLineChecker) checkDependencyTarget(target string, allowedTargets map[string]bool) {
- if target == ".PHONY" ||
- target == ".ORDER" ||
- NewMkParser(nil, target).VarUse() != nil ||
- allowedTargets[target] {
+ if target == ".PHONY" || target == ".ORDER" || allowedTargets[target] {
+ return
+ }
+ if NewMkLexer(target, nil).VarUse() != nil {
return
}
Index: pkgsrc/pkgtools/pkglint/files/mklinechecker_test.go
diff -u pkgsrc/pkgtools/pkglint/files/mklinechecker_test.go:1.49 pkgsrc/pkgtools/pkglint/files/mklinechecker_test.go:1.50
--- pkgsrc/pkgtools/pkglint/files/mklinechecker_test.go:1.49 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/mklinechecker_test.go Wed Nov 27 22:10:07 2019
@@ -57,7 +57,7 @@ func (s *Suite) Test_MkLineChecker_Check
// relative path fails since that depends on the actual file system,
// not on syntactical paths; see os.Stat in CheckRelativePath.
//
- // TODO: Refactor relpath to be independent of a filesystem.
+ // TODO: Refactor Relpath to be independent of a filesystem.
mklines.Check()
@@ -102,7 +102,9 @@ func (s *Suite) Test_MkLineChecker_Check
t.CheckOutputLines(
"WARN: x11/xkeyboard-config/Makefile:3: "+
"Invalid part \"/xkbcomp\" after variable name \"${XKBBASE}\".",
- // TODO: Avoid this duplicate diagnostic.
+ // TODO: Avoid these duplicate diagnostics.
+ "WARN: x11/xkeyboard-config/Makefile:3: "+
+ "Invalid part \"/xkbcomp\" after variable name \"${XKBBASE}\".",
"WARN: x11/xkeyboard-config/Makefile:3: "+
"Invalid part \"/xkbcomp\" after variable name \"${XKBBASE}\".",
"WARN: x11/xkeyboard-config/Makefile:3: XKBBASE is used but not defined.")
@@ -125,6 +127,7 @@ func (s *Suite) Test_MkLineChecker_check
mklines.Check()
t.CheckOutputLines(
+ "NOTE: ~/filename.mk:2--3: Trailing whitespace.",
"WARN: ~/filename.mk:3: This line looks empty but continues the previous line.")
}
@@ -2240,6 +2243,22 @@ func (s *Suite) Test_MkLineChecker_check
"Include \"../../category/package/buildlink3.mk\" instead.")
}
+func (s *Suite) Test_MkLineChecker_checkInclude__buildlink3_mk_includes_builtin_mk(c *check.C) {
+ t := s.Init(c)
+
+ t.SetUpPkgsrc()
+ mklines := t.SetUpFileMkLines("category/package/buildlink3.mk",
+ MkCvsID,
+ ".include \"builtin.mk\"")
+ t.CreateFileLines("category/package/builtin.mk",
+ MkCvsID)
+ t.FinishSetUp()
+
+ mklines.Check()
+
+ t.CheckOutputEmpty()
+}
+
func (s *Suite) Test_MkLineChecker_checkInclude__builtin_mk_rationale(c *check.C) {
t := s.Init(c)
Index: pkgsrc/pkgtools/pkglint/files/shell.go
diff -u pkgsrc/pkgtools/pkglint/files/shell.go:1.49 pkgsrc/pkgtools/pkglint/files/shell.go:1.50
--- pkgsrc/pkgtools/pkglint/files/shell.go:1.49 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/shell.go Wed Nov 27 22:10:07 2019
@@ -116,7 +116,7 @@ func (scc *SimpleCommandChecker) handleC
}
shellword := scc.strcmd.Name
- varuse := NewMkParser(nil, shellword).VarUse()
+ varuse := NewMkLexer(shellword, nil).VarUse()
if varuse == nil {
return false
}
Index: pkgsrc/pkgtools/pkglint/files/mklineparser.go
diff -u pkgsrc/pkgtools/pkglint/files/mklineparser.go:1.4 pkgsrc/pkgtools/pkglint/files/mklineparser.go:1.5
--- pkgsrc/pkgtools/pkglint/files/mklineparser.go:1.4 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/mklineparser.go Wed Nov 27 22:10:07 2019
@@ -85,6 +85,7 @@ func (p MkLineParser) parseVarassign(lin
fix.Notef("Unnecessary space after variable name %q.", varname)
fix.Replace(varname+a.spaceAfterVarname+op.String(), varname+op.String())
fix.Apply()
+ // FIXME: Preserve the alignment of the variable value.
}
}
@@ -299,7 +300,7 @@ func (MkLineParser) split(line *Line, te
mainWithSpaces = text
}
- parser := NewMkParser(line, mainWithSpaces)
+ parser := NewMkLexer(mainWithSpaces, line)
lexer := parser.lexer
parseOther := func() string {
Index: pkgsrc/pkgtools/pkglint/files/mklines.go
diff -u pkgsrc/pkgtools/pkglint/files/mklines.go:1.60 pkgsrc/pkgtools/pkglint/files/mklines.go:1.61
--- pkgsrc/pkgtools/pkglint/files/mklines.go:1.60 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/mklines.go Wed Nov 27 22:10:07 2019
@@ -167,7 +167,7 @@ func (mklines *MkLines) collectDocumente
commentLines++
- parser := NewMkParser(nil, words[1])
+ parser := NewMkLexer(words[1], nil)
varname := parser.Varname()
if len(varname) < 3 {
break
Index: pkgsrc/pkgtools/pkglint/files/vartypecheck_test.go
diff -u pkgsrc/pkgtools/pkglint/files/vartypecheck_test.go:1.60 pkgsrc/pkgtools/pkglint/files/vartypecheck_test.go:1.61
--- pkgsrc/pkgtools/pkglint/files/vartypecheck_test.go:1.60 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/vartypecheck_test.go Wed Nov 27 22:10:07 2019
@@ -897,13 +897,14 @@ func (s *Suite) Test_VartypeCheck_LdFlag
func (s *Suite) Test_VartypeCheck_License(c *check.C) {
t := s.Init(c)
+ t.Chdir(".")
t.SetUpPkgsrc() // Adds the gnu-gpl-v2 and 2-clause-bsd licenses
t.SetUpPackage("category/package")
t.FinishSetUp()
G.Pkg = NewPackage(t.File("category/package"))
- mklines := t.NewMkLines("perl5.mk",
+ mklines := t.SetUpFileMkLines("perl5.mk",
MkCvsID,
"PERL5_LICENSE= gnu-gpl-v2 OR artistic")
// Also registers the PERL5_LICENSE variable in the package.
@@ -920,7 +921,7 @@ func (s *Suite) Test_VartypeCheck_Licens
vt.Output(
"ERROR: filename.mk:2: Parse error for license condition \"AND mit\".",
- "WARN: filename.mk:3: License file ~/licenses/artistic does not exist.",
+ "WARN: filename.mk:3: License file licenses/artistic does not exist.",
"ERROR: filename.mk:4: Parse error for license condition \"${UNKNOWN_LICENSE}\".")
vt.Op(opAssignAppend)
@@ -930,7 +931,7 @@ func (s *Suite) Test_VartypeCheck_Licens
vt.Output(
"ERROR: filename.mk:11: Parse error for appended license condition \"gnu-gpl-v2\".",
- "WARN: filename.mk:12: License file ~/licenses/mit does not exist.")
+ "WARN: filename.mk:12: License file licenses/mit does not exist.")
}
func (s *Suite) Test_VartypeCheck_MachineGnuPlatform(c *check.C) {
Index: pkgsrc/pkgtools/pkglint/files/mkparser.go
diff -u pkgsrc/pkgtools/pkglint/files/mkparser.go:1.36 pkgsrc/pkgtools/pkglint/files/mkparser.go:1.37
--- pkgsrc/pkgtools/pkglint/files/mkparser.go:1.36 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/mkparser.go Wed Nov 27 22:10:07 2019
@@ -1,7 +1,6 @@
package pkglint
import (
- "netbsd.org/pkglint/regex"
"netbsd.org/pkglint/textproc"
"strings"
)
@@ -10,6 +9,7 @@ import (
// things related to Makefiles.
type MkParser struct {
Line *Line
+ mklex *MkLexer
lexer *textproc.Lexer
EmitWarnings bool
}
@@ -31,387 +31,13 @@ func (p *MkParser) Rest() string {
// which means the # is a normal character and does not introduce a Makefile comment.
// For VarUse, this distinction is irrelevant.
func NewMkParser(line *Line, text string) *MkParser {
- return &MkParser{line, textproc.NewLexer(text), line != nil}
-}
-
-// MkTokens splits a text like in the following example:
-// Text${VAR:Mmodifier}${VAR2}more text${VAR3}
-// into tokens like these:
-// Text
-// ${VAR:Mmodifier}
-// ${VAR2}
-// more text
-// ${VAR3}
-func (p *MkParser) MkTokens() []*MkToken {
- lexer := p.lexer
-
- var tokens []*MkToken
- for !p.EOF() {
- mark := lexer.Mark()
- if varuse := p.VarUse(); varuse != nil {
- tokens = append(tokens, &MkToken{Text: lexer.Since(mark), Varuse: varuse})
- continue
- }
-
- for lexer.NextBytesFunc(func(b byte) bool { return b != '$' }) != "" || lexer.SkipString("$$") {
- }
- text := lexer.Since(mark)
- if text != "" {
- tokens = append(tokens, &MkToken{Text: text})
- continue
- }
-
- break
- }
- return tokens
-}
-
-func (p *MkParser) VarUse() *MkVarUse {
- rest := p.lexer.Rest()
- if len(rest) < 2 || rest[0] != '$' {
- return nil
- }
-
- switch rest[1] {
- case '{', '(':
- return p.varUseBrace(rest[1] == '(')
-
- case '$':
- // This is an escaped dollar character and not a variable use.
- return nil
-
- case '@', '<', ' ':
- // These variable names are known to exist.
- //
- // Many others are also possible but not used in practice.
- // In particular, when parsing the :C or :S modifier,
- // the $ must not be interpreted as a variable name,
- // even when it looks like $/ could refer to the "/" variable.
- //
- // TODO: Find out whether $" is a variable use when it appears in the :M modifier.
- p.lexer.Skip(2)
- return &MkVarUse{rest[1:2], nil}
-
- default:
- return p.varUseAlnum()
- }
-}
-
-// varUseBrace parses:
-// ${VAR}
-// ${arbitrary text:L}
-// ${variable with invalid chars}
-// $(PARENTHESES)
-// ${VAR:Mpattern:C,:,colon,g:Q:Q:Q}
-func (p *MkParser) varUseBrace(usingRoundParen bool) *MkVarUse {
- lexer := p.lexer
-
- beforeDollar := lexer.Mark()
- lexer.Skip(2)
-
- closing := byte('}')
- if usingRoundParen {
- closing = ')'
- }
-
- beforeVarname := lexer.Mark()
- varname := p.Varname()
- p.varUseText(closing)
- varExpr := lexer.Since(beforeVarname)
-
- modifiers := p.VarUseModifiers(varExpr, closing)
-
- closed := lexer.SkipByte(closing)
-
- if p.EmitWarnings {
- if !closed {
- p.Line.Warnf("Missing closing %q for %q.", string(rune(closing)), varExpr)
- }
-
- if usingRoundParen && closed {
- parenVaruse := lexer.Since(beforeDollar)
- edit := []byte(parenVaruse)
- edit[1] = '{'
- edit[len(edit)-1] = '}'
- bracesVaruse := string(edit)
-
- fix := p.Line.Autofix()
- fix.Warnf("Please use curly braces {} instead of round parentheses () for %s.", varExpr)
- fix.Replace(parenVaruse, bracesVaruse)
- fix.Apply()
- }
-
- if len(varExpr) > len(varname) && !(&MkVarUse{varExpr, modifiers}).IsExpression() {
- p.Line.Warnf("Invalid part %q after variable name %q.", varExpr[len(varname):], varname)
- }
- }
-
- return &MkVarUse{varExpr, modifiers}
-}
-
-func (p *MkParser) varUseAlnum() *MkVarUse {
- lexer := p.lexer
-
- apparentVarname := textproc.NewLexer(lexer.Rest()[1:]).NextBytesSet(textproc.AlnumU)
- if apparentVarname == "" {
- return nil
- }
-
- lexer.Skip(2)
-
- if p.EmitWarnings {
- if len(apparentVarname) > 1 {
- p.Line.Errorf("$%[1]s is ambiguous. Use ${%[1]s} if you mean a Make variable or $$%[1]s if you mean a shell variable.",
- apparentVarname)
- p.Line.Explain(
- "Only the first letter after the dollar is the variable name.",
- "Everything following it is normal text, even if it looks like a variable name to human readers.")
- } else {
- p.Line.Warnf("$%[1]s is ambiguous. Use ${%[1]s} if you mean a Make variable or $$%[1]s if you mean a shell variable.", apparentVarname)
- p.Line.Explain(
- "In its current form, this variable is parsed as a Make variable.",
- "For human readers though, $x looks more like a shell variable than a Make variable,",
- "since Make variables are usually written using braces (BSD-style) or parentheses (GNU-style).")
- }
- }
-
- return &MkVarUse{apparentVarname[:1], nil}
-}
-
-// VarUseModifiers parses the modifiers of a variable being used, such as :Q, :Mpattern.
-//
-// See the bmake manual page.
-func (p *MkParser) VarUseModifiers(varname string, closing byte) []MkVarUseModifier {
- lexer := p.lexer
-
- var modifiers []MkVarUseModifier
- // The :S and :C modifiers may be chained without using the : as separator.
- mayOmitColon := false
-
- for lexer.SkipByte(':') || mayOmitColon {
- modifier := p.varUseModifier(varname, closing)
- if modifier != "" {
- modifiers = append(modifiers, MkVarUseModifier{modifier})
- }
- mayOmitColon = modifier != "" && (modifier[0] == 'S' || modifier[0] == 'C')
- }
- return modifiers
-}
-
-// varUseModifier parses a single variable modifier such as :Q or :S,from,to,.
-// The actual parsing starts after the leading colon.
-func (p *MkParser) varUseModifier(varname string, closing byte) string {
- lexer := p.lexer
- mark := lexer.Mark()
-
- switch lexer.PeekByte() {
- case 'E', 'H', 'L', 'O', 'Q', 'R', 'T', 's', 't', 'u':
- mod := lexer.NextBytesSet(textproc.Alnum)
-
- switch mod {
- case
- "E", // Extension, e.g. path/file.suffix => suffix
- "H", // Head, e.g. dir/subdir/file.suffix => dir/subdir
- "L", // XXX: Shouldn't this be handled specially?
- "O", // Order alphabetically
- "Ox", // Shuffle
- "Q", // Quote shell meta-characters
- "R", // Strip the file suffix, e.g. path/file.suffix => file
- "T", // Basename, e.g. path/file.suffix => file.suffix
- "sh", // Evaluate the variable value as shell command
- "tA", // Try to convert to absolute path
- "tW", // Causes the value to be treated as a single word
- "tl", // To lowercase
- "tu", // To uppercase
- "tw", // Causes the value to be treated as list of words
- "u": // Remove adjacent duplicate words (like uniq(1))
- return mod
- }
-
- if hasPrefix(mod, "ts") {
- // See devel/bmake/files/var.c:/case 't'
- sep := mod[2:] + p.varUseText(closing)
- switch {
- case sep == "":
- lexer.SkipString(":")
- case len(sep) == 1:
- break
- case matches(sep, `^\\\d+`):
- break
- default:
- if p.EmitWarnings {
- p.Line.Warnf("Invalid separator %q for :ts modifier of %q.", sep, varname)
- p.Line.Explain(
- "The separator for the :ts modifier must be either a single character",
- "or an escape sequence like \\t or \\n or an octal or decimal escape",
- "sequence; see the bmake man page for further details.")
- }
- }
- return lexer.Since(mark)
- }
-
- case '=', 'D', 'M', 'N', 'U':
- lexer.Skip(1)
- re := regcomp(regex.Pattern(condStr(closing == '}', `^([^$:\\}]|\$\$|\\.)+`, `^([^$:\\)]|\$\$|\\.)+`)))
- for p.VarUse() != nil || lexer.SkipRegexp(re) {
- }
- arg := lexer.Since(mark)
- return strings.Replace(arg, "\\:", ":", -1)
-
- case 'C', 'S':
- if ok, _, _, _, _ := p.varUseModifierSubst(closing); ok {
- return lexer.Since(mark)
- }
-
- case '@':
- if p.varUseModifierAt(lexer, varname) {
- return lexer.Since(mark)
- }
-
- case '[':
- if lexer.SkipRegexp(regcomp(`^\[(?:[-.\d]+|#)\]`)) {
- return lexer.Since(mark)
- }
-
- case '?':
- lexer.Skip(1)
- p.varUseText(closing)
- if lexer.SkipByte(':') {
- p.varUseText(closing)
- return lexer.Since(mark)
- }
- }
-
- lexer.Reset(mark)
-
- re := regcomp(regex.Pattern(condStr(closing == '}', `^([^:$}]|\$\$)+`, `^([^:$)]|\$\$)+`)))
- for p.VarUse() != nil || lexer.SkipRegexp(re) {
- }
- modifier := lexer.Since(mark)
-
- // ${SOURCES:%.c=%.o} or ${:!uname -a!:[2]}
- if contains(modifier, "=") || (hasPrefix(modifier, "!") && hasSuffix(modifier, "!")) {
- return modifier
- }
-
- if p.EmitWarnings && modifier != "" {
- p.Line.Warnf("Invalid variable modifier %q for %q.", modifier, varname)
- }
-
- return ""
-}
-
-// varUseText parses any text up to the next colon or closing mark.
-// Nested variable uses are parsed as well.
-//
-// This is used for the :L and :? modifiers since they accept arbitrary
-// text as the "variable name" and effectively interpret it as the variable
-// value instead.
-func (p *MkParser) varUseText(closing byte) string {
- lexer := p.lexer
- start := lexer.Mark()
- re := regcomp(regex.Pattern(condStr(closing == '}', `^([^$:}]|\$\$)+`, `^([^$:)]|\$\$)+`)))
- for p.VarUse() != nil || lexer.SkipRegexp(re) {
- }
- return lexer.Since(start)
-}
-
-// varUseModifierSubst parses a :S,from,to, or a :C,from,to, modifier.
-func (p *MkParser) varUseModifierSubst(closing byte) (ok bool, regex bool, from string, to string, options string) {
- lexer := p.lexer
- regex = lexer.PeekByte() == 'C'
- lexer.Skip(1 /* the initial S or C */)
-
- sep := lexer.PeekByte() // bmake allows _any_ separator, even letters.
- if sep == -1 || byte(sep) == closing {
- return
- }
-
- lexer.Skip(1)
- separator := byte(sep)
-
- unescape := func(s string) string {
- return strings.Replace(s, "\\"+string(separator), string(separator), -1)
- }
-
- isOther := func(b byte) bool {
- return b != separator && b != '$' && b != '\\'
- }
-
- skipOther := func() {
- for {
- switch {
-
- case p.VarUse() != nil:
- break
-
- case lexer.SkipString("$$"):
- break
-
- case len(lexer.Rest()) >= 2 && lexer.PeekByte() == '\\' && separator != '\\':
- _ = lexer.Skip(2)
-
- case lexer.NextBytesFunc(isOther) != "":
- break
-
- default:
- return
- }
- }
- }
-
- fromStart := lexer.Mark()
- lexer.SkipByte('^')
- skipOther()
- lexer.SkipByte('$')
- from = unescape(lexer.Since(fromStart))
-
- if !lexer.SkipByte(separator) {
- return
- }
-
- toStart := lexer.Mark()
- skipOther()
- to = unescape(lexer.Since(toStart))
-
- if !lexer.SkipByte(separator) {
- return
- }
-
- optionsStart := lexer.Mark()
- lexer.NextBytesFunc(func(b byte) bool { return b == '1' || b == 'g' || b == 'W' })
- options = lexer.Since(optionsStart)
-
- ok = true
- return
-}
-
-// varUseModifierAt parses a variable modifier like ":@v@echo ${v};@",
-// which expands the variable value in a loop.
-func (p *MkParser) varUseModifierAt(lexer *textproc.Lexer, varname string) bool {
- lexer.Skip(1 /* the initial @ */)
-
- loopVar := lexer.NextBytesSet(AlnumDot)
- if loopVar == "" || !lexer.SkipByte('@') {
- return false
- }
-
- re := regcomp(`^([^$@\\]|\\.)+`)
- for p.VarUse() != nil || lexer.SkipString("$$") || lexer.SkipRegexp(re) {
- }
-
- if !lexer.SkipByte('@') && p.EmitWarnings {
- p.Line.Warnf("Modifier ${%s:@%s@...@} is missing the final \"@\".", varname, loopVar)
- }
-
- return true
+ mklex := NewMkLexer(text, line)
+ return &MkParser{line, mklex, mklex.lexer, line != nil}
}
// MkCond parses a condition like ${OPSYS} == "NetBSD".
//
// See devel/bmake/files/cond.c.
-//
-// FIXME: Move over to MkTokensParser
func (p *MkParser) MkCond() *MkCond {
and := p.mkCondAnd()
if and == nil {
@@ -556,7 +182,7 @@ func (p *MkParser) mkCondCompare() *MkCo
func (p *MkParser) mkCondTerm() *MkCondTerm {
lexer := p.lexer
- if rhs := p.VarUse(); rhs != nil {
+ if rhs := p.mklex.VarUse(); rhs != nil {
return &MkCondTerm{Var: rhs}
}
@@ -566,7 +192,7 @@ func (p *MkParser) mkCondTerm() *MkCondT
mark := lexer.Mark()
lexer.Skip(1)
- if quotedRHS := p.VarUse(); quotedRHS != nil {
+ if quotedRHS := p.mklex.VarUse(); quotedRHS != nil {
if lexer.SkipByte('"') {
return &MkCondTerm{Var: quotedRHS}
}
@@ -579,7 +205,7 @@ loop:
for {
m := lexer.Mark()
switch {
- case p.VarUse() != nil,
+ case p.mklex.VarUse() != nil,
lexer.NextBytesSet(textproc.Alnum) != "",
lexer.NextBytesFunc(func(b byte) bool { return b != '"' && b != '\\' }) != "":
rhsText.WriteString(lexer.Since(m))
@@ -612,14 +238,14 @@ func (p *MkParser) mkCondFunc() *MkCond
switch funcName {
case "defined":
- varname := p.Varname()
+ varname := p.mklex.Varname()
if varname != "" && lexer.SkipByte(')') {
return &MkCond{Defined: varname}
}
case "empty":
- if varname := p.Varname(); varname != "" {
- modifiers := p.VarUseModifiers(varname, ')')
+ if varname := p.mklex.Varname(); varname != "" {
+ modifiers := p.mklex.VarUseModifiers(varname, ')')
if lexer.SkipByte(')') {
return &MkCond{Empty: &MkVarUse{varname, modifiers}}
}
@@ -631,7 +257,7 @@ func (p *MkParser) mkCondFunc() *MkCond
case "commands", "exists", "make", "target":
argMark := lexer.Mark()
- for p.VarUse() != nil || lexer.NextBytesFunc(func(b byte) bool { return b != '$' && b != ')' }) != "" {
+ for p.mklex.VarUse() != nil || lexer.NextBytesFunc(func(b byte) bool { return b != '$' && b != ')' }) != "" {
}
arg := lexer.Since(argMark)
if lexer.SkipByte(')') {
@@ -643,21 +269,6 @@ func (p *MkParser) mkCondFunc() *MkCond
return nil
}
-func (p *MkParser) Varname() string {
- lexer := p.lexer
-
- // TODO: duplicated code in MatchVarassign
- mark := lexer.Mark()
- lexer.SkipByte('.')
- for lexer.NextBytesSet(VarbaseBytes) != "" || p.VarUse() != nil {
- }
- if lexer.SkipByte('.') || hasPrefix(lexer.Since(mark), "SITES_") {
- for lexer.NextBytesSet(VarparamBytes) != "" || p.VarUse() != nil {
- }
- }
- return lexer.Since(mark)
-}
-
func (p *MkParser) Op() (bool, MkOperator) {
lexer := p.lexer
switch {
@@ -681,7 +292,7 @@ func (p *MkParser) PkgbasePattern() stri
start := lexer.Mark()
for {
- if p.VarUse() != nil ||
+ if p.mklex.VarUse() != nil ||
lexer.SkipRegexp(regcomp(`^[\w.*+,{}]+`)) ||
lexer.SkipRegexp(regcomp(`^\[[\w-]+\]`)) {
continue
@@ -716,7 +327,7 @@ func (*MkParser) isPkgbasePart(str strin
return true
}
- varUse := NewMkParser(nil, lexer.Rest()).VarUse()
+ varUse := NewMkLexer(lexer.Rest(), nil).VarUse()
if varUse != nil {
return !contains(varUse.varname, "VER") && len(varUse.modifiers) == 0
}
@@ -740,7 +351,7 @@ func (p *MkParser) Dependency() *Depende
parseVersion := func() string {
mark := lexer.Mark()
- for p.VarUse() != nil {
+ for p.mklex.VarUse() != nil {
}
if lexer.Since(mark) != "" {
return lexer.Since(mark)
@@ -799,7 +410,7 @@ func (p *MkParser) Dependency() *Depende
if lexer.SkipByte('-') && lexer.Rest() != "" {
versionMark := lexer.Mark()
- for p.VarUse() != nil || lexer.SkipRegexp(regcomp(`^[\w\[\]*_.\-]+`)) {
+ for p.mklex.VarUse() != nil || lexer.SkipRegexp(regcomp(`^[\w\[\]*_.\-]+`)) {
}
if !lexer.SkipString("{,nb*}") {
@@ -822,7 +433,7 @@ func (p *MkParser) Dependency() *Depende
// if there is a parse error or some trailing text.
// Parse errors are silently ignored.
func ToVarUse(str string) *MkVarUse {
- p := NewMkParser(nil, str)
+ p := NewMkLexer(str, nil)
varUse := p.VarUse()
if varUse == nil || !p.EOF() {
return nil
@@ -970,7 +581,7 @@ func (w *MkCondWalker) walkAtom(atom *Mk
func (w *MkCondWalker) walkStr(str string, callback *MkCondCallback) {
if callback.VarUse != nil {
- tokens := NewMkParser(nil, str).MkTokens()
+ tokens, _ := NewMkLexer(str, nil).MkTokens()
for _, token := range tokens {
if token.Varuse != nil {
callback.VarUse(token.Varuse)
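
For illustration, the :ts separator rule that the removed varUseModifier code above enforces (a single character, or an escape sequence such as \t, \n, or a numeric escape) can be sketched standalone like this; the function and names below are invented for the example and are not part of pkglint:

package main

import (
    "fmt"
    "regexp"
)

// numericEscape matches octal or decimal escapes such as \000012 or \124.
var numericEscape = regexp.MustCompile(`^\\\d+$`)

// validTsSeparator mirrors the rule stated in the warning explanation:
// the :ts separator may be empty, a single character, or an escape
// sequence like \t, \n, or a numeric escape.
func validTsSeparator(sep string) bool {
    switch {
    case len(sep) <= 1:
        return true
    case sep == `\t` || sep == `\n`:
        return true
    default:
        return numericEscape.MatchString(sep)
    }
}

func main() {
    for _, sep := range []string{"", ":", `\000012`, `\124`, "---"} {
        fmt.Printf("%-9q valid: %v\n", sep, validTsSeparator(sep))
    }
}

This matches the test expectations further below: ${VAR:ts---} produces the "Invalid separator" warning, while ${VAR:ts\000012} and ${VAR:ts\124} are accepted.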
Index: pkgsrc/pkgtools/pkglint/files/pkgsrc_test.go
diff -u pkgsrc/pkgtools/pkglint/files/pkgsrc_test.go:1.36 pkgsrc/pkgtools/pkglint/files/pkgsrc_test.go:1.37
--- pkgsrc/pkgtools/pkglint/files/pkgsrc_test.go:1.36 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/pkgsrc_test.go Wed Nov 27 22:10:07 2019
@@ -1,6 +1,10 @@
package pkglint
-import "gopkg.in/check.v1"
+import (
+ "gopkg.in/check.v1"
+ "os"
+ "path/filepath"
+)
func (s *Suite) Test_Pkgsrc__frozen(c *check.C) {
t := s.Init(c)
@@ -827,7 +831,7 @@ func (s *Suite) Test_Pkgsrc_Latest__not_
t.CheckEquals(latest, "")
t.CheckOutputLines(
- "ERROR: Cannot find package versions of \"^python[0-9]+$\" in \"~/lang\".")
+ "ERROR: ~/lang: Cannot find package versions of \"^python[0-9]+$\".")
}
// In 2017, PostgreSQL changed their versioning scheme to SemVer,
@@ -956,7 +960,7 @@ func (s *Suite) Test_Pkgsrc_ListVersions
c.Check(versions, check.HasLen, 0)
t.CheckOutputLines(
- "ERROR: Cannot find package versions of \"^python[0-9]+$\" in \"~/lang\".")
+ "ERROR: ~/lang: Cannot find package versions of \"^python[0-9]+$\".")
}
func (s *Suite) Test_Pkgsrc_ListVersions__no_subdirs(c *check.C) {
@@ -968,7 +972,7 @@ func (s *Suite) Test_Pkgsrc_ListVersions
c.Check(versions, check.HasLen, 0)
t.CheckOutputLines(
- "ERROR: Cannot find package versions of \"^python[0-9]+$\" in \"~/lang\".")
+ "ERROR: ~/lang: Cannot find package versions of \"^python[0-9]+$\".")
}
// Ensures that failed lookups are also cached since they can be assumed
@@ -980,7 +984,7 @@ func (s *Suite) Test_Pkgsrc_ListVersions
c.Check(versions, check.HasLen, 0)
t.CheckOutputLines(
- "ERROR: Cannot find package versions of \"^python[0-9]+$\" in \"~/lang\".")
+ "ERROR: ~/lang: Cannot find package versions of \"^python[0-9]+$\".")
versions2 := G.Pkgsrc.ListVersions("lang", `^python[0-9]+$`, "../../lang/$0", true)
@@ -1143,7 +1147,7 @@ func (s *Suite) Test_Pkgsrc_checkTopleve
t.Main("-r", "-Cglobal", ".")
t.CheckOutputLines(
- "WARN: ~/category/package2/Makefile:11: License file ~/licenses/missing does not exist.",
+ "WARN: ~/category/package2/Makefile:11: License file ../../licenses/missing does not exist.",
"WARN: ~/licenses/gnu-gpl-v2: This license seems to be unused.", // Added by Tester.SetUpPkgsrc
"WARN: ~/licenses/gnu-gpl-v3: This license seems to be unused.",
"3 warnings found.")
@@ -1169,6 +1173,147 @@ func (s *Suite) Test_Pkgsrc_ReadDir(c *c
t.CheckDeepEquals(names, []string{"aaa-subdir", "file", "subdir"})
}
+func (s *Suite) Test_Pkgsrc_Relpath(c *check.C) {
+ t := s.Init(c)
+
+ t.Chdir(".")
+ t.CheckEquals(G.Pkgsrc.topdir, t.tmpdir)
+
+ test := func(from, to Path, result Path) {
+ t.CheckEquals(G.Pkgsrc.Relpath(from, to), result)
+ }
+
+ // TODO: add tests going from each of (top, cat, pkg, pkgsub) to the others
+
+ test("some/dir", "some/directory", "../../some/directory")
+ test("some/directory", "some/dir", "../../some/dir")
+
+ test("category/package/.", ".", "../..")
+
+ // This case is handled by one of the shortcuts that avoid file system access.
+ test(
+ "./.",
+ "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
+ "meta-pkgs/kde/kf5.mk")
+
+ test(".hidden/dir", ".", "../..")
+ test("dir/.hidden", ".", "../..")
+
+ // This happens when "pkglint -r x11" is run.
+ G.Pkgsrc.topdir = "x11/.."
+
+ test(
+ "./.",
+ "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
+ "meta-pkgs/kde/kf5.mk")
+ test(
+ "x11/..",
+ "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
+ "meta-pkgs/kde/kf5.mk")
+
+ volume := NewPathSlash(filepath.VolumeName(t.tmpdir.String()))
+ G.Pkgsrc.topdir = volume.JoinNoClean("usr/pkgsrc")
+
+ // Taken from Test_MkLineChecker_CheckRelativePath__wip_mk
+ test(
+ G.Pkgsrc.File("wip/package"),
+ G.Pkgsrc.File("wip/package/../mk/git-package.mk"),
+ "../../wip/mk/git-package.mk")
+
+ // Taken from Test_Package_parse__relative
+ test(
+ G.Pkgsrc.File("category/package"),
+ G.Pkgsrc.File("category/package/../package/extra.mk"),
+ "extra.mk")
+
+ // Make sure that .. in later positions is resolved correctly as well.
+ test(
+ G.Pkgsrc.File("category/package"),
+ G.Pkgsrc.File("category/package/sub/../../package/extra.mk"),
+ "extra.mk")
+
+ G.Pkgsrc.topdir = t.tmpdir
+
+ test("some/dir", "some/dir/../..", "../..")
+ test("some/dir", "some/dir/./././../..", "../..")
+ test("some/dir", "some/dir/", ".")
+
+ test("some/dir", ".", "../..")
+ test("some/dir/.", ".", "../..")
+
+ chdir := func(path Path) {
+ // See Tester.Chdir; a direct Chdir works here since this test
+ // neither loads lines nor processes them.
+ assertNil(os.Chdir(path.String()), "Chdir %s", path)
+ G.cwd = abspath(path)
+ }
+
+ t.CreateFileLines("testdir/subdir/dummy")
+ chdir("testdir/subdir")
+
+ test(".", ".", ".")
+ test("./.", "./dir", "dir")
+
+ test("dir", ".", "..")
+ test("dir", "dir", ".")
+ test("dir", "dir/file", "file")
+ test("dir", "dir/..", "..")
+
+ test(".", "../../other/package", "../../other/package")
+
+ // Even though this path could be shortened to "../package",
+ // in pkgsrc the convention is to always go from a package
+ // directory up to the root and then back to the other package
+ // directory.
+ test(".", "../../testdir/package", "../../testdir/package")
+
+ chdir("..")
+
+ // When pkglint is run from a category directory to test
+ // the complete pkgsrc.
+ test("..", "../other/package", "other/package")
+
+ chdir(t.tmpdir.JoinNoClean(".."))
+
+ test(
+ "pkgsrc/category/package",
+ "pkgsrc/category/package/../../other/package",
+ "../../other/package")
+
+ test(
+ "pkgsrc/category/package",
+ "pkgsrc/category/package/../../category/other",
+ "../../category/other")
+
+ chdir(t.tmpdir.JoinNoClean("testdir").JoinNoClean("subdir"))
+
+ test("..", ".", "subdir")
+ test("../..", ".", "testdir/subdir")
+ test("../../", ".", "testdir/subdir")
+}
+
+func (s *Suite) Test_Pkgsrc_File(c *check.C) {
+ t := s.Init(c)
+
+ G.Pkgsrc.topdir = "$pkgsrcdir"
+
+ test := func(rel, resolved Path) {
+ t.CheckEquals(G.Pkgsrc.File(rel), resolved)
+ }
+
+ test(".", "$pkgsrcdir")
+ test("category", "$pkgsrcdir/category")
+
+ test(
+ "category/package/../../mk/bsd.prefs.mk",
+ "$pkgsrcdir/mk/bsd.prefs.mk")
+
+ G.Pkgsrc.topdir = "."
+
+ test(".", ".")
+ test("filename", "filename")
+}
+
func (s *Suite) Test_Change_Version(c *check.C) {
t := s.Init(c)
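
The new Test_Pkgsrc_Relpath above encodes the pkgsrc convention that a relative path between two packages always goes up to the pkgsrc root and back down, even when a shorter path exists. A rough standalone sketch of that convention, assuming both arguments are category/package directories relative to the pkgsrc root (this is not the actual Pkgsrc.Relpath implementation):

package main

import (
    "fmt"
    "path"
    "strings"
)

// pkgToPkgRel builds the canonical pkgsrc-style relative path from one
// package directory to another: one ".." per path component of the
// source directory, then the cleaned target path.
func pkgToPkgRel(from, to string) string {
    ups := strings.Repeat("../", len(strings.Split(path.Clean(from), "/")))
    return path.Clean(ups + path.Clean(to))
}

func main() {
    fmt.Println(pkgToPkgRel("category/package", "other/package"))
    // prints: ../../other/package
    fmt.Println(pkgToPkgRel("testdir/package", "testdir/other"))
    // prints: ../../testdir/other, even though "../other" would be shorter
}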
Index: pkgsrc/pkgtools/pkglint/files/util_test.go
diff -u pkgsrc/pkgtools/pkglint/files/util_test.go:1.36 pkgsrc/pkgtools/pkglint/files/util_test.go:1.37
--- pkgsrc/pkgtools/pkglint/files/util_test.go:1.36 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/util_test.go Wed Nov 27 22:10:07 2019
@@ -89,6 +89,11 @@ func (s *Suite) Test_isEmptyDir(c *check
t.CheckEquals(isEmptyDir(t.File(".")), true)
t.CheckEquals(isEmptyDir(t.File("CVS")), true)
+
+ t.Chdir(".")
+
+ t.CheckEquals(isEmptyDir("."), true)
+ t.CheckEquals(isEmptyDir("CVS"), true)
}
func (s *Suite) Test_isEmptyDir__and_getSubdirs(c *check.C) {
@@ -332,60 +337,6 @@ func (s *Suite) Test__regex_ReplaceFirst
t.CheckEquals(rest, "X+c+d")
}
-func (s *Suite) Test_relpath(c *check.C) {
- t := s.Init(c)
-
- t.Chdir(".")
- t.CheckEquals(G.Pkgsrc.topdir, t.tmpdir)
-
- test := func(from, to Path, result Path) {
- t.CheckEquals(relpath(from, to), result)
- }
-
- test("some/dir", "some/directory", "../../some/directory")
- test("some/directory", "some/dir", "../../some/dir")
-
- test("category/package/.", ".", "../..")
-
- // This case is handled by one of the shortcuts that avoid file system access.
- test(
- "./.",
- "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
- "meta-pkgs/kde/kf5.mk")
-
- test(".hidden/dir", ".", "../..")
- test("dir/.hidden", ".", "../..")
-
- // This happens when "pkglint -r x11" is run.
- G.Pkgsrc.topdir = "x11/.."
-
- test(
- "./.",
- "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
- "meta-pkgs/kde/kf5.mk")
- test(
- "x11/..",
- "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
- "meta-pkgs/kde/kf5.mk")
-}
-
-// Relpath is called so often that handling the most common calls
-// without file system IO makes sense.
-func (s *Suite) Test_relpath__quick(c *check.C) {
- t := s.Init(c)
-
- test := func(from, to Path, result Path) {
- t.CheckEquals(relpath(from, to), result)
- }
-
- test("some/dir", "some/dir/../..", "../..")
- test("some/dir", "some/dir/./././../..", "../..")
- test("some/dir", "some/dir/", ".")
-
- test("some/dir", ".", "../..")
- test("some/dir/.", ".", "../..")
-}
-
func (s *Suite) Test_cleanpath(c *check.C) {
t := s.Init(c)
Index: pkgsrc/pkgtools/pkglint/files/mkparser_test.go
diff -u pkgsrc/pkgtools/pkglint/files/mkparser_test.go:1.35 pkgsrc/pkgtools/pkglint/files/mkparser_test.go:1.36
--- pkgsrc/pkgtools/pkglint/files/mkparser_test.go:1.35 Sun Nov 17 01:26:25 2019
+++ pkgsrc/pkgtools/pkglint/files/mkparser_test.go Wed Nov 27 22:10:07 2019
@@ -5,707 +5,6 @@ import (
"strings"
)
-func (s *Suite) Test_MkParser_MkTokens(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- testRest := func(input string, expectedTokens []*MkToken, expectedRest string) {
- line := t.NewLines("Test_MkParser_MkTokens.mk", input).Lines[0]
- p := NewMkParser(line, input)
- actualTokens := p.MkTokens()
- t.CheckDeepEquals(actualTokens, expectedTokens)
- for i, expectedToken := range expectedTokens {
- if i < len(actualTokens) {
- t.CheckDeepEquals(*actualTokens[i], *expectedToken)
- t.CheckDeepEquals(actualTokens[i].Varuse, expectedToken.Varuse)
- }
- }
- t.CheckEquals(p.Rest(), expectedRest)
- }
- test := func(input string, expectedToken *MkToken) {
- testRest(input, b.Tokens(expectedToken), "")
- }
- literal := b.TextToken
- varuse := b.VaruseToken
-
- // Everything except VarUses is passed through unmodified.
-
- test("literal",
- literal("literal"))
-
- test("\\/share\\/ { print \"share directory\" }",
- literal("\\/share\\/ { print \"share directory\" }"))
-
- test("find . -name \\*.orig -o -name \\*.pre",
- literal("find . -name \\*.orig -o -name \\*.pre"))
-
- test("-e 's|\\$${EC2_HOME.*}|EC2_HOME}|g'",
- literal("-e 's|\\$${EC2_HOME.*}|EC2_HOME}|g'"))
-
- test("$$var1 $$var2 $$? $$",
- literal("$$var1 $$var2 $$? $$"))
-
- testRest("hello, ${W:L:tl}orld",
- b.Tokens(
- literal("hello, "),
- varuse("W", "L", "tl"),
- literal("orld")),
- "")
-
- testRest("ftp://${PKGNAME}/ ${MASTER_SITES:=subdir/}",
- b.Tokens(
- literal("ftp://"),
- varuse("PKGNAME"),
- literal("/ "),
- varuse("MASTER_SITES", "=subdir/")),
- "")
-
- testRest("${VAR:S,a,b,c,d,e,f}",
- b.Tokens(b.VaruseTextToken("${VAR:S,a,b,c,d,e,f}", "VAR", "S,a,b,")),
- "")
- t.CheckOutputLines(
- "WARN: Test_MkParser_MkTokens.mk:1: Invalid variable modifier \"c,d,e,f\" for \"VAR\".")
-
- testRest("Text${VAR:Mmodifier}${VAR2}more text${VAR3}",
- b.Tokens(
- literal("Text"),
- varuse("VAR", "Mmodifier"),
- varuse("VAR2"),
- literal("more text"),
- varuse("VAR3")),
- "")
-}
-
-func (s *Suite) Test_MkParser_VarUse(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
- varuse := b.VaruseToken
- varuseText := b.VaruseTextToken
-
- testRest := func(input string, expectedTokens []*MkToken, expectedRest string, diagnostics ...string) {
- line := t.NewLines("Test_MkParser_VarUse.mk", input).Lines[0]
- p := NewMkParser(line, input)
-
- actualTokens := p.MkTokens()
-
- t.CheckDeepEquals(actualTokens, expectedTokens)
- for i, expectedToken := range expectedTokens {
- if i < len(actualTokens) {
- t.CheckDeepEquals(*actualTokens[i], *expectedToken)
- t.CheckDeepEquals(actualTokens[i].Varuse, expectedToken.Varuse)
- }
- }
- t.CheckEquals(p.Rest(), expectedRest)
- t.CheckOutput(diagnostics)
- }
- test := func(input string, expectedToken *MkToken, diagnostics ...string) {
- testRest(input, b.Tokens(expectedToken), "", diagnostics...)
- }
-
- t.Use(testRest, test, varuse, varuseText)
-
- test("${VARIABLE}",
- varuse("VARIABLE"))
-
- test("${VARIABLE.param}",
- varuse("VARIABLE.param"))
-
- test("${VARIABLE.${param}}",
- varuse("VARIABLE.${param}"))
-
- test("${VARIABLE.hicolor-icon-theme}",
- varuse("VARIABLE.hicolor-icon-theme"))
-
- test("${VARIABLE.gtk+extra}",
- varuse("VARIABLE.gtk+extra"))
-
- test("${VARIABLE:S/old/new/}",
- varuse("VARIABLE", "S/old/new/"))
-
- test("${GNUSTEP_LFLAGS:S/-L//g}",
- varuse("GNUSTEP_LFLAGS", "S/-L//g"))
-
- test("${SUSE_VERSION:S/.//}",
- varuse("SUSE_VERSION", "S/.//"))
-
- test("${MASTER_SITE_GNOME:=sources/alacarte/0.13/}",
- varuse("MASTER_SITE_GNOME", "=sources/alacarte/0.13/"))
-
- test("${INCLUDE_DIRS:H:T}",
- varuse("INCLUDE_DIRS", "H", "T"))
-
- test("${A.${B.${C.${D}}}}",
- varuse("A.${B.${C.${D}}}"))
-
- test("${RUBY_VERSION:C/([0-9]+)\\.([0-9]+)\\.([0-9]+)/\\1/}",
- varuse("RUBY_VERSION", "C/([0-9]+)\\.([0-9]+)\\.([0-9]+)/\\1/"))
-
- test("${PERL5_${_var_}:Q}",
- varuse("PERL5_${_var_}", "Q"))
-
- test("${PKGNAME_REQD:C/(^.*-|^)py([0-9][0-9])-.*/\\2/}",
- varuse("PKGNAME_REQD", "C/(^.*-|^)py([0-9][0-9])-.*/\\2/"))
-
- test("${PYLIB:S|/|\\\\/|g}",
- varuse("PYLIB", "S|/|\\\\/|g"))
-
- test("${PKGNAME_REQD:C/ruby([0-9][0-9]+)-.*/\\1/}",
- varuse("PKGNAME_REQD", "C/ruby([0-9][0-9]+)-.*/\\1/"))
-
- test("${RUBY_SHLIBALIAS:S/\\//\\\\\\//}",
- varuse("RUBY_SHLIBALIAS", "S/\\//\\\\\\//"))
-
- test("${RUBY_VER_MAP.${RUBY_VER}:U${RUBY_VER}}",
- varuse("RUBY_VER_MAP.${RUBY_VER}", "U${RUBY_VER}"))
-
- test("${RUBY_VER_MAP.${RUBY_VER}:U18}",
- varuse("RUBY_VER_MAP.${RUBY_VER}", "U18"))
-
- test("${CONFIGURE_ARGS:S/ENABLE_OSS=no/ENABLE_OSS=yes/g}",
- varuse("CONFIGURE_ARGS", "S/ENABLE_OSS=no/ENABLE_OSS=yes/g"))
-
- test("${PLIST_RUBY_DIRS:S,DIR=\"PREFIX/,DIR=\",}",
- varuse("PLIST_RUBY_DIRS", "S,DIR=\"PREFIX/,DIR=\","))
-
- test("${LDFLAGS:S/-Wl,//g:Q}",
- varuse("LDFLAGS", "S/-Wl,//g", "Q"))
-
- test("${_PERL5_REAL_PACKLIST:S/^/${DESTDIR}/}",
- varuse("_PERL5_REAL_PACKLIST", "S/^/${DESTDIR}/"))
-
- test("${_PYTHON_VERSION:C/^([0-9])/\\1./1}",
- varuse("_PYTHON_VERSION", "C/^([0-9])/\\1./1"))
-
- test("${PKGNAME:S/py${_PYTHON_VERSION}/py${i}/}",
- varuse("PKGNAME", "S/py${_PYTHON_VERSION}/py${i}/"))
-
- test("${PKGNAME:C/-[0-9].*$/-[0-9]*/}",
- varuse("PKGNAME", "C/-[0-9].*$/-[0-9]*/"))
-
- // TODO: Does the $@ refer to ${.TARGET}, or is it just an unmatchable
- // regular expression?
- test("${PKGNAME:C/$@/target?/}",
- varuse("PKGNAME", "C/$@/target?/"))
-
- test("${PKGNAME:S/py${_PYTHON_VERSION}/py${i}/:C/-[0-9].*$/-[0-9]*/}",
- varuse("PKGNAME", "S/py${_PYTHON_VERSION}/py${i}/", "C/-[0-9].*$/-[0-9]*/"))
-
- test("${_PERL5_VARS:tl:S/^/-V:/}",
- varuse("_PERL5_VARS", "tl", "S/^/-V:/"))
-
- test("${_PERL5_VARS_OUT:M${_var_:tl}=*:S/^${_var_:tl}=${_PERL5_PREFIX:=/}//}",
- varuse("_PERL5_VARS_OUT", "M${_var_:tl}=*", "S/^${_var_:tl}=${_PERL5_PREFIX:=/}//"))
-
- test("${RUBY${RUBY_VER}_PATCHLEVEL}",
- varuse("RUBY${RUBY_VER}_PATCHLEVEL"))
-
- test("${DISTFILES:M*.gem}",
- varuse("DISTFILES", "M*.gem"))
-
- test("${LOCALBASE:S^/^_^}",
- varuse("LOCALBASE", "S^/^_^"))
-
- test("${SOURCES:%.c=%.o}",
- varuse("SOURCES", "%.c=%.o"))
-
- test("${GIT_TEMPLATES:@.t.@ ${EGDIR}/${GIT_TEMPLATEDIR}/${.t.} ${PREFIX}/${GIT_CORE_TEMPLATEDIR}/${.t.} @:M*}",
- varuse("GIT_TEMPLATES", "@.t.@ ${EGDIR}/${GIT_TEMPLATEDIR}/${.t.} ${PREFIX}/${GIT_CORE_TEMPLATEDIR}/${.t.} @", "M*"))
-
- test("${DISTNAME:C:_:-:}",
- varuse("DISTNAME", "C:_:-:"))
-
- test("${CF_FILES:H:O:u:S@^@${PKG_SYSCONFDIR}/@}",
- varuse("CF_FILES", "H", "O", "u", "S@^@${PKG_SYSCONFDIR}/@"))
-
- test("${ALT_GCC_RTS:S%${LOCALBASE}%%:S%/%%}",
- varuse("ALT_GCC_RTS", "S%${LOCALBASE}%%", "S%/%%"))
-
- test("${PREFIX:C;///*;/;g:C;/$;;}",
- varuse("PREFIX", "C;///*;/;g", "C;/$;;"))
-
- test("${GZIP_CMD:[1]:Q}",
- varuse("GZIP_CMD", "[1]", "Q"))
-
- test("${RUBY_RAILS_SUPPORTED:[#]}",
- varuse("RUBY_RAILS_SUPPORTED", "[#]"))
-
- test("${GZIP_CMD:[asdf]:Q}",
- varuseText("${GZIP_CMD:[asdf]:Q}", "GZIP_CMD", "Q"),
- "WARN: Test_MkParser_VarUse.mk:1: Invalid variable modifier \"[asdf]\" for \"GZIP_CMD\".")
-
- test("${DISTNAME:C/-[0-9]+$$//:C/_/-/}",
- varuse("DISTNAME", "C/-[0-9]+$$//", "C/_/-/"))
-
- test("${DISTNAME:slang%=slang2%}",
- varuse("DISTNAME", "slang%=slang2%"))
-
- test("${OSMAP_SUBSTVARS:@v@-e 's,\\@${v}\\@,${${v}},g' @}",
- varuse("OSMAP_SUBSTVARS", "@v@-e 's,\\@${v}\\@,${${v}},g' @"))
-
- test("${BRANDELF:D${BRANDELF} -t Linux ${LINUX_LDCONFIG}:U${TRUE}}",
- varuse("BRANDELF", "D${BRANDELF} -t Linux ${LINUX_LDCONFIG}", "U${TRUE}"))
-
- test("${${_var_}.*}",
- varuse("${_var_}.*"))
-
- test("${OPTIONS:@opt@printf 'Option %s is selected\n' ${opt:Q}';@}",
- varuse("OPTIONS", "@opt@printf 'Option %s is selected\n' ${opt:Q}';@"))
-
- /* weird features */
- test("${${EMACS_VERSION_MAJOR}>22:?@comment :}",
- varuse("${EMACS_VERSION_MAJOR}>22", "?@comment :"))
-
- test("${empty(CFLAGS):?:-cflags ${CFLAGS:Q}}",
- varuse("empty(CFLAGS)", "?:-cflags ${CFLAGS:Q}"))
-
- test("${${PKGSRC_COMPILER}==gcc:?gcc:cc}",
- varuse("${PKGSRC_COMPILER}==gcc", "?gcc:cc"))
-
- test("${${XKBBASE}/xkbcomp:L:Q}",
- varuse("${XKBBASE}/xkbcomp", "L", "Q"))
-
- test("${${PKGBASE} ${PKGVERSION}:L}",
- varuse("${PKGBASE} ${PKGVERSION}", "L"))
-
- // The variable name is optional; the variable with the empty name always
- // evaluates to the empty string. Bmake actively prevents this variable from
- // ever being defined. Therefore the :U branch is always taken, and this
- // in turn is used to implement the variables from the .for loops.
- test("${:U}",
- varuse("", "U"))
-
- test("${:Ufixed value}",
- varuse("", "Ufixed value"))
-
- // This complicated expression returns the major.minor.patch version
- // of the package given in ${d}.
- //
- // The :L modifier interprets the variable name not as a variable name
- // but takes it as the variable value. Followed by the :sh modifier,
- // this combination evaluates to the output of pkg_info.
- //
- // In this output, all non-digit characters are replaced with spaces so
- // that the remaining value is a space-separated list of version parts.
- // From these parts, the first 3 are taken and joined using a dot as separator.
- test("${${${PKG_INFO} -E ${d} || echo:L:sh}:L:C/[^[0-9]]*/ /g:[1..3]:ts.}",
- varuse("${${PKG_INFO} -E ${d} || echo:L:sh}", "L", "C/[^[0-9]]*/ /g", "[1..3]", "ts."))
-
- // For :S and :C, the colon can be left out. It's confusing but possible.
- test("${VAR:S/-//S/.//}",
- varuseText("${VAR:S/-//S/.//}", "VAR", "S/-//", "S/.//"))
-
- // The :S and :C modifiers accept an arbitrary character as separator. Here it is "a".
- test("${VAR:Sahara}",
- varuse("VAR", "Sahara"))
-
- // The separator character can be left out, which means empty.
- test("${VAR:ts}",
- varuse("VAR", "ts"))
-
- // The separator character can be a long octal number.
- test("${VAR:ts\\000012}",
- varuse("VAR", "ts\\000012"))
-
- // Or even decimal.
- test("${VAR:ts\\124}",
- varuse("VAR", "ts\\124"))
-
- // The :ts modifier only takes single-character separators.
- test("${VAR:ts---}",
- varuse("VAR", "ts---"),
- "WARN: Test_MkParser_VarUse.mk:1: Invalid separator \"---\" for :ts modifier of \"VAR\".")
-
- test("$<",
- varuseText("$<", "<")) // Same as ${.IMPSRC}
-
- test("$(GNUSTEP_USER_ROOT)",
- varuseText("$(GNUSTEP_USER_ROOT)", "GNUSTEP_USER_ROOT"),
- "WARN: Test_MkParser_VarUse.mk:1: Please use curly braces {} instead of round parentheses () for GNUSTEP_USER_ROOT.")
-
- // Opening brace, closing parenthesis.
- // Warnings are only printed for balanced expressions.
- test("${VAR)",
- varuseText("${VAR)", "VAR)"),
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"VAR)\".",
- "WARN: Test_MkParser_VarUse.mk:1: Invalid part \")\" after variable name \"VAR\".")
-
- // Opening parenthesis, closing brace
- // Warnings are only printed for balanced expressions.
- test("$(VAR}",
- varuseText("$(VAR}", "VAR}"),
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \")\" for \"VAR}\".",
- "WARN: Test_MkParser_VarUse.mk:1: Invalid part \"}\" after variable name \"VAR\".")
-
- test("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}@}",
- varuse("PLIST_SUBST_VARS", "@var@${var}=${${var}:Q}@"))
-
- test("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}}",
- varuseText("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}}",
- "PLIST_SUBST_VARS", "@var@${var}=${${var}:Q}}"),
- "WARN: Test_MkParser_VarUse.mk:1: Modifier ${PLIST_SUBST_VARS:@var@...@} is missing the final \"@\".",
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"PLIST_SUBST_VARS\".")
-
- // The replacement text may include closing braces, which is useful
- // for AWK programs.
- test("${PLIST_SUBST_VARS:@var@{${var}}@}",
- varuseText("${PLIST_SUBST_VARS:@var@{${var}}@}",
- "PLIST_SUBST_VARS", "@var@{${var}}@"),
- nil...)
-
- // Unfinished variable use
- test("${",
- varuseText("${", ""),
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"\".")
-
- // Unfinished nested variable use
- test("${${",
- varuseText("${${", "${"),
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"\".",
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"${\".")
-
- test("${arbitrary :Mpattern:---:Q}",
- varuseText("${arbitrary :Mpattern:---:Q}", "arbitrary ", "Mpattern", "Q"),
- // TODO: Swap the order of these messages
- "WARN: Test_MkParser_VarUse.mk:1: Invalid variable modifier \"---\" for \"arbitrary \".",
- "WARN: Test_MkParser_VarUse.mk:1: Invalid part \" \" after variable name \"arbitrary\".")
-
- // Variable names containing spaces do not occur in pkgsrc.
- // Technically they are possible:
- //
- // VARNAME= name with spaces
- // ${VARNAME}= value
- //
- // all:
- // @echo ${name with spaces:Q}''
- test("${arbitrary text}",
- varuse("arbitrary text"),
- "WARN: Test_MkParser_VarUse.mk:1: Invalid part \" text\" after variable name \"arbitrary\".")
-}
-
-func (s *Suite) Test_MkParser_VarUse__ambiguous(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- t.SetUpCommandLine("--explain")
-
- line := t.NewLine("module.mk", 123, "\t$Varname $X")
- p := NewMkParser(line, line.Text[1:])
-
- tokens := p.MkTokens()
- t.CheckDeepEquals(tokens, b.Tokens(
- b.VaruseTextToken("$V", "V"),
- b.TextToken("arname "),
- b.VaruseTextToken("$X", "X")))
-
- t.CheckOutputLines(
- "ERROR: module.mk:123: $Varname is ambiguous. Use ${Varname} if you mean a Make variable or $$Varname if you mean a shell variable.",
- "",
- "\tOnly the first letter after the dollar is the variable name.",
- "\tEverything following it is normal text, even if it looks like a",
- "\tvariable name to human readers.",
- "",
- "WARN: module.mk:123: $X is ambiguous. Use ${X} if you mean a Make variable or $$X if you mean a shell variable.",
- "",
- "\tIn its current form, this variable is parsed as a Make variable. For",
- "\thuman readers though, $x looks more like a shell variable than a",
- "\tMake variable, since Make variables are usually written using braces",
- "\t(BSD-style) or parentheses (GNU-style).",
- "")
-}
-
-// Pkglint can replace $(VAR) with ${VAR}. It doesn't look at all components
-// of nested variables though because this case is not important enough to
-// invest much development time. It occurs so seldom that it is acceptable
-// to run pkglint multiple times in such a case.
-func (s *Suite) Test_MkParser_VarUse__parentheses_autofix(c *check.C) {
- t := s.Init(c)
-
- t.SetUpCommandLine("--autofix")
- t.SetUpVartypes()
- lines := t.SetUpFileLines("Makefile",
- MkCvsID,
- "COMMENT=$(P1) $(P2)) $(P3:Q) ${BRACES} $(A.$(B.$(C)))")
- mklines := NewMkLines(lines)
-
- mklines.Check()
-
- t.CheckOutputLines(
- "AUTOFIX: ~/Makefile:2: Replacing \"$(P1)\" with \"${P1}\".",
- "AUTOFIX: ~/Makefile:2: Replacing \"$(P2)\" with \"${P2}\".",
- "AUTOFIX: ~/Makefile:2: Replacing \"$(P3:Q)\" with \"${P3:Q}\".",
- "AUTOFIX: ~/Makefile:2: Replacing \"$(C)\" with \"${C}\".")
- t.CheckFileLines("Makefile",
- MkCvsID,
- "COMMENT=${P1} ${P2}) ${P3:Q} ${BRACES} $(A.$(B.${C}))")
-}
-
-func (s *Suite) Test_MkParser_VarUseModifiers(c *check.C) {
- t := s.Init(c)
-
- varUse := NewMkTokenBuilder().VarUse
- test := func(text string, varUse *MkVarUse, diagnostics ...string) {
- line := t.NewLine("Makefile", 20, "\t"+text)
- p := NewMkParser(line, text)
-
- actual := p.VarUse()
-
- t.CheckDeepEquals(actual, varUse)
- t.CheckEquals(p.Rest(), "")
- t.CheckOutput(diagnostics)
- }
-
- // The !command! modifier is used so seldom that pkglint does not
- // check whether the command is actually valid.
- // At least not while parsing the modifier since at this point it might
- // still be unknown which of the commands can be used and which cannot.
- test("${VAR:!command!}", varUse("VAR", "!command!"))
-
- test("${VAR:!command}", varUse("VAR"),
- "WARN: Makefile:20: Invalid variable modifier \"!command\" for \"VAR\".")
-
- test("${VAR:command!}", varUse("VAR"),
- "WARN: Makefile:20: Invalid variable modifier \"command!\" for \"VAR\".")
-
- // The :L modifier makes the variable value "echo hello", and the :[1]
- // modifier extracts the "echo".
- test("${echo hello:L:[1]}", varUse("echo hello", "L", "[1]"))
-
- // bmake ignores the :[3] modifier, and the :L modifier just returns the
- // variable name, in this case BUILD_DIRS.
- test("${BUILD_DIRS:[3]:L}", varUse("BUILD_DIRS", "[3]", "L"))
-
- test("${PATH:ts::Q}", varUse("PATH", "ts:", "Q"))
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__invalid_ts_modifier_with_warning(c *check.C) {
- t := s.Init(c)
-
- t.SetUpCommandLine("-Wall", "--explain")
- line := t.NewLine("filename.mk", 123, "${VAR:tsabc}")
- p := NewMkParser(line, "tsabc}")
-
- modifier := p.varUseModifier("VAR", '}')
-
- t.CheckEquals(modifier, "tsabc")
- t.CheckEquals(p.Rest(), "}")
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid separator \"abc\" for :ts modifier of \"VAR\".",
- "",
- "\tThe separator for the :ts modifier must be either a single character",
- "\tor an escape sequence like \\t or \\n or an octal or decimal escape",
- "\tsequence; see the bmake man page for further details.",
- "")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__invalid_ts_modifier_without_warning(c *check.C) {
- t := s.Init(c)
-
- p := NewMkParser(nil, "tsabc}")
-
- modifier := p.varUseModifier("VAR", '}')
-
- t.CheckEquals(modifier, "tsabc")
- t.CheckEquals(p.Rest(), "}")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__square_bracket(c *check.C) {
- t := s.Init(c)
-
- line := t.NewLine("filename.mk", 123, "\t${VAR:[asdf]}")
- p := NewMkParser(line, "[asdf]")
-
- modifier := p.varUseModifier("VAR", '}')
-
- t.CheckEquals(modifier, "")
- t.CheckEquals(p.Rest(), "")
-
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"[asdf]\" for \"VAR\".")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__condition_without_colon(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "${${VAR}:?yes:no}${${VAR}:?yes}")
- p := NewMkParser(line, line.Text)
-
- varUse1 := p.VarUse()
- varUse2 := p.VarUse()
-
- t.CheckDeepEquals(varUse1, b.VarUse("${VAR}", "?yes:no"))
- t.CheckDeepEquals(varUse2, b.VarUse("${VAR}"))
- t.CheckEquals(p.Rest(), "")
-
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"?yes\" for \"${VAR}\".")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__malformed_in_parentheses(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "$(${VAR}:?yes)")
- p := NewMkParser(line, line.Text)
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("${VAR}"))
- t.CheckEquals(p.Rest(), "")
-
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"?yes\" for \"${VAR}\".",
- "WARN: filename.mk:123: Please use curly braces {} instead of round parentheses () for ${VAR}.")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__varuse_in_malformed_modifier(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "${${VAR}:?yes${INNER}}")
- p := NewMkParser(line, line.Text)
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("${VAR}"))
- t.CheckEquals(p.Rest(), "")
-
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"?yes${INNER}\" for \"${VAR}\".")
-}
-
-func (s *Suite) Test_MkParser_varUseModifierSubst(c *check.C) {
- t := s.Init(c)
-
- varUse := NewMkTokenBuilder().VarUse
- test := func(text string, varUse *MkVarUse, rest string, diagnostics ...string) {
- line := t.NewLine("Makefile", 20, "\t"+text)
- p := NewMkParser(line, text)
-
- actual := p.VarUse()
-
- t.CheckDeepEquals(actual, varUse)
- t.CheckEquals(p.Rest(), rest)
- t.CheckOutput(diagnostics)
- }
-
- test("${VAR:S", varUse("VAR"), "",
- "WARN: Makefile:20: Invalid variable modifier \"S\" for \"VAR\".",
- "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
-
- test("${VAR:S}", varUse("VAR"), "",
- "WARN: Makefile:20: Invalid variable modifier \"S\" for \"VAR\".")
-
- test("${VAR:S,}", varUse("VAR"), "",
- "WARN: Makefile:20: Invalid variable modifier \"S,\" for \"VAR\".")
-
- test("${VAR:S,from,to}", varUse("VAR"), "",
- "WARN: Makefile:20: Invalid variable modifier \"S,from,to\" for \"VAR\".")
-
- test("${VAR:S,from,to,}", varUse("VAR", "S,from,to,"), "")
-
- test("${VAR:S,^from$,to,}", varUse("VAR", "S,^from$,to,"), "")
-
- test("${VAR:S,@F@,${F},}", varUse("VAR", "S,@F@,${F},"), "")
-
- test("${VAR:S,from,to,1}", varUse("VAR", "S,from,to,1"), "")
- test("${VAR:S,from,to,g}", varUse("VAR", "S,from,to,g"), "")
- test("${VAR:S,from,to,W}", varUse("VAR", "S,from,to,W"), "")
-
- test("${VAR:S,from,to,1gW}", varUse("VAR", "S,from,to,1gW"), "")
-
- // Inside the :S or :C modifiers, neither a colon nor the closing
- // brace need to be escaped. Otherwise these patterns would become
- // too difficult to read and write.
- test("${VAR:C/[[:alnum:]]{2}/**/g}",
- varUse("VAR", "C/[[:alnum:]]{2}/**/g"),
- "")
-
- // Some pkgsrc users really explore the darkest corners of bmake by using
- // the backslash as the separator in the :S modifier. Sure, it works, it
- // just looks totally unexpected to the average pkgsrc reader.
- //
- // Using the backslash as separator means that it cannot be used for anything
- // else, not even for escaping other characters.
- test("${VAR:S\\.post1\\\\1}",
- varUse("VAR", "S\\.post1\\\\1"),
- "")
-}
-
-func (s *Suite) Test_MkParser_varUseModifierAt__missing_at_after_variable_name(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "${VAR:@varname}")
- p := NewMkParser(line, line.Text)
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("VAR"))
- t.CheckEquals(p.Rest(), "")
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"@varname\" for \"VAR\".")
-}
-
-func (s *Suite) Test_MkParser_varUseModifierAt__dollar(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "${VAR:@var@$$var@}")
- p := NewMkParser(line, line.Text)
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("VAR", "@var@$$var@"))
- t.CheckEquals(p.Rest(), "")
- t.CheckOutputEmpty()
-}
-
-func (s *Suite) Test_MkParser_varUseModifierAt__incomplete_without_warning(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- p := NewMkParser(nil, "${VAR:@var@$$var}rest")
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("VAR", "@var@$$var}rest"))
- t.CheckEquals(p.Rest(), "")
- t.CheckOutputEmpty()
-}
-
-func (s *Suite) Test_MkParser_varUseModifierAt(c *check.C) {
- t := s.Init(c)
-
- varUse := NewMkTokenBuilder().VarUse
- test := func(text string, varUse *MkVarUse, rest string, diagnostics ...string) {
- line := t.NewLine("Makefile", 20, "\t"+text)
- p := NewMkParser(line, text)
-
- actual := p.VarUse()
-
- t.CheckDeepEquals(actual, varUse)
- t.CheckEquals(p.Rest(), rest)
- t.CheckOutput(diagnostics)
- }
-
- test("${VAR:@",
- varUse("VAR"),
- "",
- "WARN: Makefile:20: Invalid variable modifier \"@\" for \"VAR\".",
- "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
-
- test("${VAR:@i@${i}}", varUse("VAR", "@i@${i}}"), "",
- "WARN: Makefile:20: Modifier ${VAR:@i@...@} is missing the final \"@\".",
- "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
-
- test("${VAR:@i@${i}@}", varUse("VAR", "@i@${i}@"), "")
-
- test("${PKG_GROUPS:@g@${g:Q}:${PKG_GID.${g}:Q}@:C/:*$//g}",
- varUse("PKG_GROUPS", "@g@${g:Q}:${PKG_GID.${g}:Q}@", "C/:*$//g"),
- "")
-}
-
func (s *Suite) Test_MkParser_MkCond(c *check.C) {
t := s.Init(c)
b := NewMkTokenBuilder()
@@ -971,40 +270,6 @@ func (s *Suite) Test_MkParser_mkCondComp
t.CheckOutputEmpty()
}
-func (s *Suite) Test_MkParser_Varname(c *check.C) {
- t := s.Init(c)
-
- test := func(text string) {
- line := t.NewLine("filename.mk", 1, text)
- p := NewMkParser(line, text)
-
- varname := p.Varname()
-
- t.CheckEquals(varname, text)
- t.CheckEquals(p.Rest(), "")
- }
-
- testRest := func(text string, expectedVarname string, expectedRest string) {
- line := t.NewLine("filename.mk", 1, text)
- p := NewMkParser(line, text)
-
- varname := p.Varname()
-
- t.CheckEquals(varname, expectedVarname)
- t.CheckEquals(p.Rest(), expectedRest)
- }
-
- test("VARNAME")
- test("VARNAME.param")
- test("VARNAME.${param}")
- test("SITES_${param}")
- test("SITES_distfile-1.0.tar.gz")
- test("SITES.gtk+-2.0")
- test("PKGPATH.category/package")
-
- testRest("VARNAME/rest", "VARNAME", "/rest")
-}
-
func (s *Suite) Test_MkParser_PkgbasePattern(c *check.C) {
t := s.Init(c)
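
The removed Test_MkParser_VarUse__ambiguous above relies on the make rule, also quoted in the explanation text, that only the first letter after a bare dollar sign is the variable name. A tiny standalone sketch of that tokenization rule (the helper below is invented for illustration):

package main

import "fmt"

// splitBareVarUse splits a token such as "$Varname" into the
// single-letter variable use "$V" and the trailing literal "arname".
func splitBareVarUse(token string) (varUse, rest string) {
    if len(token) >= 2 && token[0] == '$' {
        return token[:2], token[2:]
    }
    return "", token
}

func main() {
    use, rest := splitBareVarUse("$Varname")
    fmt.Printf("varuse=%q rest=%q\n", use, rest)
    // prints: varuse="$V" rest="arname"
}

That is exactly why the removed test expects the tokens $V, "arname " and $X for the input "$Varname $X".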
Index: pkgsrc/pkgtools/pkglint/files/mkshparser.go
diff -u pkgsrc/pkgtools/pkglint/files/mkshparser.go:1.17 pkgsrc/pkgtools/pkglint/files/mkshparser.go:1.18
--- pkgsrc/pkgtools/pkglint/files/mkshparser.go:1.17 Sun Nov 17 01:26:25 2019
+++ pkgsrc/pkgtools/pkglint/files/mkshparser.go Wed Nov 27 22:10:07 2019
@@ -238,7 +238,7 @@ func (lex *ShellLexer) Lex(lval *shyySym
lex.atCommandStart = false
case lex.atCommandStart && matches(token, `^[A-Za-z_]\w*=`):
ttype = tkASSIGNMENT_WORD
- p := NewShTokenizer(dummyLine, token, false) // Just for converting the string to a ShToken
+ p := NewShTokenizer(nil, token, false)
lval.Word = p.ShToken()
case hasPrefix(token, "#"):
// This doesn't work for multiline shell programs.
@@ -246,7 +246,7 @@ func (lex *ShellLexer) Lex(lval *shyySym
return 0
default:
ttype = tkWORD
- p := NewShTokenizer(dummyLine, token, false) // Just for converting the string to a ShToken
+ p := NewShTokenizer(nil, token, false)
lval.Word = p.ShToken()
lex.atCommandStart = false
Index: pkgsrc/pkgtools/pkglint/files/mktypes_test.go
diff -u pkgsrc/pkgtools/pkglint/files/mktypes_test.go:1.17 pkgsrc/pkgtools/pkglint/files/mktypes_test.go:1.18
--- pkgsrc/pkgtools/pkglint/files/mktypes_test.go:1.17 Sun Nov 17 01:26:25 2019
+++ pkgsrc/pkgtools/pkglint/files/mktypes_test.go Wed Nov 27 22:10:07 2019
@@ -39,16 +39,6 @@ func (MkTokenBuilder) VarUse(varname str
return &MkVarUse{varname, mods}
}
-// AddCommand adds a command directly to a list of commands,
-// creating all the intermediate nodes for the syntactic representation.
-// As soon as that representation is replaced with a semantic representation,
-// this method should no longer be necessary.
-func (list *MkShList) AddCommand(command *MkShCommand) *MkShList {
- pipeline := NewMkShPipeline(false, []*MkShCommand{command})
- andOr := NewMkShAndOr(pipeline)
- return list.AddAndOr(andOr)
-}
-
func (s *Suite) Test_MkVarUseModifier_MatchSubst(c *check.C) {
t := s.Init(c)
@@ -220,7 +210,7 @@ func (s *Suite) Test_MkVarUse_Mod(c *che
test := func(varUseText string, mod string) {
line := t.NewLine("filename.mk", 123, "")
- varUse := NewMkParser(line, varUseText).VarUse()
+ varUse := NewMkLexer(varUseText, line).VarUse()
t.CheckOutputEmpty()
t.CheckEquals(varUse.Mod(), mod)
}
Index: pkgsrc/pkgtools/pkglint/files/mkshparser_test.go
diff -u pkgsrc/pkgtools/pkglint/files/mkshparser_test.go:1.20 pkgsrc/pkgtools/pkglint/files/mkshparser_test.go:1.21
--- pkgsrc/pkgtools/pkglint/files/mkshparser_test.go:1.20 Sun Nov 17 01:26:25 2019
+++ pkgsrc/pkgtools/pkglint/files/mkshparser_test.go Wed Nov 27 22:10:07 2019
@@ -65,10 +65,11 @@ type ShSuite struct {
var _ = check.Suite(&ShSuite{})
func (s *ShSuite) SetUpTest(*check.C) {
- G = NewPkglint(nil, nil)
+ G = unusablePkglint()
}
func (s *ShSuite) TearDownTest(*check.C) {
+ s.t.ReportUncheckedOutput()
G = unusablePkglint()
}
@@ -268,10 +269,21 @@ func (s *ShSuite) Test_parseShellProgram
b.Words("*"),
b.List().AddCommand(b.SimpleCommand("echo", "$i")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "WARN: MkShBuilder.Token.mk:1: $i is ambiguous. Use ${i} if you "+
+ "mean a Make variable or $$i if you mean a shell variable.",
+ "WARN: ShSuite.test.mk:1: $i is ambiguous. Use ${i} if you "+
+ "mean a Make variable or $$i if you mean a shell variable.")
+
s.test("case $i in esac",
b.List().AddCommand(b.Case(
b.Token("$i"))))
+ s.t.CheckOutputLines(
+ "WARN: MkShBuilder.Token.mk:1: $i is ambiguous. Use ${i} if you "+
+ "mean a Make variable or $$i if you mean a shell variable.",
+ "WARN: ShSuite.test.mk:1: $i is ambiguous. Use ${i} if you "+
+ "mean a Make variable or $$i if you mean a shell variable.")
}
func (s *ShSuite) Test_parseShellProgram__subshell(c *check.C) {
@@ -312,6 +324,12 @@ func (s *ShSuite) Test_parseShellProgram
b.Words("\"$$@\""),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
// Only linebreak is allowed, but not semicolon.
s.test("for var \n do echo $var ; done",
b.List().AddCommand(b.For(
@@ -319,30 +337,60 @@ func (s *ShSuite) Test_parseShellProgram
b.Words("\"$$@\""),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var in a b c ; do echo $var ; done",
b.List().AddCommand(b.For(
"var",
b.Words("a", "b", "c"),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var \n \n \n in a b c ; do echo $var ; done",
b.List().AddCommand(b.For(
"var",
b.Words("a", "b", "c"),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var \n in ; do echo $var ; done",
b.List().AddCommand(b.For(
"var",
nil,
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var in in esac ; do echo $var ; done",
b.List().AddCommand(b.For(
"var",
b.Words("in", "esac"),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var in \n do : ; done",
b.List().AddCommand(b.For(
"var",
@@ -366,6 +414,13 @@ func (s *ShSuite) Test_parseShellProgram
s.test("case $var in esac",
b.List().AddCommand(b.Case(b.Token("$var"))))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. "+
+ "Use ${var} if you mean a Make variable "+
+ "or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("case selector in pattern) ;; pattern) esac",
b.List().AddCommand(b.Case(
b.Token("selector"),
@@ -658,7 +713,9 @@ func (s *ShSuite) Test_parseShellProgram
func (s *ShSuite) init(c *check.C) *MkShBuilder {
s.c = c
- s.t = &Tester{c: c, testName: c.TestName()}
+ tmpdir := NewPath("The ShSuite tests don't need a temporary directory")
+ s.t = &Tester{c: c, testName: c.TestName(), tmpdir: tmpdir}
+ G = NewPkglint(&s.t.stdout, &s.t.stderr)
return NewMkShBuilder()
}
@@ -666,7 +723,8 @@ func (s *ShSuite) test(program string, e
t := s.t
// See parseShellProgram
- tokens, rest := splitIntoShellTokens(dummyLine, program)
+ line := t.NewLine("ShSuite.test.mk", 1, "")
+ tokens, rest := splitIntoShellTokens(line, program)
t.CheckEquals(rest, "")
lexer := NewShellLexer(tokens, rest)
parser := shyyParserImpl{}
@@ -691,7 +749,8 @@ func (s *ShSuite) test(program string, e
func (s *ShSuite) testFail(program string, expectedRemaining ...string) {
t := s.t
- tokens, rest := splitIntoShellTokens(dummyLine, program)
+ line := t.NewLine("ShSuite.testFail.mk", 1, "")
+ tokens, rest := splitIntoShellTokens(line, program)
t.CheckEquals(rest, "")
lexer := ShellLexer{remaining: tokens, atCommandStart: true}
parser := shyyParserImpl{}
@@ -704,9 +763,11 @@ func (s *ShSuite) testFail(program strin
}
func (s *ShSuite) Test_ShellLexer_Lex__redirects(c *check.C) {
+ _ = s.init(c)
t := s.t
- tokens, rest := splitIntoShellTokens(dummyLine, "2>&1 <& <>file >>file <<EOF <<-EOF > /dev/stderr")
+ line := t.NewLine("filename.mk", 1, "")
+ tokens, rest := splitIntoShellTokens(line, "2>&1 <& <>file >>file <<EOF <<-EOF > /dev/stderr")
t.CheckDeepEquals(tokens, []string{"2>&", "1", "<&", "<>", "file", ">>", "file", "<<", "EOF", "<<-", "EOF", ">", "/dev/stderr"})
t.CheckEquals(rest, "")
@@ -757,7 +818,8 @@ func (s *ShSuite) Test_ShellLexer_Lex__k
t := s.t
testErr := func(program, error, remaining string) {
- tokens, rest := splitIntoShellTokens(dummyLine, program)
+ line := t.NewLine("filename.mk", 1, "")
+ tokens, rest := splitIntoShellTokens(line, program)
t.CheckEquals(rest, "")
lexer := ShellLexer{
@@ -944,7 +1006,8 @@ func (b *MkShBuilder) Redirected(cmd *Mk
}
func (b *MkShBuilder) Token(mktext string) *ShToken {
- tokenizer := NewShTokenizer(dummyLine, mktext, false)
+ line := NewLine("MkShBuilder.Token.mk", 1, "", &RawLine{1, "\n", "\n"})
+ tokenizer := NewShTokenizer(line, mktext, false)
token := tokenizer.ShToken()
assertf(tokenizer.parser.EOF(), "Invalid token: %q", tokenizer.parser.Rest())
return token
Index: pkgsrc/pkgtools/pkglint/files/mktypes.go
diff -u pkgsrc/pkgtools/pkglint/files/mktypes.go:1.20 pkgsrc/pkgtools/pkglint/files/mktypes.go:1.21
--- pkgsrc/pkgtools/pkglint/files/mktypes.go:1.20 Tue Nov 19 06:51:38 2019
+++ pkgsrc/pkgtools/pkglint/files/mktypes.go Wed Nov 27 22:10:07 2019
@@ -43,7 +43,7 @@ func (m MkVarUseModifier) IsSuffixSubst(
}
func (m MkVarUseModifier) MatchSubst() (ok bool, regex bool, from string, to string, options string) {
- p := NewMkParser(nil, m.Text)
+ p := NewMkLexer(m.Text, nil)
return p.varUseModifierSubst('}')
}
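
MatchSubst above hands the modifier text to varUseModifierSubst, which takes the character right after the S or C as the separator and splits the modifier into the "from" pattern, the "to" replacement and the options. A rough standalone sketch of that decomposition, ignoring escaping and nested variables (names invented for illustration):

package main

import (
    "fmt"
    "strings"
)

// splitSubst decomposes a substitution modifier such as "S,from,to,g"
// into from, to and options, using the character after S/C as separator.
func splitSubst(mod string) (from, to, options string, ok bool) {
    if len(mod) < 2 || (mod[0] != 'S' && mod[0] != 'C') {
        return
    }
    sep := string(mod[1])
    parts := strings.Split(mod[2:], sep)
    if len(parts) < 3 {
        return
    }
    return parts[0], parts[1], parts[2], true
}

func main() {
    from, to, opts, ok := splitSubst("S,from,to,g")
    fmt.Println(from, to, opts, ok)
    // prints: from to g true
}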
Index: pkgsrc/pkgtools/pkglint/files/shtokenizer.go
diff -u pkgsrc/pkgtools/pkglint/files/shtokenizer.go:1.20 pkgsrc/pkgtools/pkglint/files/shtokenizer.go:1.21
--- pkgsrc/pkgtools/pkglint/files/shtokenizer.go:1.20 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/shtokenizer.go Wed Nov 27 22:10:07 2019
@@ -3,14 +3,17 @@ package pkglint
import "netbsd.org/pkglint/textproc"
type ShTokenizer struct {
- parser *MkParser
+ parser *MkLexer
}
func NewShTokenizer(line *Line, text string, emitWarnings bool) *ShTokenizer {
// TODO: Switching to NewMkParser is nontrivial since emitWarnings must equal (line != nil).
// assert((line != nil) == emitWarnings)
- p := MkParser{line, textproc.NewLexer(text), emitWarnings}
- return &ShTokenizer{&p}
+ if line != nil {
+ emitWarnings = true
+ }
+ mklex := NewMkLexer(text, line)
+ return &ShTokenizer{mklex}
}
// ShAtom parses a basic building block of a shell program.
@@ -65,9 +68,9 @@ func (p *ShTokenizer) ShAtom(quoting ShQ
if atom == nil {
lexer.Reset(mark)
if hasPrefix(lexer.Rest(), "$${") {
- p.parser.Line.Warnf("Unclosed shell variable starting at %q.", shorten(lexer.Rest(), 20))
+ p.parser.line.Warnf("Unclosed shell variable starting at %q.", shorten(lexer.Rest(), 20))
} else {
- p.parser.Line.Warnf("Internal pkglint error in ShTokenizer.ShAtom at %q (quoting=%s).", lexer.Rest(), quoting)
+ p.parser.line.Warnf("Internal pkglint error in ShTokenizer.ShAtom at %q (quoting=%s).", lexer.Rest(), quoting)
}
}
return atom
Index: pkgsrc/pkgtools/pkglint/files/mkshtypes_test.go
diff -u pkgsrc/pkgtools/pkglint/files/mkshtypes_test.go:1.2 pkgsrc/pkgtools/pkglint/files/mkshtypes_test.go:1.3
--- pkgsrc/pkgtools/pkglint/files/mkshtypes_test.go:1.2 Mon Dec 17 00:15:39 2018
+++ pkgsrc/pkgtools/pkglint/files/mkshtypes_test.go Wed Nov 27 22:10:07 2019
@@ -3,3 +3,14 @@ package pkglint
func (list *MkShList) AddSemicolon() *MkShList { return list.AddSeparator(sepSemicolon) }
func (list *MkShList) AddBackground() *MkShList { return list.AddSeparator(sepBackground) }
func (list *MkShList) AddNewline() *MkShList { return list.AddSeparator(sepNewline) }
+
+// AddCommand adds a command directly to a list of commands,
+// creating all the intermediate nodes for the syntactic representation.
+//
+// As soon as that representation is replaced with a semantic representation,
+// this method should no longer be necessary.
+func (list *MkShList) AddCommand(command *MkShCommand) *MkShList {
+ pipeline := NewMkShPipeline(false, []*MkShCommand{command})
+ andOr := NewMkShAndOr(pipeline)
+ return list.AddAndOr(andOr)
+}
Index: pkgsrc/pkgtools/pkglint/files/mkshwalker_test.go
diff -u pkgsrc/pkgtools/pkglint/files/mkshwalker_test.go:1.10 pkgsrc/pkgtools/pkglint/files/mkshwalker_test.go:1.11
--- pkgsrc/pkgtools/pkglint/files/mkshwalker_test.go:1.10 Sun Jul 14 21:25:47 2019
+++ pkgsrc/pkgtools/pkglint/files/mkshwalker_test.go Wed Nov 27 22:10:07 2019
@@ -15,7 +15,8 @@ func (s *Suite) Test_MkShWalker_Walk(c *
}
test := func(program string, output ...string) {
- list, err := parseShellProgram(dummyLine, program)
+ line := t.NewLine("filename.mk", 1, "")
+ list, err := parseShellProgram(line, program)
if !c.Check(err, check.IsNil) || !c.Check(list, check.NotNil) {
return
@@ -244,7 +245,8 @@ func (s *Suite) Test_MkShWalker_Walk__em
t := s.Init(c)
test := func(program string) {
- list, err := parseShellProgram(dummyLine, program)
+ line := t.NewLine("filename.mk", 1, "")
+ list, err := parseShellProgram(line, program)
assertNil(err, "")
walker := NewMkShWalker()
@@ -270,7 +272,8 @@ func (s *Suite) Test_MkShWalker_Walk__em
func (s *Suite) Test_MkShWalker_Walk__assertion(c *check.C) {
t := s.Init(c)
- list, err := parseShellProgram(dummyLine, "echo \"hello, world\"")
+ line := t.NewLine("filename.mk", 1, "")
+ list, err := parseShellProgram(line, "echo \"hello, world\"")
assertNil(err, "")
walker := NewMkShWalker()
Index: pkgsrc/pkgtools/pkglint/files/package.go
diff -u pkgsrc/pkgtools/pkglint/files/package.go:1.69 pkgsrc/pkgtools/pkglint/files/package.go:1.70
--- pkgsrc/pkgtools/pkglint/files/package.go:1.69 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/package.go Wed Nov 27 22:10:07 2019
@@ -230,7 +230,7 @@ func (pkg *Package) parse(mklines *MkLin
filename := mklines.lines.Filename
if filename.Base() == "buildlink3.mk" {
builtin := cleanpath(filename.Dir().JoinNoClean("builtin.mk"))
- builtinRel := relpath(pkg.dir, builtin)
+ builtinRel := G.Pkgsrc.Relpath(pkg.dir, builtin)
if pkg.included.FirstTime(builtinRel.String()) && builtin.IsFile() {
builtinMkLines := LoadMk(builtin, MustSucceed|LogErrors)
pkg.parse(builtinMkLines, allLines, "")
@@ -297,7 +297,7 @@ func (pkg *Package) loadIncluded(mkline
dirname, _ := includingFile.Split()
dirname = cleanpath(dirname)
fullIncluded := joinPath(dirname, includedFile)
- relIncludedFile := relpath(pkg.dir, fullIncluded)
+ relIncludedFile := G.Pkgsrc.Relpath(pkg.dir, fullIncluded)
if !pkg.shouldDiveInto(includingFile, includedFile) {
return nil, true
@@ -371,7 +371,7 @@ func (pkg *Package) resolveIncludedFile(
}
if mkline.Basename != "buildlink3.mk" {
- if includedFile.HasSuffixText("/buildlink3.mk") {
+ if includedFile.HasSuffixPath("buildlink3.mk") {
pkg.bl3[includedFile] = mkline
if trace.Tracing {
trace.Step1("Buildlink3 file in package: %q", includedText)
@@ -392,11 +392,9 @@ func (*Package) shouldDiveInto(including
return false
}
- // FIXME: includingFile may be "../../mk/../devel/readline/buildlink.mk" and thus contain "mk"
- // even though the resolved file is not part of the pkgsrc infrastructure.
- if includingFile.ContainsPath("mk") && !G.Pkgsrc.ToRel(includingFile).HasPrefixPath("wip/mk") {
- // TODO: try ".buildlink.mk", ".builtin.mk" instead, see wip/clfswm.
- return includingFile.HasSuffixText("buildlink3.mk") && includedFile.HasSuffixText("builtin.mk")
+ if G.Pkgsrc.IsInfraMain(includingFile) {
+ return includingFile.HasSuffixText(".buildlink3.mk") &&
+ includedFile.HasSuffixText(".builtin.mk")
}
return true
@@ -971,7 +969,7 @@ func (pkg *Package) checkUseLanguagesCom
}
if mkline.Basename == "compiler.mk" {
- if relpath(pkg.dir, mkline.Filename) == "../../mk/compiler.mk" {
+ if G.Pkgsrc.Relpath(pkg.dir, mkline.Filename) == "../../mk/compiler.mk" {
return
}
}
@@ -1075,7 +1073,10 @@ func (pkg *Package) nbPart() string {
}
func (pkg *Package) pkgnameFromDistname(pkgname, distname string) (string, bool) {
- tokens := NewMkParser(nil, pkgname).MkTokens()
+ tokens, rest := NewMkLexer(pkgname, nil).MkTokens()
+ if rest != "" {
+ return "", false
+ }
// TODO: Make this resolving of variable references available to all other variables as well.
@@ -1206,9 +1207,7 @@ func (pkg *Package) checkDirent(dirent P
switch {
case mode.IsRegular():
- pkgsrcRel := G.Pkgsrc.ToRel(dirent)
- depth := pkgsrcRel.Count() - 1 // FIXME
- G.checkReg(dirent, basename, depth)
+ G.checkReg(dirent, basename, G.Pkgsrc.ToRel(dirent).Count())
case hasPrefix(basename, "work"):
if G.Opts.Import {
@@ -1377,6 +1376,13 @@ func (pkg *Package) checkIncludeConditio
}
}
+ dependingOn := func(varnames []string) string {
+ if len(varnames) == 0 {
+ return ""
+ }
+ return sprintf(" (depending on %s)", strings.Join(varnames, ", "))
+ }
+
if indentation.IsConditional() {
if other := pkg.unconditionalIncludes[key]; other != nil {
if !pkg.Once.FirstTimeSlice("checkIncludeConditionally", mkline.Location.String(), other.Location.String()) {
@@ -1384,9 +1390,11 @@ func (pkg *Package) checkIncludeConditio
}
mkline.Warnf(
- "%q is included conditionally here (depending on %s) "+
+ "%q is included conditionally here%s "+
"and unconditionally in %s.",
- cleanpath(mkline.IncludedFile()), strings.Join(mkline.ConditionalVars(), ", "), mkline.RefTo(other))
+ cleanpath(mkline.IncludedFile()),
+ dependingOn(mkline.ConditionalVars()),
+ mkline.RefTo(other))
explainPkgOptions(other, mkline)
}
@@ -1399,8 +1407,10 @@ func (pkg *Package) checkIncludeConditio
mkline.Warnf(
"%q is included unconditionally here "+
- "and conditionally in %s (depending on %s).",
- cleanpath(mkline.IncludedFile()), mkline.RefTo(other), strings.Join(other.ConditionalVars(), ", "))
+ "and conditionally in %s%s.",
+ cleanpath(mkline.IncludedFile()),
+ mkline.RefTo(other),
+ dependingOn(other.ConditionalVars()))
explainPkgOptions(mkline, other)
}
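
For readers skimming the diff, a minimal, self-contained sketch of the new message construction: the "(depending on ...)" part is only added when the condition actually involves variables, which is what lets a plain exists(filename) condition read naturally. The helper name dependingOn matches the diff; everything else (package main, the sample arguments) is invented for illustration.

package main

import (
    "fmt"
    "strings"
)

// dependingOn builds the optional " (depending on ...)" suffix; for a
// condition like ".if exists(/usr/include)" there are no variables and the
// suffix is simply omitted.
func dependingOn(varnames []string) string {
    if len(varnames) == 0 {
        return ""
    }
    return fmt.Sprintf(" (depending on %s)", strings.Join(varnames, ", "))
}

func main() {
    fmt.Printf("%q is included conditionally here%s and unconditionally in %s.\n",
        "../../devel/zlib/buildlink3.mk", dependingOn([]string{"OPSYS"}), "buildlink3.mk:12")
    fmt.Printf("%q is included conditionally here%s and unconditionally in %s.\n",
        "../../sysutils/coreutils/buildlink3.mk", dependingOn(nil), "options.mk:11")
}
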
@@ -1441,7 +1451,7 @@ func (pkg *Package) File(relativeFileNam
// Example:
// NewPackage("category/package").Rel("other/package") == "../../other/package"
func (pkg *Package) Rel(filename Path) Path {
- return relpath(pkg.dir, filename)
+ return G.Pkgsrc.Relpath(pkg.dir, filename)
}
// Returns whether the given file (relative to the package directory)
Index: pkgsrc/pkgtools/pkglint/files/package_test.go
diff -u pkgsrc/pkgtools/pkglint/files/package_test.go:1.59 pkgsrc/pkgtools/pkglint/files/package_test.go:1.60
--- pkgsrc/pkgtools/pkglint/files/package_test.go:1.59 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/package_test.go Wed Nov 27 22:10:07 2019
@@ -116,7 +116,7 @@ func (s *Suite) Test_Package__relative_i
// TODO: Since other.mk is referenced via "../../category/package",
// it would be nice if this relative path would be reflected in the output
// instead of referring just to "other.mk".
- // This needs to be fixed somewhere near relpath.
+ // This needs to be fixed somewhere near Relpath.
//
// The notes are in reverse order because they are produced when checking
// other.mk, and there the relative order is correct (line 2 before line 3).
@@ -1063,7 +1063,7 @@ func (s *Suite) Test_Package_resolveIncl
func (s *Suite) Test_Package_shouldDiveInto(c *check.C) {
t := s.Init(c)
- t.Chdir(".")
+ t.Chdir("category/package")
test := func(including, included Path, expected bool) {
actual := (*Package)(nil).shouldDiveInto(including, included)
@@ -2432,6 +2432,15 @@ func (s *Suite) Test_Package_pkgnameFrom
"WARN: ~/category/package/Makefile:4: Invalid variable modifier \"c,d\" for \"DISTNAME\".")
test("${DISTFILE:C,\\..*,,}", "aspell-af-0.50-0", "")
+
+ // Parse error because of the missing closing brace; parsing still succeeds.
+ test("${DISTNAME:M", "package-1.0", "",
+ "WARN: ~/category/package/Makefile:4: "+
+ "Missing closing \"}\" for \"DISTNAME\".")
+
+ // Parse error with an unparseable rest.
+ test("$", "package-1.0", "",
+ nil...)
}
func (s *Suite) Test_Package_checkPossibleDowngrade(c *check.C) {
@@ -2989,6 +2998,45 @@ func (s *Suite) Test_Package_checkInclud
"(depending on OPSYS) and unconditionally in buildlink3.mk:12.")
}
+func (s *Suite) Test_Package_checkIncludeConditionally__conditionally_no_variable(c *check.C) {
+ t := s.Init(c)
+
+ t.SetUpOption("zlib", "")
+ t.SetUpPackage("category/package",
+ ".include \"../../devel/zlib/buildlink3.mk\"",
+ ".if exists(/usr/include)",
+ ".include \"../../sysutils/coreutils/buildlink3.mk\"",
+ ".endif")
+ t.CreateFileLines("mk/bsd.options.mk", "")
+ t.CreateFileLines("devel/zlib/buildlink3.mk", "")
+ t.CreateFileLines("sysutils/coreutils/buildlink3.mk", "")
+
+ t.CreateFileLines("category/package/options.mk",
+ MkCvsID,
+ "",
+ "PKG_OPTIONS_VAR=\tPKG_OPTIONS.package",
+ "PKG_SUPPORTED_OPTIONS=\t# none",
+ "",
+ ".include \"../../mk/bsd.options.mk\"",
+ "",
+ ".if exists(/usr/include)",
+ ". include \"../../devel/zlib/buildlink3.mk\"",
+ ".endif",
+ ".include \"../../sysutils/coreutils/buildlink3.mk\"")
+ t.Chdir("category/package")
+ t.FinishSetUp()
+
+ G.checkdirPackage(".")
+
+ t.CheckOutputLines(
+ "WARN: Makefile:20: \"../../devel/zlib/buildlink3.mk\" "+
+ "is included unconditionally here "+
+ "and conditionally in options.mk:9.",
+ "WARN: Makefile:22: \"../../sysutils/coreutils/buildlink3.mk\" "+
+ "is included conditionally here "+
+ "and unconditionally in options.mk:11.")
+}
+
func (s *Suite) Test_Package_checkIncludeConditionally__explain_PKG_OPTIONS_in_options_mk(c *check.C) {
t := s.Init(c)
Index: pkgsrc/pkgtools/pkglint/files/util.go
diff -u pkgsrc/pkgtools/pkglint/files/util.go:1.59 pkgsrc/pkgtools/pkglint/files/util.go:1.60
--- pkgsrc/pkgtools/pkglint/files/util.go:1.59 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/util.go Wed Nov 27 22:10:07 2019
@@ -239,7 +239,7 @@ func assertf(cond bool, format string, a
}
func isEmptyDir(filename Path) bool {
- if filename.HasSuffixText("/CVS") {
+ if filename.HasSuffixPath("CVS") {
return true
}
@@ -457,6 +457,7 @@ func mkopSubst(s string, left bool, from
})
}
+// FIXME: Replace with Path.JoinNoClean
func joinPath(a, b Path, others ...Path) Path {
if len(others) == 0 {
return a + "/" + b
@@ -468,69 +469,6 @@ func joinPath(a, b Path, others ...Path)
return NewPath(strings.Join(parts, "/"))
}
-// relpath returns the relative path from the directory "from"
-// to the filesystem entry "to".
-//
-// The relative path is built by going from the "from" directory via the
-// pkgsrc root to the "to" filename. This produces the form
-// "../../category/package" that is found in DEPENDS and .include lines.
-//
-// Both from and to are interpreted relative to the current working directory,
-// unless they are absolute paths.
-//
-// This function should only be used if the relative path from one file to
-// another cannot be computed in another way. The preferred way is to take
-// the relative filenames directly from the .include or exists() where they
-// appear.
-//
-// TODO: Invent data types for all kinds of relative paths that occur in pkgsrc
-// and pkglint. Make sure that these paths cannot be accidentally mixed.
-func relpath(from, to Path) (result Path) {
-
- if trace.Tracing {
- defer trace.Call(from, to, trace.Result(&result))()
- }
-
- cfrom := cleanpath(from)
- cto := cleanpath(to)
-
- if cfrom == cto {
- return "."
- }
-
- // Take a shortcut for the common case from "dir" to "dir/subdir/...".
- if cto.HasPrefixPath(cfrom) {
- return cleanpath(cto[len(cfrom)+1:])
- }
-
- // Take a shortcut for the common case from "category/package" to ".".
- // This is the most common variant in a complete pkgsrc scan.
- if cto == "." {
- fromParts := cfrom.Parts()
- if len(fromParts) == 2 && !hasPrefix(fromParts[0], ".") && !hasPrefix(fromParts[1], ".") {
- return "../.."
- }
- }
-
- if cfrom == "." && !cto.IsAbs() {
- return cto.Clean()
- }
-
- absFrom := abspath(cfrom)
- absTopdir := abspath(G.Pkgsrc.topdir)
- absTo := abspath(cto)
-
- toTop := absFrom.Rel(absTopdir)
- fromTop := absTopdir.Rel(absTo)
-
- result = cleanpath(toTop.JoinNoClean(fromTop))
-
- if trace.Tracing {
- trace.Stepf("relpath from %q to %q = %q", cfrom, cto, result)
- }
- return
-}
-
func abspath(filename Path) Path {
abs := filename
if !filename.IsAbs() {
Index: pkgsrc/pkgtools/pkglint/files/path.go
diff -u pkgsrc/pkgtools/pkglint/files/path.go:1.1 pkgsrc/pkgtools/pkglint/files/path.go:1.2
--- pkgsrc/pkgtools/pkglint/files/path.go:1.1 Sat Nov 23 23:36:20 2019
+++ pkgsrc/pkgtools/pkglint/files/path.go Wed Nov 27 22:10:07 2019
@@ -13,6 +13,12 @@ import (
// Some paths may contain placeholders like @VAR@ or ${VAR}.
// The base directory of relative paths depends on the context
// in which the path is used.
+//
+// TODO: Consider adding several more specialized types of path:
+// TODO: CurrPath, relative to the current working directory
+// TODO: PkgsrcPath, relative to the pkgsrc root
+// TODO: PackagePath, relative to the package directory
+// TODO: RelativePath, relative to some other basedir
type Path string
func NewPath(name string) Path { return Path(name) }
@@ -32,10 +38,33 @@ func (p Path) Split() (dir Path, base st
return Path(strDir), strBase
}
+// Parts splits the path into its components.
+// Multiple adjacent slashes are treated like a single slash.
+// Parts that are single dots are skipped.
+// Absolute paths have an empty string as their first part.
+// All other parts are nonempty.
func (p Path) Parts() []string {
- return strings.FieldsFunc(string(p), func(r rune) bool { return r == '/' })
+ if p == "" {
+ return nil
+ }
+
+ parts := strings.Split(string(p), "/")
+ j := 0
+ for i, part := range parts {
+ if (i == 0 || part != "") && part != "." {
+ parts[j] = part
+ j++
+ }
+ }
+ parts = parts[:j]
+ if len(parts) == 0 {
+ return []string{"."}
+ }
+ return parts
}
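
As an aside (not part of the commit), the new Parts semantics can be reproduced in a few lines of standalone Go. The helper below only illustrates the rules stated in the doc comment; it is not pkglint's Path type.

package main

import (
    "fmt"
    "strings"
)

// parts splits a path into components: adjacent slashes collapse, "." parts
// are skipped, an absolute path keeps an empty first component, and a path
// consisting only of dots collapses to a single ".".
func parts(p string) []string {
    if p == "" {
        return nil
    }
    var result []string
    for i, part := range strings.Split(p, "/") {
        if (i == 0 || part != "") && part != "." {
            result = append(result, part)
        }
    }
    if len(result) == 0 {
        return []string{"."}
    }
    return result
}

func main() {
    fmt.Printf("%q\n", parts("relative////subdir")) // ["relative" "subdir"]
    fmt.Printf("%q\n", parts("./././."))            // ["."]
    fmt.Printf("%q\n", parts("/root"))              // ["" "root"]
}
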
+// Count returns the number of meaningful parts of the path.
+// See Parts.
func (p Path) Count() int { return len(p.Parts()) }
func (p Path) HasPrefixText(prefix string) bool {
@@ -45,8 +74,26 @@ func (p Path) HasPrefixText(prefix strin
// HasPrefixPath returns whether the path starts with the given prefix.
// The basic unit of comparison is a path component, not a character.
func (p Path) HasPrefixPath(prefix Path) bool {
- return hasPrefix(string(p), string(prefix)) &&
- (len(p) == len(prefix) || p[len(prefix)] == '/')
+ // Handle the simple case first, without any memory allocations.
+ if hasPrefix(string(p), string(prefix)) {
+ return len(p) == len(prefix) || p[len(prefix)] == '/'
+ }
+
+ if prefix == "." {
+ return !p.IsAbs()
+ }
+
+ parts := p.Parts()
+ prefixParts := prefix.Parts()
+ if len(prefixParts) > len(parts) {
+ return false
+ }
+ for i, prefixPart := range prefixParts {
+ if parts[i] != prefixPart {
+ return false
+ }
+ }
+ return true
}
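
The component-wise comparison is easiest to see in a reduced, self-contained sketch. The names parts and hasPrefixPath are reused from the diff for readability, but the code below is an illustration only and skips the allocation-free fast path.

package main

import (
    "fmt"
    "strings"
)

// parts splits a path into components, skipping "." and empty components;
// absolute paths keep an empty first component (compare the Parts sketch above).
func parts(p string) []string {
    var result []string
    for i, part := range strings.Split(p, "/") {
        if (i == 0 || part != "") && part != "." {
            result = append(result, part)
        }
    }
    return result
}

// hasPrefixPath reports whether prefix names the path itself or one of its
// ancestors, comparing whole components rather than characters.
func hasPrefixPath(p, prefix string) bool {
    if prefix == "." {
        return !strings.HasPrefix(p, "/") // "." only prefixes relative paths
    }
    pp, pre := parts(p), parts(prefix)
    if len(pre) > len(pp) {
        return false
    }
    for i, part := range pre {
        if pp[i] != part {
            return false
        }
    }
    return true
}

func main() {
    fmt.Println(hasPrefixPath("/root/subdir", "/root"))  // true
    fmt.Println(hasPrefixPath("filename", "./filename")) // true
    fmt.Println(hasPrefixPath("filename", "./file"))     // false
    fmt.Println(hasPrefixPath("/anything", "."))         // false
}
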
// TODO: Check each call whether ContainsPath is more appropriate; add tests
@@ -75,7 +122,6 @@ func (p Path) ContainsPathCanonical(sub
return cleaned.ContainsPath(sub)
}
-// TODO: Check each call whether HasSuffixPath is more appropriate; add tests
func (p Path) HasSuffixText(suffix string) bool {
return hasSuffix(string(p), suffix)
}
@@ -107,15 +153,35 @@ func (p Path) JoinNoClean(s Path) Path {
func (p Path) Clean() Path { return NewPath(path.Clean(string(p))) }
+// CleanDot returns the path with single dots removed and double slashes
+// collapsed.
+func (p Path) CleanDot() Path {
+ if !p.ContainsText(".") {
+ return p
+ }
+
+ var parts []string
+ for i, part := range p.Parts() {
+ if !(part == "." || i > 0 && part == "") { // See Parts
+ parts = append(parts, part)
+ }
+ }
+ if len(parts) == 0 {
+ return "."
+ }
+ return NewPath(strings.Join(parts, "/"))
+}
+
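
A standalone sketch of what CleanDot does and, just as importantly, what it does not do: unlike path.Clean it never resolves "..", so a path such as "/usr/pkgsrc/wip/package/../mk/git-package.mk" keeps its shape. The function below is illustrative, not the pkglint implementation.

package main

import (
    "fmt"
    "path"
    "strings"
)

// cleanDot drops "." components and duplicate slashes but keeps ".." parts.
func cleanDot(p string) string {
    var parts []string
    for i, part := range strings.Split(p, "/") {
        if part == "." || (i > 0 && part == "") {
            continue
        }
        parts = append(parts, part)
    }
    if len(parts) == 0 {
        return "."
    }
    return strings.Join(parts, "/")
}

func main() {
    fmt.Println(cleanDot("a/bb///../c"))    // a/bb/../c
    fmt.Println(path.Clean("a/bb///../c")) // a/c, which is what CleanDot avoids
}
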
func (p Path) IsAbs() bool {
return p.HasPrefixText("/") || filepath.IsAbs(filepath.FromSlash(string(p)))
}
+// Rel returns the relative path from this path to the other.
func (p Path) Rel(other Path) Path {
fp := filepath.FromSlash(p.String())
fpOther := filepath.FromSlash(other.String())
rel, err := filepath.Rel(fp, fpOther)
- assertNil(err, "relpath from %q to %q", p, other)
+ assertNil(err, "Relpath from %q to %q", p, other)
return NewPath(filepath.ToSlash(rel))
}
Index: pkgsrc/pkgtools/pkglint/files/path_test.go
diff -u pkgsrc/pkgtools/pkglint/files/path_test.go:1.1 pkgsrc/pkgtools/pkglint/files/path_test.go:1.2
--- pkgsrc/pkgtools/pkglint/files/path_test.go:1.1 Sat Nov 23 23:36:20 2019
+++ pkgsrc/pkgtools/pkglint/files/path_test.go Wed Nov 27 22:10:07 2019
@@ -104,11 +104,32 @@ func (s *Suite) Test_Path_Parts(c *check
t.CheckDeepEquals(NewPath(p).Parts(), parts)
}
- test("", []string{}...)
- test("././././", ".", ".", ".", ".") // No trailing ""
- test("/root", "root") // No leading ""
- test("filename", "filename")
- test("dir/filename", "dir", "filename")
+ // Only the empty path returns an empty slice.
+ test("", nil...)
+
+ // The standard cases for relative paths.
+ test("relative", "relative")
+ test("relative/subdir", "relative", "subdir")
+ test("relative////subdir", "relative", "subdir")
+ test("relative/..", "relative", "..")
+ test("relative/.", "relative")
+
+ // Leading dots are removed when they are followed by something.
+ test("./relative", "relative")
+
+ // A path consisting of only dots produces a single dot.
+ test("./././.", ".")
+
+ // Slashes at the end are treated like a single dot.
+ test("././././", ".")
+ test(".///////", ".")
+
+ // Absolute paths have an empty first component.
+ test("/", "")
+ test("/.", "")
+ test("/root", "", "root")
+
+ // The backslash is not a path separator.
test("dir/filename\\with\\backslash", "dir", "filename\\with\\backslash")
}
@@ -119,12 +140,36 @@ func (s *Suite) Test_Path_Count(c *check
t.CheckEquals(NewPath(p).Count(), count)
}
- test("", 0) // FIXME
- test("././././", 4)
- test("/root", 1) // FIXME
+ test("././././", 1)
+ test("/root", 2)
test("filename", 1)
test("dir/filename", 2)
test("dir/filename\\with\\backslash", 2)
+
+ // Only the empty path has zero parts.
+ test("", 0)
+
+ // The standard cases for canonical relative paths.
+ test("relative", 1)
+ test("relative/subdir", 2)
+ test("relative////subdir", 2)
+ test("relative/..", 2)
+ test("relative/.", 1)
+
+ // A path consisting of only dots produces a single dot.
+ test("./././.", 1)
+
+ // Slashes at the end are treated like a single dot.
+ test("././././", 1)
+ test(".///////", 1)
+
+ // Absolute paths have an empty first component.
+ test("/", 1)
+ test("/.", 1)
+ test("/root", 2)
+
+ // The backslash is not a path separator.
+ test("dir/filename\\with\\backslash", 2)
}
func (s *Suite) Test_Path_HasPrefixText(c *check.C) {
@@ -155,9 +200,21 @@ func (s *Suite) Test_Path_HasPrefixPath(
test("", "x", false)
test("/root", "/r", false)
test("/root", "/root", true)
- test("/root", "/root/", false)
+
+ // The prefix matches even though its textual representation is longer
+ // than the path: the trailing slash merely marks the path as a directory,
+ // and there are only a few cases where that difference matters, such as
+ // in rsync and mkdir.
+ test("/root", "/root/", true)
+
test("/root/", "/root", true)
+ test("/root/", "root", false)
test("/root/subdir", "/root", true)
+ test("filename", ".", true)
+ test("filename", "./filename", true)
+ test("filename", "./file", false)
+ test("filename", "./filename/sub", false)
+ test("/anything", ".", false)
}
func (s *Suite) Test_Path_ContainsText(c *check.C) {
@@ -343,6 +400,23 @@ func (s *Suite) Test_Path_Clean(c *check
test("a/bb///../c", "a/c")
}
+func (s *Suite) Test_Path_CleanDot(c *check.C) {
+ t := s.Init(c)
+
+ test := func(p, result Path) {
+ t.CheckEquals(p.CleanDot(), result)
+ }
+
+ test("", "")
+ test(".", ".")
+ test("./././", ".")
+ test("a/bb///../c", "a/bb/../c")
+ test("./filename", "filename")
+ test("/absolute", "/absolute")
+ test("/usr/pkgsrc/wip/package", "/usr/pkgsrc/wip/package")
+ test("/usr/pkgsrc/wip/package/../mk/git-package.mk", "/usr/pkgsrc/wip/package/../mk/git-package.mk")
+}
+
func (s *Suite) Test_Path_IsAbs(c *check.C) {
t := s.Init(c)
Index: pkgsrc/pkgtools/pkglint/files/pkglint.1
diff -u pkgsrc/pkgtools/pkglint/files/pkglint.1:1.56 pkgsrc/pkgtools/pkglint/files/pkglint.1:1.57
--- pkgsrc/pkgtools/pkglint/files/pkglint.1:1.56 Sun Jun 30 20:56:19 2019
+++ pkgsrc/pkgtools/pkglint/files/pkglint.1 Wed Nov 27 22:10:07 2019
@@ -1,4 +1,4 @@
-.\" $NetBSD: pkglint.1,v 1.56 2019/06/30 20:56:19 rillig Exp $
+.\" $NetBSD: pkglint.1,v 1.57 2019/11/27 22:10:07 rillig Exp $
.\" From FreeBSD: portlint.1,v 1.8 1997/11/25 14:53:14 itojun Exp
.\"
.\" Copyright (c) 1997 by Jun-ichiro Itoh <itojun%itojun.org@localhost>.
@@ -8,7 +8,7 @@
.\" Thomas Klausner <wiz%NetBSD.org@localhost>, 2012.
.\" Roland Illig <rillig%NetBSD.org@localhost>, 2015-2019.
.\"
-.Dd June 17, 2019
+.Dd November 27, 2019
.Dt PKGLINT 1
.Os
.Sh NAME
Index: pkgsrc/pkgtools/pkglint/files/pkglint.go
diff -u pkgsrc/pkgtools/pkglint/files/pkglint.go:1.65 pkgsrc/pkgtools/pkglint/files/pkglint.go:1.66
--- pkgsrc/pkgtools/pkglint/files/pkglint.go:1.65 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/pkglint.go Wed Nov 27 22:10:07 2019
@@ -43,7 +43,7 @@ type Pkglint struct {
interner StringInterner
// cwd is the absolute path to the current working
- // directory. It is used for speeding up relpath and abspath.
+ // directory. It is used for speeding up Relpath and abspath.
// There is no other use for it.
cwd Path
@@ -106,7 +106,7 @@ type pkglintFatal struct{}
// G is the abbreviation for "global state";
// this and the tracer are the only global variables in this Go package.
var (
- G = NewPkglint(nil, nil)
+ G = NewPkglint(os.Stdout, os.Stderr)
trace tracePkg.Tracer
)
@@ -179,7 +179,8 @@ func (pkglint *Pkglint) setUpProfiling()
f, err := os.Create("pkglint.pprof")
if err != nil {
- dummyLine.Fatalf("Cannot create profiling file: %s", err)
+ pkglint.Logger.TechErrorf("pkglint.pprof", "Cannot create profiling file: %s", err)
+ panic(pkglintFatal{})
}
atExit(func() { assertNil(f.Close(), "") })
@@ -217,11 +218,11 @@ func (pkglint *Pkglint) prepareMainLoop(
// pkglint doesn't know where to load the infrastructure files from,
// and these are needed for virtually every single check.
// Therefore, the only sensible thing to do is to quit immediately.
- dummyLine.Fatalf("%q must be inside a pkgsrc tree.", firstDir)
+ NewLineWhole(firstDir).Fatalf("Must be inside a pkgsrc tree.")
}
pkglint.Pkgsrc = NewPkgsrc(joinPath(firstDir, relTopdir))
- pkglint.Wip = pkglint.Pkgsrc.ToRel(firstDir).HasPrefixPath("wip") // Same as in Pkglint.Check.
+ pkglint.Wip = pkglint.Pkgsrc.IsWip(firstDir) // See Pkglint.checkMode.
pkglint.Pkgsrc.LoadInfrastructure()
currentUser, err := user.Current()
@@ -244,7 +245,6 @@ func (pkglint *Pkglint) ParseCommandLine
opts.AddFlagVar('h', "help", &gopts.ShowHelp, false, "show a detailed usage message")
opts.AddFlagVar('I', "dumpmakefile", &gopts.DumpMakefile, false, "dump the Makefile after parsing")
opts.AddFlagVar('i', "import", &gopts.Import, false, "prepare the import of a wip package")
- opts.AddFlagVar('m', "log-verbose", &lopts.LogVerbose, false, "allow the same diagnostic more than once")
opts.AddStrList('o', "only", &gopts.LogOnly, "only log diagnostics containing the given text")
opts.AddFlagVar('p', "profiling", &gopts.Profiling, false, "profile the executing program")
opts.AddFlagVar('q', "quiet", &lopts.Quiet, false, "don't show a summary line when finishing")
@@ -338,9 +338,8 @@ func (pkglint *Pkglint) checkMode(dirent
}
if isReg {
- depth := pkgsrcRel.Count() - 1 // FIXME
pkglint.checkExecutable(dirent, mode)
- pkglint.checkReg(dirent, basename, depth)
+ pkglint.checkReg(dirent, basename, pkgsrcRel.Count())
return
}
@@ -448,7 +447,8 @@ func CheckLinesDescr(lines *Lines) {
ck.CheckValidCharacters()
if containsVarRef(line.Text) {
- for _, token := range NewMkParser(nil, line.Text).MkTokens() {
+ tokens, _ := NewMkLexer(line.Text, nil).MkTokens()
+ for _, token := range tokens {
if token.Varuse != nil && G.Pkgsrc.VariableType(nil, token.Varuse.varname) != nil {
line.Notef("Variables are not expanded in the DESCR file.")
}
@@ -546,9 +546,13 @@ func CheckFileMk(filename Path) {
mklines.SaveAutofixChanges()
}
+// checkReg checks the given regular file.
+// depth is 3 for files in the package directory, and 4 or more for files
+// deeper in the directory hierarchy, such as in files/ or patches/.
func (pkglint *Pkglint) checkReg(filename Path, basename string, depth int) {
- if depth == 2 && !pkglint.Wip {
+ if depth == 3 && !pkglint.Wip {
+ // FIXME: There's no good reason for prohibiting a README file.
if contains(basename, "README") || contains(basename, "TODO") {
NewLineWhole(filename).Errorf("Packages in main pkgsrc must not have a %s file.", basename)
// TODO: Add a convincing explanation.
@@ -560,8 +564,8 @@ func (pkglint *Pkglint) checkReg(filenam
case hasSuffix(basename, "~"),
hasSuffix(basename, ".orig"),
hasSuffix(basename, ".rej"),
- contains(basename, "README") && depth == 2,
- contains(basename, "TODO") && depth == 2:
+ contains(basename, "README") && depth == 3,
+ contains(basename, "TODO") && depth == 3:
if pkglint.Opts.Import {
NewLineWhole(filename).Errorf("Must be cleaned up before committing the package.")
}
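
The depth values in this and the following hunks change from 2 to 3 because checkReg now receives the plain component count of the pkgsrc-relative path, without the former "- 1". A minimal sketch of that convention, with invented names:

package main

import (
    "fmt"
    "strings"
)

// depth is the number of components of a path relative to the pkgsrc root,
// so a file directly in the package directory has depth 3.
func depth(pkgsrcRel string) int {
    return len(strings.Split(pkgsrcRel, "/"))
}

func main() {
    fmt.Println(depth("category/package/DESCR"))            // 3
    fmt.Println(depth("category/package/patches/patch-aa")) // 4
}
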
@@ -605,7 +609,7 @@ func (pkglint *Pkglint) checkReg(filenam
CheckLinesPatch(lines)
}
- case matches(filename.String(), `(?:^|/)patches/manual[^/]*$`):
+ case filename.Dir().Base() == "patches" && matches(filename.Base(), `^manual[^/]*$`):
if trace.Tracing {
trace.Stepf("Unchecked file %q.", filename)
}
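
The refined check can be pictured with the standard library alone: instead of running one regular expression over the whole path, the directory name and the base name are tested separately. The sketch below uses path and regexp and is only an approximation of pkglint's Path methods.

package main

import (
    "fmt"
    "path"
    "regexp"
)

var manualPatch = regexp.MustCompile(`^manual[^/]*$`)

// isManualPatch reports whether the file is a manual-* file directly inside
// a patches/ directory.
func isManualPatch(filename string) bool {
    return path.Base(path.Dir(filename)) == "patches" &&
        manualPatch.MatchString(path.Base(filename))
}

func main() {
    fmt.Println(isManualPatch("category/package/patches/manual-aa")) // true
    fmt.Println(isManualPatch("category/package/files/manual-aa"))   // false
}
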
@@ -677,7 +681,7 @@ func (pkglint *Pkglint) checkExecutable(
fix.Describef(0, "Clearing executable bits")
if autofix {
if err := filename.Chmod(mode &^ 0111); err != nil {
- G.Logger.Errorf(cleanpath(filename), "Cannot clear executable bits: %s", err)
+ G.Logger.TechErrorf(cleanpath(filename), "Cannot clear executable bits: %s", err)
}
}
})
Index: pkgsrc/pkgtools/pkglint/files/pkglint_test.go
diff -u pkgsrc/pkgtools/pkglint/files/pkglint_test.go:1.50 pkgsrc/pkgtools/pkglint/files/pkglint_test.go:1.51
--- pkgsrc/pkgtools/pkglint/files/pkglint_test.go:1.50 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/pkglint_test.go Wed Nov 27 22:10:07 2019
@@ -58,7 +58,6 @@ func (s *Suite) Test_Pkglint_Main__help(
" -h, --help show a detailed usage message",
" -I, --dumpmakefile dump the Makefile after parsing",
" -i, --import prepare the import of a wip package",
- " -m, --log-verbose allow the same diagnostic more than once",
" -o, --only only log diagnostics containing the given text",
" -p, --profiling profile the executing program",
" -q, --quiet don't show a summary line when finishing",
@@ -102,7 +101,7 @@ func (s *Suite) Test_Pkglint_Main__no_ar
// The "." from the error message is the implicit argument added in Pkglint.Main.
t.CheckEquals(exitcode, 1)
t.CheckOutputLines(
- "FATAL: \".\" must be inside a pkgsrc tree.")
+ "FATAL: .: Must be inside a pkgsrc tree.")
}
func (s *Suite) Test_Pkglint_Main__unknown_option(c *check.C) {
@@ -331,7 +330,19 @@ func (s *Suite) Test_Pkglint_Main__profi
t.CheckEquals(exitcode, 1)
t.CheckOutputMatches(
- `FATAL: Cannot create profiling file: open pkglint\.pprof: .*`)
+ `ERROR: pkglint\.pprof: Cannot create profiling file: open pkglint\.pprof: .*`)
+}
+
+// Branch coverage for Logger.Logf, the level != Fatal case.
+func (s *Suite) Test_Pkglint_prepareMainLoop__fatal(c *check.C) {
+ t := s.Init(c)
+
+ t.Chdir(".")
+ t.Main("--profiling", t.File("does-not-exist").String())
+
+ t.CheckOutputLines(
+ "fileCache: 0 hits, 0 misses",
+ "FATAL: does-not-exist: Must be inside a pkgsrc tree.")
}
func (s *Suite) Test_Pkglint_ParseCommandLine__only(c *check.C) {
@@ -874,12 +885,12 @@ func (s *Suite) Test_Pkglint_checkReg__f
t.Chdir(".")
- G.checkReg("buildlink3.mk", "buildlink3.mk", 2)
- G.checkReg("DESCR", "DESCR", 2)
- G.checkReg("distinfo", "distinfo", 2)
- G.checkReg("MESSAGE", "MESSAGE", 2)
- G.checkReg("patches/patch-aa", "patch-aa", 2)
- G.checkReg("PLIST", "PLIST", 2)
+ G.checkReg("buildlink3.mk", "buildlink3.mk", 3)
+ G.checkReg("DESCR", "DESCR", 3)
+ G.checkReg("distinfo", "distinfo", 3)
+ G.checkReg("MESSAGE", "MESSAGE", 3)
+ G.checkReg("patches/patch-aa", "patch-aa", 3)
+ G.checkReg("PLIST", "PLIST", 3)
t.CheckOutputLines(
"ERROR: buildlink3.mk: Cannot be read.",
@@ -897,7 +908,7 @@ func (s *Suite) Test_Pkglint_checkReg__n
t.Chdir(".")
t.DisableTracing()
- G.checkReg("patches/manual-aa", "manual-aa", 2)
+ G.checkReg("patches/manual-aa", "manual-aa", 4)
t.CheckOutputEmpty()
}
@@ -1001,7 +1012,7 @@ func (s *Suite) Test_Pkglint_checkReg__u
t.CreateFileDummyPatch("category/Makefile/patches/index")
- G.checkReg(t.File("category/Makefile/patches/index"), "index", 3)
+ G.checkReg(t.File("category/Makefile/patches/index"), "index", 4)
t.CheckOutputLines(
"WARN: ~/category/Makefile/patches/index: " +
@@ -1014,7 +1025,7 @@ func (s *Suite) Test_Pkglint_checkReg__p
t.CreateFileDummyPatch("category/package/patches/patch-compiler.mk")
t.Chdir("category/package")
- G.checkReg(t.File("patches/patch-compiler.mk"), "patch-compiler.mk", 3)
+ G.checkReg(t.File("patches/patch-compiler.mk"), "patch-compiler.mk", 4)
t.CheckOutputEmpty()
}
@@ -1024,7 +1035,7 @@ func (s *Suite) Test_Pkglint_checkReg__f
t.CreateFileLines("category/package/files/index")
- G.checkReg(t.File("category/package/files/index"), "index", 3)
+ G.checkReg(t.File("category/package/files/index"), "index", 4)
// These files are ignored since they could contain anything.
t.CheckOutputEmpty()
@@ -1036,8 +1047,8 @@ func (s *Suite) Test_Pkglint_checkReg__s
t.CreateFileLines("category/package/spec")
t.CreateFileLines("regress/package/spec")
- G.checkReg(t.File("category/package/spec"), "spec", 2)
- G.checkReg(t.File("regress/package/spec"), "spec", 2)
+ G.checkReg(t.File("category/package/spec"), "spec", 3)
+ G.checkReg(t.File("regress/package/spec"), "spec", 3)
t.CheckOutputLines(
"WARN: ~/category/package/spec: Only packages in regress/ may have spec files.")
@@ -1047,7 +1058,7 @@ func (s *Suite) Test_Pkglint_checkExecut
t := s.Init(c)
filename := t.CreateFileLines("file.mk")
- err := os.Chmod(filename.String(), 0555)
+ err := filename.Chmod(0555)
assertNil(err, "")
G.checkExecutable(filename, 0555)
Index: pkgsrc/pkgtools/pkglint/files/pkgsrc.go
diff -u pkgsrc/pkgtools/pkglint/files/pkgsrc.go:1.42 pkgsrc/pkgtools/pkglint/files/pkgsrc.go:1.43
--- pkgsrc/pkgtools/pkglint/files/pkgsrc.go:1.42 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/pkgsrc.go Wed Nov 27 22:10:07 2019
@@ -813,7 +813,7 @@ func (src *Pkgsrc) ListVersions(category
}
if len(names) == 0 {
if errorIfEmpty {
- dummyLine.Errorf("Cannot find package versions of %q in %q.", re, src.File(category))
+ NewLineWhole(src.File(category)).Errorf("Cannot find package versions of %q.", re)
}
src.listVersions[cacheKey] = nil
return nil
@@ -1035,13 +1035,92 @@ func (src *Pkgsrc) Load(filename Path, o
return Load(src.File(filename), options)
}
+// Relpath returns the canonical relative path from the directory "from"
+// to the filesystem entry "to".
+//
+// The relative path is built by going from the "from" directory up to the
+// pkgsrc root and from there to the "to" filename. This produces the form
+// "../../category/package" that is found in DEPENDS and .include lines.
+//
+// Both from and to are interpreted relative to the current working directory,
+// unless they are absolute paths.
+//
+// This function should only be used if the relative path from one file to
+// another cannot be computed in another way. The preferred way is to take
+// the relative filenames directly from the .include or exists() where they
+// appear.
+//
+// TODO: Invent data types for all kinds of relative paths that occur in pkgsrc
+// and pkglint. Make sure that these paths cannot be accidentally mixed.
+func (src *Pkgsrc) Relpath(from, to Path) (result Path) {
+ cfrom := from.Clean()
+ cto := to.Clean()
+
+ if cfrom == cto {
+ return "."
+ }
+
+ // Take a shortcut for the common case from "dir" to "dir/subdir/...".
+ if cto.HasPrefixPath(cfrom) {
+ rel := cfrom.Rel(cto)
+ if !rel.HasPrefixPath("..") {
+ return rel
+ }
+ }
+
+ // Take a shortcut for the common case from "category/package" to ".".
+ // This is the most common variant in a complete pkgsrc scan.
+ if cto == "." {
+ fromParts := cfrom.Parts()
+ if len(fromParts) == 2 && fromParts[0] != ".." && fromParts[1] != ".." {
+ return "../.."
+ }
+ }
+
+ if cfrom == "." && !cto.IsAbs() {
+ return cto.Clean()
+ }
+
+ absFrom := abspath(cfrom)
+ absTopdir := abspath(src.topdir)
+ absTo := abspath(cto)
+
+ up := absFrom.Rel(absTopdir)
+ down := absTopdir.Rel(absTo)
+
+ if absFrom.HasPrefixPath(absTo) || absTo.HasPrefixPath(absFrom) {
+ return absFrom.Rel(absTo)
+ }
+
+ fromParts := absTopdir.Rel(absFrom).Parts()
+ toParts := down.Parts()
+
+ if len(fromParts) >= 2 && len(toParts) >= 2 {
+ if fromParts[0] == toParts[0] && fromParts[1] == toParts[1] {
+ var relParts []string
+ for _ = range fromParts[2:] {
+ relParts = append(relParts, "..")
+ }
+ relParts = append(relParts, toParts[2:]...)
+ return NewPath(strings.Join(relParts, "/")).CleanDot()
+ }
+ }
+
+ result = up.JoinNoClean(down).CleanDot()
+ return
+}
+
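
To illustrate the "../../category/package" form outside of pkglint (the commit itself does not contain this code): assuming both arguments are already relative to the pkgsrc root and that "from" is a directory, the path can be built by going up once per component of "from" and then appending "to". The name relViaRoot and the sample paths are invented for this sketch.

package main

import (
    "fmt"
    "path"
    "strings"
)

// relViaRoot returns the relative path from the directory "from" to "to",
// where both are given relative to the pkgsrc root.
func relViaRoot(from, to string) string {
    ups := make([]string, len(strings.Split(path.Clean(from), "/")))
    for i := range ups {
        ups[i] = ".."
    }
    return path.Join(append(ups, to)...)
}

func main() {
    fmt.Println(relViaRoot("category/package", "other/package"))  // ../../other/package
    fmt.Println(relViaRoot("category/package", "mk/compiler.mk")) // ../../mk/compiler.mk
}
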
// File resolves a filename relative to the pkgsrc top directory.
//
// Example:
// NewPkgsrc("/usr/pkgsrc").File("distfiles") => "/usr/pkgsrc/distfiles"
func (src *Pkgsrc) File(relativeName Path) Path {
+ cleaned := relativeName.Clean()
+ if cleaned == "." {
+ return src.topdir.CleanDot()
+ }
// TODO: Package.File resolves variables, Pkgsrc.File doesn't. They should behave the same.
- return cleanpath(joinPath(src.topdir, relativeName))
+ return src.topdir.JoinNoClean(cleaned).CleanDot()
}
// ToRel returns the path of `filename`, relative to the pkgsrc top directory.
@@ -1049,16 +1128,26 @@ func (src *Pkgsrc) File(relativeName Pat
// Example:
// NewPkgsrc("/usr/pkgsrc").ToRel("/usr/pkgsrc/distfiles") => "distfiles"
func (src *Pkgsrc) ToRel(filename Path) Path {
- return relpath(src.topdir, filename)
+ return src.Relpath(src.topdir, filename)
}
-// IsInfra returns whether the given filename (relative to the pkglint
+// IsInfra returns whether the given filename (relative to the current
// working directory) is part of the pkgsrc infrastructure.
func (src *Pkgsrc) IsInfra(filename Path) bool {
rel := src.ToRel(filename)
return rel.HasPrefixPath("mk") || rel.HasPrefixPath("wip/mk")
}
+func (src *Pkgsrc) IsInfraMain(filename Path) bool {
+ rel := src.ToRel(filename)
+ return rel.HasPrefixPath("mk")
+}
+
+func (src *Pkgsrc) IsWip(filename Path) bool {
+ rel := src.ToRel(filename)
+ return rel.HasPrefixPath("wip")
+}
+
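
Both helpers boil down to classifying a file by the first component of its pkgsrc-relative path. A tiny, self-contained approximation (hasPrefixComponent is an invented name; the real code goes through ToRel and HasPrefixPath):

package main

import (
    "fmt"
    "strings"
)

// hasPrefixComponent compares whole path components, so "wip" matches
// "wip/package" but not "wip-utils/package".
func hasPrefixComponent(rel, prefix string) bool {
    return rel == prefix || strings.HasPrefix(rel, prefix+"/")
}

func main() {
    fmt.Println(hasPrefixComponent("mk/bsd.pkg.mk", "mk"))         // infrastructure
    fmt.Println(hasPrefixComponent("wip/package/Makefile", "wip")) // pkgsrc-wip
    fmt.Println(hasPrefixComponent("wip-utils/Makefile", "wip"))   // false
}
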
// Change describes a modification to a single package, from the doc/CHANGES-* files.
type Change struct {
Location Location
Index: pkgsrc/pkgtools/pkglint/files/shell_test.go
diff -u pkgsrc/pkgtools/pkglint/files/shell_test.go:1.57 pkgsrc/pkgtools/pkglint/files/shell_test.go:1.58
--- pkgsrc/pkgtools/pkglint/files/shell_test.go:1.57 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/shell_test.go Wed Nov 27 22:10:07 2019
@@ -838,7 +838,7 @@ func (s *Suite) Test_ShellLineChecker_Ch
// foobar="`echo \"foo bar\"`"
text := "foobar=\"`echo \\\"foo bar\\\"`\""
- tokens, rest := splitIntoShellTokens(dummyLine, text)
+ tokens, rest := splitIntoShellTokens(ck.mkline.Line, text)
t.CheckDeepEquals(tokens, []string{text})
t.CheckEquals(rest, "")
@@ -907,13 +907,17 @@ func (s *Suite) Test_ShellLineChecker_Ch
ck.CheckShellCommandLine(text)
t.CheckOutputLines(
+ // TODO: Avoid these duplicate diagnostics.
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"NOTE: Makefile:3: Please use the SUBST framework instead of ${SED} and ${MV}.",
+ "WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
+ "WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
+ "WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
+ "WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: f is used but not defined.",
- // TODO: Avoid these duplicate diagnostics.
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.")
@@ -1557,19 +1561,21 @@ func (s *Suite) Test_ShellLineChecker_ch
func (s *Suite) Test_splitIntoShellTokens__line_continuation(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "if true; then \\")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "if true; then \\")
t.CheckDeepEquals(words, []string{"if", "true", ";", "then"})
t.CheckEquals(rest, "\\")
t.CheckOutputLines(
- "WARN: Internal pkglint error in ShTokenizer.ShAtom at \"\\\\\" (quoting=plain).")
+ "WARN: filename.mk:1: Internal pkglint error in ShTokenizer.ShAtom at \"\\\\\" (quoting=plain).")
}
func (s *Suite) Test_splitIntoShellTokens__dollar_slash(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "pax -s /.*~$$//g")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "pax -s /.*~$$//g")
t.CheckDeepEquals(words, []string{"pax", "-s", "/.*~$$//g"})
t.CheckEquals(rest, "")
@@ -1578,7 +1584,8 @@ func (s *Suite) Test_splitIntoShellToken
func (s *Suite) Test_splitIntoShellTokens__dollar_subshell(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "id=$$(${AWK} '{print}' < ${WRKSRC}/idfile) && echo \"$$id\"")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "id=$$(${AWK} '{print}' < ${WRKSRC}/idfile) && echo \"$$id\"")
t.CheckDeepEquals(words, []string{"id=$$(${AWK} '{print}' < ${WRKSRC}/idfile)", "&&", "echo", "\"$$id\""})
t.CheckEquals(rest, "")
@@ -1587,7 +1594,8 @@ func (s *Suite) Test_splitIntoShellToken
func (s *Suite) Test_splitIntoShellTokens__semicolons(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "word1 word2;;;")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "word1 word2;;;")
t.CheckDeepEquals(words, []string{"word1", "word2", ";;", ";"})
t.CheckEquals(rest, "")
@@ -1597,7 +1605,8 @@ func (s *Suite) Test_splitIntoShellToken
t := s.Init(c)
text := "\t${RUN} cd ${WRKSRC}&&(${ECHO} ${PERL5:Q};${ECHO})|${BASH} ./install"
- words, rest := splitIntoShellTokens(dummyLine, text)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, text)
t.CheckDeepEquals(words, []string{
"${RUN}",
@@ -1611,7 +1620,8 @@ func (s *Suite) Test_splitIntoShellToken
t := s.Init(c)
text := "\"\""
- words, rest := splitIntoShellTokens(dummyLine, text)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, text)
t.CheckDeepEquals(words, []string{"\"\""})
t.CheckEquals(rest, "")
@@ -1621,7 +1631,8 @@ func (s *Suite) Test_splitIntoShellToken
t := s.Init(c)
text := "\t\""
- words, rest := splitIntoShellTokens(dummyLine, text)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, text)
c.Check(words, check.IsNil)
t.CheckEquals(rest, "\"")
@@ -1631,7 +1642,8 @@ func (s *Suite) Test_splitIntoShellToken
t := s.Init(c)
text := "echo \"$$\""
- words, rest := splitIntoShellTokens(dummyLine, text)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, text)
t.CheckDeepEquals(words, []string{"echo", "\"$$\""})
t.CheckEquals(rest, "")
@@ -1643,7 +1655,8 @@ func (s *Suite) Test_splitIntoShellToken
t := s.Init(c)
varuseWord := "${GCONF_SCHEMAS:@.s.@${INSTALL_DATA} ${WRKSRC}/src/common/dbus/${.s.} ${DESTDIR}${GCONF_SCHEMAS_DIR}/@}"
- words, rest := splitIntoShellTokens(dummyLine, varuseWord)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, varuseWord)
t.CheckDeepEquals(words, []string{varuseWord})
t.CheckEquals(rest, "")
@@ -1655,7 +1668,8 @@ func (s *Suite) Test_splitIntoShellToken
t := s.Init(c)
code := "echo $$i$$j"
- words, rest := splitIntoShellTokens(dummyLine, code)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, code)
t.CheckDeepEquals(words, []string{"echo", "$$i$$j"})
t.CheckEquals(rest, "")
@@ -1664,7 +1678,8 @@ func (s *Suite) Test_splitIntoShellToken
func (s *Suite) Test_splitIntoShellTokens__varuse_with_embedded_space(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "${VAR:S/ /_/g}")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "${VAR:S/ /_/g}")
t.CheckDeepEquals(words, []string{"${VAR:S/ /_/g}"})
t.CheckEquals(rest, "")
@@ -1673,7 +1688,8 @@ func (s *Suite) Test_splitIntoShellToken
func (s *Suite) Test_splitIntoShellTokens__redirect(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "echo 1>output 2>>append 3>|clobber 4>&5 6<input >>append")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "echo 1>output 2>>append 3>|clobber 4>&5 6<input >>append")
t.CheckDeepEquals(words, []string{
"echo",
@@ -1685,7 +1701,7 @@ func (s *Suite) Test_splitIntoShellToken
">>", "append"})
t.CheckEquals(rest, "")
- words, rest = splitIntoShellTokens(dummyLine, "echo 1> output 2>> append 3>| clobber 4>& 5 6< input >> append")
+ words, rest = splitIntoShellTokens(line, "echo 1> output 2>> append 3>| clobber 4>& 5 6< input >> append")
t.CheckDeepEquals(words, []string{
"echo",
Index: pkgsrc/pkgtools/pkglint/files/shtypes_test.go
diff -u pkgsrc/pkgtools/pkglint/files/shtypes_test.go:1.9 pkgsrc/pkgtools/pkglint/files/shtypes_test.go:1.10
--- pkgsrc/pkgtools/pkglint/files/shtypes_test.go:1.9 Sun Jul 14 21:25:47 2019
+++ pkgsrc/pkgtools/pkglint/files/shtypes_test.go Wed Nov 27 22:10:07 2019
@@ -17,7 +17,8 @@ func (s *Suite) Test_ShAtomType_String(c
func (s *Suite) Test_ShAtom_String(c *check.C) {
t := s.Init(c)
- tokenizer := NewShTokenizer(dummyLine, "${ECHO} \"hello, world\"", false)
+ line := t.NewLine("filename.mk", 1, "")
+ tokenizer := NewShTokenizer(line, "${ECHO} \"hello, world\"", false)
atoms := tokenizer.ShAtoms()
@@ -45,7 +46,8 @@ func (s *Suite) Test_NewShToken__no_atom
func (s *Suite) Test_ShToken_String(c *check.C) {
t := s.Init(c)
- tokenizer := NewShTokenizer(dummyLine, "${ECHO} \"hello, world\"", false)
+ line := t.NewLine("filename.mk", 1, "")
+ tokenizer := NewShTokenizer(line, "${ECHO} \"hello, world\"", false)
t.CheckEquals(tokenizer.ShToken().String(), "ShToken([varuse(\"ECHO\")])")
t.CheckEquals(tokenizer.ShToken().String(), "ShToken([ShAtom(text, \"\\\"\", d) ShAtom(text, \"hello, world\", d) \"\\\"\"])")
Index: pkgsrc/pkgtools/pkglint/files/testnames_test.go
diff -u pkgsrc/pkgtools/pkglint/files/testnames_test.go:1.9 pkgsrc/pkgtools/pkglint/files/testnames_test.go:1.10
--- pkgsrc/pkgtools/pkglint/files/testnames_test.go:1.9 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/testnames_test.go Wed Nov 27 22:10:07 2019
@@ -8,10 +8,11 @@ import (
// Ensures that all test names follow a common naming scheme:
//
// Test_${Type}_${Method}__${description_using_underscores}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Configure("path.go", "*", "*", +intqa.EMissingTest)
ck.Configure("*yacc.go", "*", "*", intqa.ENone)
+ ck.Configure("*", "*", "", -intqa.EMissingTest)
ck.Check()
}
Index: pkgsrc/pkgtools/pkglint/files/substcontext.go
diff -u pkgsrc/pkgtools/pkglint/files/substcontext.go:1.29 pkgsrc/pkgtools/pkglint/files/substcontext.go:1.30
--- pkgsrc/pkgtools/pkglint/files/substcontext.go:1.29 Sat Oct 26 09:51:48 2019
+++ pkgsrc/pkgtools/pkglint/files/substcontext.go Wed Nov 27 22:10:07 2019
@@ -142,6 +142,7 @@ func (ctx *SubstContext) Varassign(mklin
fix.Replace("pre-patch", "post-extract")
fix.Replace("post-patch", "pre-configure")
fix.Apply()
+ // FIXME: Add test that has "SUBST_STAGE.id=pre-patch # or rather post-patch?"
}
if G.Pkg != nil && (value == "pre-configure" || value == "post-configure") {
@@ -270,7 +271,6 @@ func (ctx *SubstContext) suggestSubstVar
if !mkline.HasComment() && len(tokens) == 2 && tokens[0] == "-e" {
fix.Replace(mkline.Text, alignWith(varop, mkline.ValueAlign())+varname)
}
- fix.Anyway()
fix.Apply()
ctx.curr.seenVars = true
@@ -280,7 +280,7 @@ func (ctx *SubstContext) suggestSubstVar
// extractVarname extracts the variable name from a sed command of the form
// s,@VARNAME@,${VARNAME}, and some related variants thereof.
func (*SubstContext) extractVarname(token string) string {
- parser := NewMkParser(nil, token)
+ parser := NewMkLexer(token, nil)
lexer := parser.lexer
if !lexer.SkipByte('s') {
return ""
Index: pkgsrc/pkgtools/pkglint/files/varalignblock.go
diff -u pkgsrc/pkgtools/pkglint/files/varalignblock.go:1.8 pkgsrc/pkgtools/pkglint/files/varalignblock.go:1.9
--- pkgsrc/pkgtools/pkglint/files/varalignblock.go:1.8 Sun Nov 17 01:26:25 2019
+++ pkgsrc/pkgtools/pkglint/files/varalignblock.go Wed Nov 27 22:10:07 2019
@@ -676,7 +676,7 @@ func (VaralignSplitter) parseVarnameOp(p
}
mark := lexer.Mark()
- _ = parser.Varname()
+ _ = parser.mklex.Varname()
lexer.SkipHspace()
ok, _ := parser.Op()
assert(ok)
Index: pkgsrc/pkgtools/pkglint/files/vardefs.go
diff -u pkgsrc/pkgtools/pkglint/files/vardefs.go:1.78 pkgsrc/pkgtools/pkglint/files/vardefs.go:1.79
--- pkgsrc/pkgtools/pkglint/files/vardefs.go:1.78 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/vardefs.go Wed Nov 27 22:10:07 2019
@@ -406,6 +406,12 @@ func (reg *VarTypeRegistry) enumFrom(pkg
return enum(joined)
}
+ if !G.Testing {
+ mklines.Whole().Fatalf(
+ "Must contain at least 1 variable definition for %s.",
+ joinSkipEmptyCambridge("or", varcanons...))
+ }
+
if trace.Tracing {
trace.Stepf("Enum from default value: %s", defval)
}
@@ -421,8 +427,13 @@ func (reg *VarTypeRegistry) enumFrom(pkg
func (reg *VarTypeRegistry) enumFromDirs(pkgsrc *Pkgsrc, category Path, re regex.Pattern, repl string, defval string) *BasicType {
versions := pkgsrc.ListVersions(category, re, repl, false)
if len(versions) == 0 {
+ if !G.Testing {
+ NewLineWhole(category).Fatalf(
+ "Must contain at least 1 subdirectory matching %q.", re)
+ }
return enum(defval)
}
+
return enum(strings.Join(versions, " "))
}
@@ -440,8 +451,13 @@ func (reg *VarTypeRegistry) enumFromFile
}
}
if len(relevant) == 0 {
+ if !G.Testing {
+ NewLineWhole(basedir).Fatalf(
+ "Must contain at least 1 file matching %q.", re)
+ }
return enum(defval)
}
+
return enum(strings.Join(relevant, " "))
}
Index: pkgsrc/pkgtools/pkglint/files/vardefs_test.go
diff -u pkgsrc/pkgtools/pkglint/files/vardefs_test.go:1.24 pkgsrc/pkgtools/pkglint/files/vardefs_test.go:1.25
--- pkgsrc/pkgtools/pkglint/files/vardefs_test.go:1.24 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/vardefs_test.go Wed Nov 27 22:10:07 2019
@@ -94,10 +94,36 @@ func (s *Suite) Test_VarTypeRegistry_enu
t.CheckEquals(nonexistentType.AllowedEnums(), "defval")
}
+func (s *Suite) Test_VarTypeRegistry_enumFrom__no_testing(c *check.C) {
+ t := s.Init(c)
+
+ G.Testing = false
+
+ t.ExpectFatal(
+ t.SetUpVartypes,
+ "FATAL: ~/mk/compiler.mk: Cannot be read.")
+
+ t.CreateFileLines("mk/compiler.mk",
+ MkCvsID)
+
+ t.ExpectFatal(
+ t.SetUpVartypes,
+ "FATAL: ~/mk/compiler.mk: Must contain at least 1 variable "+
+ "definition for _COMPILERS or _PSEUDO_COMPILERS.")
+
+ t.CreateFileLines("mk/compiler.mk",
+ MkCvsID,
+ "_COMPILERS=\tgcc")
+
+ t.ExpectFatal(
+ t.SetUpVartypes,
+ "FATAL: ~/editors/emacs/modules.mk: Cannot be read.")
+}
+
func (s *Suite) Test_VarTypeRegistry_enumFromDirs(c *check.C) {
t := s.Init(c)
- // To make the test useful, these directories must differ from the
+ // To make the test observable, these directories must differ from the
// PYPKGPREFIX default value in vardefs.go.
t.CreateFileLines("lang/python28/Makefile", MkCvsID)
t.CreateFileLines("lang/python33/Makefile", MkCvsID)
@@ -112,6 +138,20 @@ func (s *Suite) Test_VarTypeRegistry_enu
test("PYPKGPREFIX", "enum: py28 py33 (system-provided)")
}
+func (s *Suite) Test_VarTypeRegistry_enumFromDirs__no_testing(c *check.C) {
+ t := s.Init(c)
+
+ G.Testing = false
+
+ t.ExpectFatal(
+ func() {
+ G.Pkgsrc.vartypes.enumFromDirs(
+ &G.Pkgsrc, "category", `^pack.*`, "$0", "default")
+ },
+ "FATAL: category: Must contain at least 1 "+
+ "subdirectory matching \"^pack.*\".")
+}
+
func (s *Suite) Test_VarTypeRegistry_enumFromFiles(c *check.C) {
t := s.Init(c)
@@ -130,6 +170,20 @@ func (s *Suite) Test_VarTypeRegistry_enu
test("OPSYS", "enum: NetBSD SunOS (system-provided)")
}
+func (s *Suite) Test_VarTypeRegistry_enumFromFiles__no_testing(c *check.C) {
+ t := s.Init(c)
+
+ G.Testing = false
+
+ t.ExpectFatal(
+ func() {
+ G.Pkgsrc.vartypes.enumFromFiles(
+ "mk/platform", `^(\w+)\.mk$`, "$1", "default")
+ },
+ "FATAL: mk/platform: Must contain at least 1 "+
+ "file matching \"^(\\\\w+)\\\\.mk$\".")
+}
+
func (s *Suite) Test_VarTypeRegistry_Init(c *check.C) {
t := s.Init(c)
@@ -178,7 +232,8 @@ func (s *Suite) Test_VarTypeRegistry_Ini
G.Testing = false
t.ExpectFatal(
t.FinishSetUp,
- "FATAL: ~/editors/emacs/modules.mk: Cannot be read.")
+ "FATAL: ~/mk/compiler.mk: Must contain at least 1 "+
+ "variable definition for _COMPILERS or _PSEUDO_COMPILERS.")
}
func (s *Suite) Test_VarTypeRegistry_Init__MASTER_SITES(c *check.C) {
Index: pkgsrc/pkgtools/pkglint/files/vartypecheck.go
diff -u pkgsrc/pkgtools/pkglint/files/vartypecheck.go:1.67 pkgsrc/pkgtools/pkglint/files/vartypecheck.go:1.68
--- pkgsrc/pkgtools/pkglint/files/vartypecheck.go:1.67 Sat Nov 23 23:35:56 2019
+++ pkgsrc/pkgtools/pkglint/files/vartypecheck.go Wed Nov 27 22:10:07 2019
@@ -726,8 +726,6 @@ func (cv *VartypeCheck) Homepage() {
"Defining MASTER_SITES=${HOMEPAGE} is ok, though.")
if baseURL != "" {
fix.Replace(wrong, fixedURL)
- } else {
- fix.Anyway()
}
fix.Apply()
}
@@ -1083,9 +1081,10 @@ func (cv *VartypeCheck) Pkgpath() {
cv.MkLine.Errorf("A main pkgsrc package must not depend on a pkgsrc-wip package.")
}
- if !G.Pkgsrc.File(pkgpath.JoinNoClean("Makefile")).IsFile() {
+ pkgdir := G.Pkgsrc.File(pkgpath)
+ if !pkgdir.JoinNoClean("Makefile").IsFile() {
cv.MkLine.Errorf("There is no package in %q.",
- cv.MkLine.PathToFile(G.Pkgsrc.File(pkgpath)))
+ cv.MkLine.PathToFile(pkgdir))
return
}
Index: pkgsrc/pkgtools/pkglint/files/getopt/getopt_test.go
diff -u pkgsrc/pkgtools/pkglint/files/getopt/getopt_test.go:1.12 pkgsrc/pkgtools/pkglint/files/getopt/getopt_test.go:1.13
--- pkgsrc/pkgtools/pkglint/files/getopt/getopt_test.go:1.12 Tue Nov 19 06:51:38 2019
+++ pkgsrc/pkgtools/pkglint/files/getopt/getopt_test.go Wed Nov 27 22:10:07 2019
@@ -409,8 +409,8 @@ func (s *Suite) Test_Options_Help__with_
" (Prefix a flag with \"no-\" to disable it.)\n")
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
Index: pkgsrc/pkgtools/pkglint/files/histogram/histogram_test.go
diff -u pkgsrc/pkgtools/pkglint/files/histogram/histogram_test.go:1.3 pkgsrc/pkgtools/pkglint/files/histogram/histogram_test.go:1.4
--- pkgsrc/pkgtools/pkglint/files/histogram/histogram_test.go:1.3 Tue Nov 19 06:51:38 2019
+++ pkgsrc/pkgtools/pkglint/files/histogram/histogram_test.go Wed Nov 27 22:10:07 2019
@@ -28,8 +28,8 @@ func (s *Suite) Test_Histogram(c *check.
"caption 2 two\n")
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
Index: pkgsrc/pkgtools/pkglint/files/intqa/ideas.go
diff -u pkgsrc/pkgtools/pkglint/files/intqa/ideas.go:1.2 pkgsrc/pkgtools/pkglint/files/intqa/ideas.go:1.3
--- pkgsrc/pkgtools/pkglint/files/intqa/ideas.go:1.2 Sun Nov 17 01:26:26 2019
+++ pkgsrc/pkgtools/pkglint/files/intqa/ideas.go Wed Nov 27 22:10:07 2019
@@ -5,8 +5,5 @@ package intqa
// then the current package, then a package-qualified identifier.
// As if there were a "_ = XYZ" at the beginning of the function.
-// XXX: All methods should be defined in the same file as their receiver type.
-// If that is not possible, there should only be a small list of exceptions.
-
// XXX: If there is a constructor for a type, only that constructor may be used
// for constructing objects. All other forms (var x Type; x := &Type{}) should be forbidden.
Index: pkgsrc/pkgtools/pkglint/files/licenses/licenses_test.go
diff -u pkgsrc/pkgtools/pkglint/files/licenses/licenses_test.go:1.7 pkgsrc/pkgtools/pkglint/files/licenses/licenses_test.go:1.8
--- pkgsrc/pkgtools/pkglint/files/licenses/licenses_test.go:1.7 Tue Nov 19 06:51:38 2019
+++ pkgsrc/pkgtools/pkglint/files/licenses/licenses_test.go Wed Nov 27 22:10:07 2019
@@ -131,8 +131,8 @@ func Test(t *testing.T) {
check.TestingT(t)
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
Index: pkgsrc/pkgtools/pkglint/files/pkgver/vercmp_test.go
diff -u pkgsrc/pkgtools/pkglint/files/pkgver/vercmp_test.go:1.7 pkgsrc/pkgtools/pkglint/files/pkgver/vercmp_test.go:1.8
--- pkgsrc/pkgtools/pkglint/files/pkgver/vercmp_test.go:1.7 Tue Nov 19 06:51:38 2019
+++ pkgsrc/pkgtools/pkglint/files/pkgver/vercmp_test.go Wed Nov 27 22:10:07 2019
@@ -93,8 +93,8 @@ func (s *Suite) Test_newVersion(c *check
&version{[]int{3, 0, 5, 0, 4, 5, 22, 1710}, 0})
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
Index: pkgsrc/pkgtools/pkglint/files/textproc/lexer_test.go
diff -u pkgsrc/pkgtools/pkglint/files/textproc/lexer_test.go:1.8 pkgsrc/pkgtools/pkglint/files/textproc/lexer_test.go:1.9
--- pkgsrc/pkgtools/pkglint/files/textproc/lexer_test.go:1.8 Tue Nov 19 06:51:39 2019
+++ pkgsrc/pkgtools/pkglint/files/textproc/lexer_test.go Wed Nov 27 22:10:08 2019
@@ -413,8 +413,8 @@ func (s *Suite) Test__Alpha(c *check.C)
c.Check(set.Contains('z'), equals, true)
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
Index: pkgsrc/pkgtools/pkglint/files/trace/tracing_test.go
diff -u pkgsrc/pkgtools/pkglint/files/trace/tracing_test.go:1.5 pkgsrc/pkgtools/pkglint/files/trace/tracing_test.go:1.6
--- pkgsrc/pkgtools/pkglint/files/trace/tracing_test.go:1.5 Tue Nov 19 06:51:39 2019
+++ pkgsrc/pkgtools/pkglint/files/trace/tracing_test.go Wed Nov 27 22:10:08 2019
@@ -143,8 +143,8 @@ func (str) String() string {
return "It's a string"
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
Added files:
Index: pkgsrc/pkgtools/pkglint/files/mklexer.go
diff -u /dev/null pkgsrc/pkgtools/pkglint/files/mklexer.go:1.1
--- /dev/null Wed Nov 27 22:10:08 2019
+++ pkgsrc/pkgtools/pkglint/files/mklexer.go Wed Nov 27 22:10:07 2019
@@ -0,0 +1,414 @@
+package pkglint
+
+import (
+ "netbsd.org/pkglint/regex"
+ "netbsd.org/pkglint/textproc"
+ "strings"
+)
+
+// MkLexer splits a text into a sequence of variable uses
+// and plain text.
+type MkLexer struct {
+ lexer *textproc.Lexer
+ line *Line
+}
+
+func NewMkLexer(text string, line *Line) *MkLexer {
+ return &MkLexer{textproc.NewLexer(text), line}
+}
+
+// MkTokens splits a text like in the following example:
+// Text${VAR:Mmodifier}${VAR2}more text${VAR3}
+// into tokens like these:
+// Text
+// ${VAR:Mmodifier}
+// ${VAR2}
+// more text
+// ${VAR3}
+func (p *MkLexer) MkTokens() ([]*MkToken, string) {
+ lexer := p.lexer
+
+ var tokens []*MkToken
+ for !lexer.EOF() {
+ mark := lexer.Mark()
+ if varuse := p.VarUse(); varuse != nil {
+ tokens = append(tokens, &MkToken{Text: lexer.Since(mark), Varuse: varuse})
+ continue
+ }
+
+ for lexer.NextBytesFunc(func(b byte) bool { return b != '$' }) != "" || lexer.SkipString("$$") {
+ }
+ text := lexer.Since(mark)
+ if text != "" {
+ tokens = append(tokens, &MkToken{Text: text})
+ continue
+ }
+
+ break
+ }
+ return tokens, lexer.Rest()
+}
+
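
The doc comment above can be demonstrated with a deliberately simplified, self-contained scanner. It only understands the ${...} form and ignores nesting, $(...) and the "$$" escape, so it illustrates the token split but is in no way a replacement for MkLexer.

package main

import (
    "fmt"
    "strings"
)

// tokens splits a line into alternating plain-text runs and ${...} uses.
func tokens(text string) []string {
    var result []string
    for text != "" {
        i := strings.Index(text, "${")
        if i < 0 {
            result = append(result, text)
            break
        }
        if i > 0 {
            result = append(result, text[:i])
        }
        end := strings.IndexByte(text[i:], '}')
        if end < 0 {
            result = append(result, text[i:]) // unclosed variable use
            break
        }
        result = append(result, text[i:i+end+1])
        text = text[i+end+1:]
    }
    return result
}

func main() {
    fmt.Printf("%q\n", tokens("Text${VAR:Mmodifier}${VAR2}more text${VAR3}"))
    // ["Text" "${VAR:Mmodifier}" "${VAR2}" "more text" "${VAR3}"]
}
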
+func (p *MkLexer) VarUse() *MkVarUse {
+ rest := p.lexer.Rest()
+ if len(rest) < 2 || rest[0] != '$' {
+ return nil
+ }
+
+ switch rest[1] {
+ case '{', '(':
+ return p.varUseBrace(rest[1] == '(')
+
+ case '$':
+ // This is an escaped dollar character and not a variable use.
+ return nil
+
+ case '@', '<', ' ':
+ // These variable names are known to exist.
+ //
+ // Many others are also possible but not used in practice.
+ // In particular, when parsing the :C or :S modifier,
+ // the $ must not be interpreted as a variable name,
+ // even when it looks like $/ could refer to the "/" variable.
+ //
+ // TODO: Find out whether $" is a variable use when it appears in the :M modifier.
+ p.lexer.Skip(2)
+ return &MkVarUse{rest[1:2], nil}
+
+ default:
+ return p.varUseAlnum()
+ }
+}
+
+// varUseBrace parses:
+// ${VAR}
+// ${arbitrary text:L}
+// ${variable with invalid chars}
+// $(PARENTHESES)
+// ${VAR:Mpattern:C,:,colon,g:Q:Q:Q}
+func (p *MkLexer) varUseBrace(usingRoundParen bool) *MkVarUse {
+ lexer := p.lexer
+
+ beforeDollar := lexer.Mark()
+ lexer.Skip(2)
+
+ closing := byte('}')
+ if usingRoundParen {
+ closing = ')'
+ }
+
+ beforeVarname := lexer.Mark()
+ varname := p.Varname()
+ p.varUseText(closing)
+ varExpr := lexer.Since(beforeVarname)
+
+ modifiers := p.VarUseModifiers(varExpr, closing)
+
+ closed := lexer.SkipByte(closing)
+
+ if p.line != nil {
+ if !closed {
+ p.line.Warnf("Missing closing %q for %q.", string(rune(closing)), varExpr)
+ }
+
+ if usingRoundParen && closed {
+ parenVaruse := lexer.Since(beforeDollar)
+ edit := []byte(parenVaruse)
+ edit[1] = '{'
+ edit[len(edit)-1] = '}'
+ bracesVaruse := string(edit)
+
+ fix := p.line.Autofix()
+ fix.Warnf("Please use curly braces {} instead of round parentheses () for %s.", varExpr)
+ fix.Replace(parenVaruse, bracesVaruse)
+ fix.Apply()
+ }
+
+ if len(varExpr) > len(varname) && !(&MkVarUse{varExpr, modifiers}).IsExpression() {
+ p.line.Warnf("Invalid part %q after variable name %q.", varExpr[len(varname):], varname)
+ }
+ }
+
+ return &MkVarUse{varExpr, modifiers}
+}
+
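
The autofix only needs to replace the two delimiter bytes; the variable name and its modifiers stay untouched. A tiny standalone demo of that textual rewrite (bracesForParens is an invented name; the real code routes the replacement through the Autofix machinery):

package main

import "fmt"

// bracesForParens turns "$(VAR...)" into "${VAR...}" by editing only the
// second and the last byte of the variable use.
func bracesForParens(parenVaruse string) string {
    edit := []byte(parenVaruse)
    edit[1] = '{'
    edit[len(edit)-1] = '}'
    return string(edit)
}

func main() {
    fmt.Println(bracesForParens("$(PREFIX)")) // ${PREFIX}
    fmt.Println(bracesForParens("$(VAR:Q)"))  // ${VAR:Q}
}
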
+func (p *MkLexer) Varname() string {
+ lexer := p.lexer
+
+ // TODO: duplicated code in MatchVarassign
+ mark := lexer.Mark()
+ lexer.SkipByte('.')
+ for lexer.NextBytesSet(VarbaseBytes) != "" || p.VarUse() != nil {
+ }
+ if lexer.SkipByte('.') || hasPrefix(lexer.Since(mark), "SITES_") {
+ for lexer.NextBytesSet(VarparamBytes) != "" || p.VarUse() != nil {
+ }
+ }
+ return lexer.Since(mark)
+}
+
+// varUseText parses any text up to the next colon or closing mark.
+// Nested variable uses are parsed as well.
+//
+// This is used for the :L and :? modifiers since they accept arbitrary
+// text as the "variable name" and effectively interpret it as the variable
+// value instead.
+func (p *MkLexer) varUseText(closing byte) string {
+ lexer := p.lexer
+ start := lexer.Mark()
+ re := regcomp(regex.Pattern(condStr(closing == '}', `^([^$:}]|\$\$)+`, `^([^$:)]|\$\$)+`)))
+ for p.VarUse() != nil || lexer.SkipRegexp(re) {
+ }
+ return lexer.Since(start)
+}
+
+// VarUseModifiers parses the modifiers of a variable being used, such as :Q, :Mpattern.
+//
+// See the bmake manual page.
+func (p *MkLexer) VarUseModifiers(varname string, closing byte) []MkVarUseModifier {
+ lexer := p.lexer
+
+ var modifiers []MkVarUseModifier
+ // The :S and :C modifiers may be chained without using the : as separator.
+ mayOmitColon := false
+
+ for lexer.SkipByte(':') || mayOmitColon {
+ modifier := p.varUseModifier(varname, closing)
+ if modifier != "" {
+ modifiers = append(modifiers, MkVarUseModifier{modifier})
+ }
+ mayOmitColon = modifier != "" && (modifier[0] == 'S' || modifier[0] == 'C')
+ }
+ return modifiers
+}
+
+// varUseModifier parses a single variable modifier such as :Q or :S,from,to,.
+// The actual parsing starts after the leading colon.
+func (p *MkLexer) varUseModifier(varname string, closing byte) string {
+ lexer := p.lexer
+ mark := lexer.Mark()
+
+ switch lexer.PeekByte() {
+ case 'E', 'H', 'L', 'O', 'Q', 'R', 'T', 's', 't', 'u':
+ mod := lexer.NextBytesSet(textproc.Alnum)
+
+ switch mod {
+ case
+ "E", // Extension, e.g. path/file.suffix => suffix
+ "H", // Head, e.g. dir/subdir/file.suffix => dir/subdir
+ "L", // XXX: Shouldn't this be handled specially?
+ "O", // Order alphabetically
+ "Ox", // Shuffle
+ "Q", // Quote shell meta-characters
+ "R", // Strip the file suffix, e.g. path/file.suffix => file
+ "T", // Basename, e.g. path/file.suffix => file.suffix
+ "sh", // Evaluate the variable value as shell command
+ "tA", // Try to convert to absolute path
+ "tW", // Causes the value to be treated as a single word
+ "tl", // To lowercase
+ "tu", // To uppercase
+ "tw", // Causes the value to be treated as list of words
+ "u": // Remove adjacent duplicate words (like uniq(1))
+ return mod
+ }
+
+ if hasPrefix(mod, "ts") {
+ // See devel/bmake/files/var.c:/case 't'
+ sep := mod[2:] + p.varUseText(closing)
+ switch {
+ case sep == "":
+ lexer.SkipString(":")
+ case len(sep) == 1:
+ break
+ case matches(sep, `^\\\d+`):
+ break
+ default:
+ if p.line != nil {
+ p.line.Warnf("Invalid separator %q for :ts modifier of %q.", sep, varname)
+ p.line.Explain(
+ "The separator for the :ts modifier must be either a single character",
+ "or an escape sequence like \\t or \\n or an octal or decimal escape",
+ "sequence; see the bmake man page for further details.")
+ }
+ }
+ return lexer.Since(mark)
+ }
+
+ case '=', 'D', 'M', 'N', 'U':
+ lexer.Skip(1)
+ re := regcomp(regex.Pattern(condStr(closing == '}', `^([^$:\\}]|\$\$|\\.)+`, `^([^$:\\)]|\$\$|\\.)+`)))
+ for p.VarUse() != nil || lexer.SkipRegexp(re) {
+ }
+ arg := lexer.Since(mark)
+ return strings.Replace(arg, "\\:", ":", -1)
+
+ case 'C', 'S':
+ if ok, _, _, _, _ := p.varUseModifierSubst(closing); ok {
+ return lexer.Since(mark)
+ }
+
+ case '@':
+ if p.varUseModifierAt(lexer, varname) {
+ return lexer.Since(mark)
+ }
+
+ case '[':
+ if lexer.SkipRegexp(regcomp(`^\[(?:[-.\d]+|#)\]`)) {
+ return lexer.Since(mark)
+ }
+
+ case '?':
+ lexer.Skip(1)
+ p.varUseText(closing)
+ if lexer.SkipByte(':') {
+ p.varUseText(closing)
+ return lexer.Since(mark)
+ }
+ }
+
+ lexer.Reset(mark)
+
+ re := regcomp(regex.Pattern(condStr(closing == '}', `^([^:$}]|\$\$)+`, `^([^:$)]|\$\$)+`)))
+ for p.VarUse() != nil || lexer.SkipRegexp(re) {
+ }
+ modifier := lexer.Since(mark)
+
+ // ${SOURCES:%.c=%.o} or ${:!uname -a!:[2]}
+ if contains(modifier, "=") || (hasPrefix(modifier, "!") && hasSuffix(modifier, "!")) {
+ return modifier
+ }
+
+ if p.line != nil && modifier != "" {
+ p.line.Warnf("Invalid variable modifier %q for %q.", modifier, varname)
+ }
+
+ return ""
+}
+
+// varUseModifierSubst parses a :S,from,to, or a :C,from,to, modifier.
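+//
+// For example, parsing the modifier "S,from,to,g" results in
+// ok = true, regex = false, from = "from", to = "to", options = "g".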
+func (p *MkLexer) varUseModifierSubst(closing byte) (ok bool, regex bool, from string, to string, options string) {
+ lexer := p.lexer
+ regex = lexer.PeekByte() == 'C'
+ lexer.Skip(1 /* the initial S or C */)
+
+ sep := lexer.PeekByte() // bmake allows _any_ separator, even letters.
+ if sep == -1 || byte(sep) == closing {
+ return
+ }
+
+ lexer.Skip(1)
+ separator := byte(sep)
+
+ unescape := func(s string) string {
+ return strings.Replace(s, "\\"+string(separator), string(separator), -1)
+ }
+
+ isOther := func(b byte) bool {
+ return b != separator && b != '$' && b != '\\'
+ }
+
+ skipOther := func() {
+ for {
+ switch {
+
+ case p.VarUse() != nil:
+ break
+
+ case lexer.SkipString("$$"):
+ break
+
+ case len(lexer.Rest()) >= 2 && lexer.PeekByte() == '\\' && separator != '\\':
+ _ = lexer.Skip(2)
+
+ case lexer.NextBytesFunc(isOther) != "":
+ break
+
+ default:
+ return
+ }
+ }
+ }
+
+ fromStart := lexer.Mark()
+ lexer.SkipByte('^')
+ skipOther()
+ lexer.SkipByte('$')
+ from = unescape(lexer.Since(fromStart))
+
+ if !lexer.SkipByte(separator) {
+ return
+ }
+
+ toStart := lexer.Mark()
+ skipOther()
+ to = unescape(lexer.Since(toStart))
+
+ if !lexer.SkipByte(separator) {
+ return
+ }
+
+ optionsStart := lexer.Mark()
+ lexer.NextBytesFunc(func(b byte) bool { return b == '1' || b == 'g' || b == 'W' })
+ options = lexer.Since(optionsStart)
+
+ ok = true
+ return
+}
+
+// varUseModifierAt parses a variable modifier like ":@v@echo ${v};@",
+// which expands the variable value in a loop.
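+//
+// For example, ${FILES:@f@${f}.o@} appends ".o" to each word of FILES.
+// This method only parses the modifier; it does not evaluate it.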
+func (p *MkLexer) varUseModifierAt(lexer *textproc.Lexer, varname string) bool {
+ lexer.Skip(1 /* the initial @ */)
+
+ loopVar := lexer.NextBytesSet(AlnumDot)
+ if loopVar == "" || !lexer.SkipByte('@') {
+ return false
+ }
+
+ re := regcomp(`^([^$@\\]|\\.)+`)
+ for p.VarUse() != nil || lexer.SkipString("$$") || lexer.SkipRegexp(re) {
+ }
+
+ if !lexer.SkipByte('@') && p.line != nil {
+ p.line.Warnf("Modifier ${%s:@%s@...@} is missing the final \"@\".", varname, loopVar)
+ }
+
+ return true
+}
+
+func (p *MkLexer) varUseAlnum() *MkVarUse {
+ lexer := p.lexer
+
+ apparentVarname := textproc.NewLexer(lexer.Rest()[1:]).NextBytesSet(textproc.AlnumU)
+ if apparentVarname == "" {
+ return nil
+ }
+
+ lexer.Skip(2)
+
+ if p.line != nil {
+ if len(apparentVarname) > 1 {
+ p.line.Errorf("$%[1]s is ambiguous. Use ${%[1]s} if you mean a Make variable or $$%[1]s if you mean a shell variable.",
+ apparentVarname)
+ p.line.Explain(
+ "Only the first letter after the dollar is the variable name.",
+ "Everything following it is normal text, even if it looks like a variable name to human readers.")
+ } else {
+ p.line.Warnf("$%[1]s is ambiguous. Use ${%[1]s} if you mean a Make variable or $$%[1]s if you mean a shell variable.", apparentVarname)
+ p.line.Explain(
+ "In its current form, this variable is parsed as a Make variable.",
+ "For human readers though, $x looks more like a shell variable than a Make variable,",
+ "since Make variables are usually written using braces (BSD-style) or parentheses (GNU-style).")
+ }
+ }
+
+ return &MkVarUse{apparentVarname[:1], nil}
+}
+
+func (p *MkLexer) EOF() bool {
+ return p.lexer.EOF()
+}
+
+func (p *MkLexer) Rest() string {
+ return p.lexer.Rest()
+}
Index: pkgsrc/pkgtools/pkglint/files/mklexer_test.go
diff -u /dev/null pkgsrc/pkgtools/pkglint/files/mklexer_test.go:1.1
--- /dev/null Wed Nov 27 22:10:08 2019
+++ pkgsrc/pkgtools/pkglint/files/mklexer_test.go Wed Nov 27 22:10:07 2019
@@ -0,0 +1,750 @@
+package pkglint
+
+import "gopkg.in/check.v1"
+
+func (s *Suite) Test_MkLexer_MkTokens(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ testRest := func(input string, expectedTokens []*MkToken, expectedRest string) {
+ line := t.NewLines("Test_MkLexer_MkTokens.mk", input).Lines[0]
+ p := NewMkLexer(input, line)
+ actualTokens, rest := p.MkTokens()
+ t.CheckDeepEquals(actualTokens, expectedTokens)
+ for i, expectedToken := range expectedTokens {
+ if i < len(actualTokens) {
+ t.CheckDeepEquals(*actualTokens[i], *expectedToken)
+ t.CheckDeepEquals(actualTokens[i].Varuse, expectedToken.Varuse)
+ }
+ }
+ t.CheckEquals(rest, expectedRest)
+ }
+ test := func(input string, expectedToken *MkToken) {
+ testRest(input, b.Tokens(expectedToken), "")
+ }
+ literal := b.TextToken
+ varuse := b.VaruseToken
+
+ // Everything except VarUses is passed through unmodified.
+
+ test("literal",
+ literal("literal"))
+
+ test("\\/share\\/ { print \"share directory\" }",
+ literal("\\/share\\/ { print \"share directory\" }"))
+
+ test("find . -name \\*.orig -o -name \\*.pre",
+ literal("find . -name \\*.orig -o -name \\*.pre"))
+
+ test("-e 's|\\$${EC2_HOME.*}|EC2_HOME}|g'",
+ literal("-e 's|\\$${EC2_HOME.*}|EC2_HOME}|g'"))
+
+ test("$$var1 $$var2 $$? $$",
+ literal("$$var1 $$var2 $$? $$"))
+
+ testRest("hello, ${W:L:tl}orld",
+ b.Tokens(
+ literal("hello, "),
+ varuse("W", "L", "tl"),
+ literal("orld")),
+ "")
+
+ testRest("ftp://${PKGNAME}/ ${MASTER_SITES:=subdir/}",
+ b.Tokens(
+ literal("ftp://"),
+ varuse("PKGNAME"),
+ literal("/ "),
+ varuse("MASTER_SITES", "=subdir/")),
+ "")
+
+ testRest("${VAR:S,a,b,c,d,e,f}",
+ b.Tokens(b.VaruseTextToken("${VAR:S,a,b,c,d,e,f}", "VAR", "S,a,b,")),
+ "")
+ t.CheckOutputLines(
+ "WARN: Test_MkLexer_MkTokens.mk:1: Invalid variable modifier \"c,d,e,f\" for \"VAR\".")
+
+ testRest("Text${VAR:Mmodifier}${VAR2}more text${VAR3}",
+ b.Tokens(
+ literal("Text"),
+ varuse("VAR", "Mmodifier"),
+ varuse("VAR2"),
+ literal("more text"),
+ varuse("VAR3")),
+ "")
+}
+
+func (s *Suite) Test_MkLexer_VarUse(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+ varuse := b.VaruseToken
+ varuseText := b.VaruseTextToken
+
+ testRest := func(input string, expectedTokens []*MkToken, expectedRest string, diagnostics ...string) {
+ line := t.NewLines("Test_MkLexer_VarUse.mk", input).Lines[0]
+ p := NewMkLexer(input, line)
+
+ actualTokens, rest := p.MkTokens()
+
+ t.CheckDeepEquals(actualTokens, expectedTokens)
+ for i, expectedToken := range expectedTokens {
+ if i < len(actualTokens) {
+ t.CheckDeepEquals(*actualTokens[i], *expectedToken)
+ t.CheckDeepEquals(actualTokens[i].Varuse, expectedToken.Varuse)
+ }
+ }
+ t.CheckEquals(rest, expectedRest)
+ t.CheckOutput(diagnostics)
+ }
+ test := func(input string, expectedToken *MkToken, diagnostics ...string) {
+ testRest(input, b.Tokens(expectedToken), "", diagnostics...)
+ }
+
+ t.Use(testRest, test, varuse, varuseText)
+
+ test("${VARIABLE}",
+ varuse("VARIABLE"))
+
+ test("${VARIABLE.param}",
+ varuse("VARIABLE.param"))
+
+ test("${VARIABLE.${param}}",
+ varuse("VARIABLE.${param}"))
+
+ test("${VARIABLE.hicolor-icon-theme}",
+ varuse("VARIABLE.hicolor-icon-theme"))
+
+ test("${VARIABLE.gtk+extra}",
+ varuse("VARIABLE.gtk+extra"))
+
+ test("${VARIABLE:S/old/new/}",
+ varuse("VARIABLE", "S/old/new/"))
+
+ test("${GNUSTEP_LFLAGS:S/-L//g}",
+ varuse("GNUSTEP_LFLAGS", "S/-L//g"))
+
+ test("${SUSE_VERSION:S/.//}",
+ varuse("SUSE_VERSION", "S/.//"))
+
+ test("${MASTER_SITE_GNOME:=sources/alacarte/0.13/}",
+ varuse("MASTER_SITE_GNOME", "=sources/alacarte/0.13/"))
+
+ test("${INCLUDE_DIRS:H:T}",
+ varuse("INCLUDE_DIRS", "H", "T"))
+
+ test("${A.${B.${C.${D}}}}",
+ varuse("A.${B.${C.${D}}}"))
+
+ test("${RUBY_VERSION:C/([0-9]+)\\.([0-9]+)\\.([0-9]+)/\\1/}",
+ varuse("RUBY_VERSION", "C/([0-9]+)\\.([0-9]+)\\.([0-9]+)/\\1/"))
+
+ test("${PERL5_${_var_}:Q}",
+ varuse("PERL5_${_var_}", "Q"))
+
+ test("${PKGNAME_REQD:C/(^.*-|^)py([0-9][0-9])-.*/\\2/}",
+ varuse("PKGNAME_REQD", "C/(^.*-|^)py([0-9][0-9])-.*/\\2/"))
+
+ test("${PYLIB:S|/|\\\\/|g}",
+ varuse("PYLIB", "S|/|\\\\/|g"))
+
+ test("${PKGNAME_REQD:C/ruby([0-9][0-9]+)-.*/\\1/}",
+ varuse("PKGNAME_REQD", "C/ruby([0-9][0-9]+)-.*/\\1/"))
+
+ test("${RUBY_SHLIBALIAS:S/\\//\\\\\\//}",
+ varuse("RUBY_SHLIBALIAS", "S/\\//\\\\\\//"))
+
+ test("${RUBY_VER_MAP.${RUBY_VER}:U${RUBY_VER}}",
+ varuse("RUBY_VER_MAP.${RUBY_VER}", "U${RUBY_VER}"))
+
+ test("${RUBY_VER_MAP.${RUBY_VER}:U18}",
+ varuse("RUBY_VER_MAP.${RUBY_VER}", "U18"))
+
+ test("${CONFIGURE_ARGS:S/ENABLE_OSS=no/ENABLE_OSS=yes/g}",
+ varuse("CONFIGURE_ARGS", "S/ENABLE_OSS=no/ENABLE_OSS=yes/g"))
+
+ test("${PLIST_RUBY_DIRS:S,DIR=\"PREFIX/,DIR=\",}",
+ varuse("PLIST_RUBY_DIRS", "S,DIR=\"PREFIX/,DIR=\","))
+
+ test("${LDFLAGS:S/-Wl,//g:Q}",
+ varuse("LDFLAGS", "S/-Wl,//g", "Q"))
+
+ test("${_PERL5_REAL_PACKLIST:S/^/${DESTDIR}/}",
+ varuse("_PERL5_REAL_PACKLIST", "S/^/${DESTDIR}/"))
+
+ test("${_PYTHON_VERSION:C/^([0-9])/\\1./1}",
+ varuse("_PYTHON_VERSION", "C/^([0-9])/\\1./1"))
+
+ test("${PKGNAME:S/py${_PYTHON_VERSION}/py${i}/}",
+ varuse("PKGNAME", "S/py${_PYTHON_VERSION}/py${i}/"))
+
+ test("${PKGNAME:C/-[0-9].*$/-[0-9]*/}",
+ varuse("PKGNAME", "C/-[0-9].*$/-[0-9]*/"))
+
+ // TODO: Does the $@ refer to ${.TARGET}, or is it just an unmatchable
+ // regular expression?
+ test("${PKGNAME:C/$@/target?/}",
+ varuse("PKGNAME", "C/$@/target?/"))
+
+ test("${PKGNAME:S/py${_PYTHON_VERSION}/py${i}/:C/-[0-9].*$/-[0-9]*/}",
+ varuse("PKGNAME", "S/py${_PYTHON_VERSION}/py${i}/", "C/-[0-9].*$/-[0-9]*/"))
+
+ test("${_PERL5_VARS:tl:S/^/-V:/}",
+ varuse("_PERL5_VARS", "tl", "S/^/-V:/"))
+
+ test("${_PERL5_VARS_OUT:M${_var_:tl}=*:S/^${_var_:tl}=${_PERL5_PREFIX:=/}//}",
+ varuse("_PERL5_VARS_OUT", "M${_var_:tl}=*", "S/^${_var_:tl}=${_PERL5_PREFIX:=/}//"))
+
+ test("${RUBY${RUBY_VER}_PATCHLEVEL}",
+ varuse("RUBY${RUBY_VER}_PATCHLEVEL"))
+
+ test("${DISTFILES:M*.gem}",
+ varuse("DISTFILES", "M*.gem"))
+
+ test("${LOCALBASE:S^/^_^}",
+ varuse("LOCALBASE", "S^/^_^"))
+
+ test("${SOURCES:%.c=%.o}",
+ varuse("SOURCES", "%.c=%.o"))
+
+ test("${GIT_TEMPLATES:@.t.@ ${EGDIR}/${GIT_TEMPLATEDIR}/${.t.} ${PREFIX}/${GIT_CORE_TEMPLATEDIR}/${.t.} @:M*}",
+ varuse("GIT_TEMPLATES", "@.t.@ ${EGDIR}/${GIT_TEMPLATEDIR}/${.t.} ${PREFIX}/${GIT_CORE_TEMPLATEDIR}/${.t.} @", "M*"))
+
+ test("${DISTNAME:C:_:-:}",
+ varuse("DISTNAME", "C:_:-:"))
+
+ test("${CF_FILES:H:O:u:S@^@${PKG_SYSCONFDIR}/@}",
+ varuse("CF_FILES", "H", "O", "u", "S@^@${PKG_SYSCONFDIR}/@"))
+
+ test("${ALT_GCC_RTS:S%${LOCALBASE}%%:S%/%%}",
+ varuse("ALT_GCC_RTS", "S%${LOCALBASE}%%", "S%/%%"))
+
+ test("${PREFIX:C;///*;/;g:C;/$;;}",
+ varuse("PREFIX", "C;///*;/;g", "C;/$;;"))
+
+ test("${GZIP_CMD:[1]:Q}",
+ varuse("GZIP_CMD", "[1]", "Q"))
+
+ test("${RUBY_RAILS_SUPPORTED:[#]}",
+ varuse("RUBY_RAILS_SUPPORTED", "[#]"))
+
+ test("${GZIP_CMD:[asdf]:Q}",
+ varuseText("${GZIP_CMD:[asdf]:Q}", "GZIP_CMD", "Q"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid variable modifier \"[asdf]\" for \"GZIP_CMD\".")
+
+ test("${DISTNAME:C/-[0-9]+$$//:C/_/-/}",
+ varuse("DISTNAME", "C/-[0-9]+$$//", "C/_/-/"))
+
+ test("${DISTNAME:slang%=slang2%}",
+ varuse("DISTNAME", "slang%=slang2%"))
+
+ test("${OSMAP_SUBSTVARS:@v@-e 's,\\@${v}\\@,${${v}},g' @}",
+ varuse("OSMAP_SUBSTVARS", "@v@-e 's,\\@${v}\\@,${${v}},g' @"))
+
+ test("${BRANDELF:D${BRANDELF} -t Linux ${LINUX_LDCONFIG}:U${TRUE}}",
+ varuse("BRANDELF", "D${BRANDELF} -t Linux ${LINUX_LDCONFIG}", "U${TRUE}"))
+
+ test("${${_var_}.*}",
+ varuse("${_var_}.*"))
+
+ test("${OPTIONS:@opt@printf 'Option %s is selected\n' ${opt:Q}';@}",
+ varuse("OPTIONS", "@opt@printf 'Option %s is selected\n' ${opt:Q}';@"))
+
+ /* weird features */
+ test("${${EMACS_VERSION_MAJOR}>22:?@comment :}",
+ varuse("${EMACS_VERSION_MAJOR}>22", "?@comment :"))
+
+ test("${empty(CFLAGS):?:-cflags ${CFLAGS:Q}}",
+ varuse("empty(CFLAGS)", "?:-cflags ${CFLAGS:Q}"))
+
+ test("${${PKGSRC_COMPILER}==gcc:?gcc:cc}",
+ varuse("${PKGSRC_COMPILER}==gcc", "?gcc:cc"))
+
+ test("${${XKBBASE}/xkbcomp:L:Q}",
+ varuse("${XKBBASE}/xkbcomp", "L", "Q"))
+
+ test("${${PKGBASE} ${PKGVERSION}:L}",
+ varuse("${PKGBASE} ${PKGVERSION}", "L"))
+
+ // The variable name is optional; the variable with the empty name always
+ // evaluates to the empty string. Bmake actively prevents this variable from
+ // ever being defined. Therefore the :U branch is always taken, and this
+ // in turn is used to implement the variables from the .for loops.
+ test("${:U}",
+ varuse("", "U"))
+
+ test("${:Ufixed value}",
+ varuse("", "Ufixed value"))
+
+ // This complicated expression returns the major.minor.patch version
+ // of the package given in ${d}.
+ //
+ // The :L modifier interprets the variable name not as a variable name
+ // but takes it as the variable value. Followed by the :sh modifier,
+ // this combination evaluates to the output of pkg_info.
+ //
+ // In this output, all non-digit characters are replaced with spaces so
+ // that the remaining value is a space-separated list of version parts.
+ // From these parts, the first 3 are taken and joined using a dot as separator.
+ test("${${${PKG_INFO} -E ${d} || echo:L:sh}:L:C/[^[0-9]]*/ /g:[1..3]:ts.}",
+ varuse("${${PKG_INFO} -E ${d} || echo:L:sh}", "L", "C/[^[0-9]]*/ /g", "[1..3]", "ts."))
+
+ // For :S and :C, the colon can be left out. It's confusing but possible.
+ test("${VAR:S/-//S/.//}",
+ varuseText("${VAR:S/-//S/.//}", "VAR", "S/-//", "S/.//"))
+
+ // The :S and :C modifiers accept an arbitrary character as separator. Here it is "a".
+ test("${VAR:Sahara}",
+ varuse("VAR", "Sahara"))
+
+ // The separator character can be left out, which means empty.
+ test("${VAR:ts}",
+ varuse("VAR", "ts"))
+
+ // The separator character can be a long octal number.
+ test("${VAR:ts\\000012}",
+ varuse("VAR", "ts\\000012"))
+
+ // Or even decimal.
+ test("${VAR:ts\\124}",
+ varuse("VAR", "ts\\124"))
+
+ // The :ts modifier only takes single-character separators.
+ test("${VAR:ts---}",
+ varuse("VAR", "ts---"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid separator \"---\" for :ts modifier of \"VAR\".")
+
+ test("$<",
+ varuseText("$<", "<")) // Same as ${.IMPSRC}
+
+ test("$(GNUSTEP_USER_ROOT)",
+ varuseText("$(GNUSTEP_USER_ROOT)", "GNUSTEP_USER_ROOT"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Please use curly braces {} instead of round parentheses () for GNUSTEP_USER_ROOT.")
+
+ // Opening brace, closing parenthesis.
+ // Warnings are only printed for balanced expressions.
+ test("${VAR)",
+ varuseText("${VAR)", "VAR)"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"VAR)\".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid part \")\" after variable name \"VAR\".")
+
+ // Opening parenthesis, closing brace
+ // Warnings are only printed for balanced expressions.
+ test("$(VAR}",
+ varuseText("$(VAR}", "VAR}"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \")\" for \"VAR}\".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid part \"}\" after variable name \"VAR\".")
+
+ test("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}@}",
+ varuse("PLIST_SUBST_VARS", "@var@${var}=${${var}:Q}@"))
+
+ test("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}}",
+ varuseText("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}}",
+ "PLIST_SUBST_VARS", "@var@${var}=${${var}:Q}}"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Modifier ${PLIST_SUBST_VARS:@var@...@} is missing the final \"@\".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"PLIST_SUBST_VARS\".")
+
+ // The replacement text may include closing braces, which is useful
+ // for AWK programs.
+ test("${PLIST_SUBST_VARS:@var@{${var}}@}",
+ varuseText("${PLIST_SUBST_VARS:@var@{${var}}@}",
+ "PLIST_SUBST_VARS", "@var@{${var}}@"),
+ nil...)
+
+ // Unfinished variable use
+ test("${",
+ varuseText("${", ""),
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"\".")
+
+ // Unfinished nested variable use
+ test("${${",
+ varuseText("${${", "${"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"\".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"${\".")
+
+ test("${arbitrary :Mpattern:---:Q}",
+ varuseText("${arbitrary :Mpattern:---:Q}", "arbitrary ", "Mpattern", "Q"),
+ // TODO: Swap the order of these messages.
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid variable modifier \"---\" for \"arbitrary \".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid part \" \" after variable name \"arbitrary\".")
+
+ // Variable names containing spaces do not occur in pkgsrc.
+ // Technically they are possible:
+ //
+ // VARNAME= name with spaces
+ // ${VARNAME}= value
+ //
+ // all:
+ // @echo ${name with spaces:Q}''
+ test("${arbitrary text}",
+ varuse("arbitrary text"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid part \" text\" after variable name \"arbitrary\".")
+}
+
+func (s *Suite) Test_MkLexer_VarUse__ambiguous(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ t.SetUpCommandLine("--explain")
+
+ line := t.NewLine("module.mk", 123, "\t$Varname $X")
+ p := NewMkLexer(line.Text[1:], line)
+
+ tokens, rest := p.MkTokens()
+ t.CheckDeepEquals(tokens, b.Tokens(
+ b.VaruseTextToken("$V", "V"),
+ b.TextToken("arname "),
+ b.VaruseTextToken("$X", "X")))
+ t.CheckEquals(rest, "")
+
+ t.CheckOutputLines(
+ "ERROR: module.mk:123: $Varname is ambiguous. Use ${Varname} if you mean a Make variable or $$Varname if you mean a shell variable.",
+ "",
+ "\tOnly the first letter after the dollar is the variable name.",
+ "\tEverything following it is normal text, even if it looks like a",
+ "\tvariable name to human readers.",
+ "",
+ "WARN: module.mk:123: $X is ambiguous. Use ${X} if you mean a Make variable or $$X if you mean a shell variable.",
+ "",
+ "\tIn its current form, this variable is parsed as a Make variable. For",
+ "\thuman readers though, $x looks more like a shell variable than a",
+ "\tMake variable, since Make variables are usually written using braces",
+ "\t(BSD-style) or parentheses (GNU-style).",
+ "")
+}
+
+// Pkglint can replace $(VAR) with ${VAR}. It doesn't look at all components
+// of nested variables though because this case is not important enough to
+// invest much development time. It occurs so seldom that it is acceptable
+// to run pkglint multiple times in such a case.
+func (s *Suite) Test_MkLexer_varUseBrace__autofix_parentheses(c *check.C) {
+ t := s.Init(c)
+
+ test := func() {
+ mklines := t.SetUpFileMkLines("Makefile",
+ MkCvsID,
+ "COMMENT=\t$(P1) $(P2)) $(P3:Q) ${BRACES} $(A.$(B.$(C))) $(A:M\\#)",
+ "P1=\t\t${COMMENT}",
+ "P2=\t\t# nothing",
+ "P3=\t\t# nothing",
+ "BRACES=\t\t# nothing",
+ "C=\t\t# nothing",
+ "A=\t\t# nothing")
+
+ mklines.Check()
+ }
+
+ t.ExpectDiagnosticsAutofix(
+ test,
+
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for P1.",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for P2.",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for P3.",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for C.",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for B.$(C).",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for A.$(B.$(C)).",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for A.",
+ "AUTOFIX: ~/Makefile:2: Replacing \"$(P1)\" with \"${P1}\".",
+ "AUTOFIX: ~/Makefile:2: Replacing \"$(P2)\" with \"${P2}\".",
+ "AUTOFIX: ~/Makefile:2: Replacing \"$(P3:Q)\" with \"${P3:Q}\".",
+ "AUTOFIX: ~/Makefile:2: Replacing \"$(C)\" with \"${C}\".")
+}
+
+func (s *Suite) Test_MkLexer_Varname(c *check.C) {
+ t := s.Init(c)
+
+ test := func(text string) {
+ line := t.NewLine("filename.mk", 1, text)
+ p := NewMkLexer(text, line)
+
+ varname := p.Varname()
+
+ t.CheckEquals(varname, text)
+ t.CheckEquals(p.Rest(), "")
+ }
+
+ testRest := func(text string, expectedVarname string, expectedRest string) {
+ line := t.NewLine("filename.mk", 1, text)
+ p := NewMkLexer(text, line)
+
+ varname := p.Varname()
+
+ t.CheckEquals(varname, expectedVarname)
+ t.CheckEquals(p.Rest(), expectedRest)
+ }
+
+ test("VARNAME")
+ test("VARNAME.param")
+ test("VARNAME.${param}")
+ test("SITES_${param}")
+ test("SITES_distfile-1.0.tar.gz")
+ test("SITES.gtk+-2.0")
+ test("PKGPATH.category/package")
+
+ testRest("VARNAME/rest", "VARNAME", "/rest")
+}
+
+func (s *Suite) Test_MkLexer_VarUseModifiers(c *check.C) {
+ t := s.Init(c)
+
+ varUse := NewMkTokenBuilder().VarUse
+ test := func(text string, varUse *MkVarUse, diagnostics ...string) {
+ line := t.NewLine("Makefile", 20, "\t"+text)
+ p := NewMkLexer(text, line)
+
+ actual := p.VarUse()
+
+ t.CheckDeepEquals(actual, varUse)
+ t.CheckEquals(p.Rest(), "")
+ t.CheckOutput(diagnostics)
+ }
+
+ // The !command! modifier is used so seldom that pkglint does not
+ // check whether the command is actually valid.
+ // At least not while parsing the modifier, since at this point it may
+ // still be unknown which of the commands can be used and which cannot.
+ test("${VAR:!command!}", varUse("VAR", "!command!"))
+
+ test("${VAR:!command}", varUse("VAR"),
+ "WARN: Makefile:20: Invalid variable modifier \"!command\" for \"VAR\".")
+
+ test("${VAR:command!}", varUse("VAR"),
+ "WARN: Makefile:20: Invalid variable modifier \"command!\" for \"VAR\".")
+
+ // The :L modifier makes the variable value "echo hello", and the :[1]
+ // modifier extracts the "echo".
+ test("${echo hello:L:[1]}", varUse("echo hello", "L", "[1]"))
+
+ // bmake ignores the :[3] modifier, and the :L modifier just returns the
+ // variable name, in this case BUILD_DIRS.
+ test("${BUILD_DIRS:[3]:L}", varUse("BUILD_DIRS", "[3]", "L"))
+
+ test("${PATH:ts::Q}", varUse("PATH", "ts:", "Q"))
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__invalid_ts_modifier_with_warning(c *check.C) {
+ t := s.Init(c)
+
+ t.SetUpCommandLine("-Wall", "--explain")
+ line := t.NewLine("filename.mk", 123, "${VAR:tsabc}")
+ p := NewMkLexer("tsabc}", line)
+
+ modifier := p.varUseModifier("VAR", '}')
+
+ t.CheckEquals(modifier, "tsabc")
+ t.CheckEquals(p.Rest(), "}")
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid separator \"abc\" for :ts modifier of \"VAR\".",
+ "",
+ "\tThe separator for the :ts modifier must be either a single character",
+ "\tor an escape sequence like \\t or \\n or an octal or decimal escape",
+ "\tsequence; see the bmake man page for further details.",
+ "")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__invalid_ts_modifier_without_warning(c *check.C) {
+ t := s.Init(c)
+
+ p := NewMkLexer("tsabc}", nil)
+
+ modifier := p.varUseModifier("VAR", '}')
+
+ t.CheckEquals(modifier, "tsabc")
+ t.CheckEquals(p.Rest(), "}")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__square_bracket(c *check.C) {
+ t := s.Init(c)
+
+ line := t.NewLine("filename.mk", 123, "\t${VAR:[asdf]}")
+ p := NewMkLexer("[asdf]", line)
+
+ modifier := p.varUseModifier("VAR", '}')
+
+ t.CheckEquals(modifier, "")
+ t.CheckEquals(p.Rest(), "")
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"[asdf]\" for \"VAR\".")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__condition_without_colon(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "${${VAR}:?yes:no}${${VAR}:?yes}")
+ p := NewMkLexer(line.Text, line)
+
+ varUse1 := p.VarUse()
+ varUse2 := p.VarUse()
+
+ t.CheckDeepEquals(varUse1, b.VarUse("${VAR}", "?yes:no"))
+ t.CheckDeepEquals(varUse2, b.VarUse("${VAR}"))
+ t.CheckEquals(p.Rest(), "")
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"?yes\" for \"${VAR}\".")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__malformed_in_parentheses(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "$(${VAR}:?yes)")
+ p := NewMkLexer(line.Text, line)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("${VAR}"))
+ t.CheckEquals(p.Rest(), "")
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"?yes\" for \"${VAR}\".",
+ "WARN: filename.mk:123: Please use curly braces {} instead of round parentheses () for ${VAR}.")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__varuse_in_malformed_modifier(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "${${VAR}:?yes${INNER}}")
+ p := NewMkLexer(line.Text, line)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("${VAR}"))
+ t.CheckEquals(p.Rest(), "")
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"?yes${INNER}\" for \"${VAR}\".")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierSubst(c *check.C) {
+ t := s.Init(c)
+
+ varUse := NewMkTokenBuilder().VarUse
+ test := func(text string, varUse *MkVarUse, rest string, diagnostics ...string) {
+ line := t.NewLine("Makefile", 20, "\t"+text)
+ p := NewMkLexer(text, line)
+
+ actual := p.VarUse()
+
+ t.CheckDeepEquals(actual, varUse)
+ t.CheckEquals(p.Rest(), rest)
+ t.CheckOutput(diagnostics)
+ }
+
+ test("${VAR:S", varUse("VAR"), "",
+ "WARN: Makefile:20: Invalid variable modifier \"S\" for \"VAR\".",
+ "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
+
+ test("${VAR:S}", varUse("VAR"), "",
+ "WARN: Makefile:20: Invalid variable modifier \"S\" for \"VAR\".")
+
+ test("${VAR:S,}", varUse("VAR"), "",
+ "WARN: Makefile:20: Invalid variable modifier \"S,\" for \"VAR\".")
+
+ test("${VAR:S,from,to}", varUse("VAR"), "",
+ "WARN: Makefile:20: Invalid variable modifier \"S,from,to\" for \"VAR\".")
+
+ test("${VAR:S,from,to,}", varUse("VAR", "S,from,to,"), "")
+
+ test("${VAR:S,^from$,to,}", varUse("VAR", "S,^from$,to,"), "")
+
+ test("${VAR:S,@F@,${F},}", varUse("VAR", "S,@F@,${F},"), "")
+
+ test("${VAR:S,from,to,1}", varUse("VAR", "S,from,to,1"), "")
+ test("${VAR:S,from,to,g}", varUse("VAR", "S,from,to,g"), "")
+ test("${VAR:S,from,to,W}", varUse("VAR", "S,from,to,W"), "")
+
+ test("${VAR:S,from,to,1gW}", varUse("VAR", "S,from,to,1gW"), "")
+
+ // Inside the :S or :C modifiers, neither a colon nor the closing
+ // brace need to be escaped. Otherwise these patterns would become
+ // too difficult to read and write.
+ test("${VAR:C/[[:alnum:]]{2}/**/g}",
+ varUse("VAR", "C/[[:alnum:]]{2}/**/g"),
+ "")
+
+ // Some pkgsrc users really explore the darkest corners of bmake by using
+ // the backslash as the separator in the :S modifier. Sure, it works, it
+ // just looks totally unexpected to the average pkgsrc reader.
+ //
+ // Using the backslash as separator means that it cannot be used for anything
+ // else, not even for escaping other characters.
+ test("${VAR:S\\.post1\\\\1}",
+ varUse("VAR", "S\\.post1\\\\1"),
+ "")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierAt__missing_at_after_variable_name(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "${VAR:@varname}")
+ p := NewMkLexer(line.Text, line)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("VAR"))
+ t.CheckEquals(p.Rest(), "")
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"@varname\" for \"VAR\".")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierAt__dollar(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "${VAR:@var@$$var@}")
+ p := NewMkLexer(line.Text, line)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("VAR", "@var@$$var@"))
+ t.CheckEquals(p.Rest(), "")
+ t.CheckOutputEmpty()
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierAt__incomplete_without_warning(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ p := NewMkLexer("${VAR:@var@$$var}rest", nil)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("VAR", "@var@$$var}rest"))
+ t.CheckEquals(p.Rest(), "")
+ t.CheckOutputEmpty()
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierAt(c *check.C) {
+ t := s.Init(c)
+
+ varUse := NewMkTokenBuilder().VarUse
+ test := func(text string, varUse *MkVarUse, rest string, diagnostics ...string) {
+ line := t.NewLine("Makefile", 20, "\t"+text)
+ p := NewMkLexer(text, line)
+
+ actual := p.VarUse()
+
+ t.CheckDeepEquals(actual, varUse)
+ t.CheckEquals(p.Rest(), rest)
+ t.CheckOutput(diagnostics)
+ }
+
+ test("${VAR:@",
+ varUse("VAR"),
+ "",
+ "WARN: Makefile:20: Invalid variable modifier \"@\" for \"VAR\".",
+ "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
+
+ test("${VAR:@i@${i}}", varUse("VAR", "@i@${i}}"), "",
+ "WARN: Makefile:20: Modifier ${VAR:@i@...@} is missing the final \"@\".",
+ "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
+
+ test("${VAR:@i@${i}@}", varUse("VAR", "@i@${i}@"), "")
+
+ test("${PKG_GROUPS:@g@${g:Q}:${PKG_GID.${g}:Q}@:C/:*$//g}",
+ varUse("PKG_GROUPS", "@g@${g:Q}:${PKG_GID.${g}:Q}@", "C/:*$//g"),
+ "")
+}
Index: pkgsrc/pkgtools/pkglint/files/intqa/qa.go
diff -u /dev/null pkgsrc/pkgtools/pkglint/files/intqa/qa.go:1.1
--- /dev/null Wed Nov 27 22:10:08 2019
+++ pkgsrc/pkgtools/pkglint/files/intqa/qa.go Wed Nov 27 22:10:07 2019
@@ -0,0 +1,540 @@
+// Package intqa provides quality assurance for the pkglint code.
+package intqa
+
+import (
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "io"
+ "os"
+ "path"
+ "reflect"
+ "sort"
+ "strings"
+ "unicode"
+)
+
+type Error int
+
+const (
+ ENone Error = iota + 1
+ EAll
+
+ // A function or method does not have a corresponding test.
+ EMissingTest
+
+ // The name of a test function does not correspond to a program
+ // element to be tested.
+ EMissingTestee
+
+ // The tests are not in the same order as their corresponding
+ // testees in the main code.
+ EOrder
+
+ // The test method does not have a valid name.
+ EName
+
+ // The file of the test method does not correspond to the
+ // file of the testee.
+ EFile
+
+ // All methods of a type must be in the same file as the type definition.
+ EMethodsSameFile
+)
+
+// QAChecker ensures that all test names follow a common naming scheme:
+// Test_${Type}_${Method}__${description_using_underscores}
+// Each of the variable parts may be omitted.
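+//
+// For example, Test_MkLexer_VarUse__ambiguous is the test for the method
+// MkLexer.VarUse, in the situation described as "ambiguous".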
+type QAChecker struct {
+ errorf func(format string, args ...interface{})
+
+ filters []filter
+ order int
+
+ testees []*testee
+ tests []*test
+
+ errors []string
+ out io.Writer
+}
+
+// NewQAChecker creates a new checker.
+// By default, all errors are enabled;
+// call Configure to disable them selectively.
+func NewQAChecker(errorf func(format string, args ...interface{})) *QAChecker {
+ ck := QAChecker{errorf: errorf, out: os.Stderr}
+
+ // For test fixtures from https://gopkg.in/check/v1.
+ ck.Configure("*_test.go", "*", "SetUpTest", -EMissingTest)
+ ck.Configure("*_test.go", "*", "TearDownTest", -EMissingTest)
+
+ // See https://github.com/rillig/gobco.
+ ck.Configure("gobco_*.go", "gobco*", "*", -EMissingTest)
+ ck.Configure("gobco_*.go", "", "gobco*", -EMissingTest)
+
+ return &ck
+}
+
+// Configure sets the errors that are activated for the given code,
+// specified by shell patterns like in path.Match.
+//
+// All rules are applied in order. Later rules overwrite earlier rules.
+//
+// Individual errors can be enabled by giving their constant and disabled
+// by negating them, such as -EMissingTestee. To reset everything, use
+// either EAll or ENone.
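+//
+// For example, Configure("*_test.go", "Suite", "*", -EMissingTest)
+// leaves the other checks active but disables the missing-test check
+// for the methods of the Suite type in the test files.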
+func (ck *QAChecker) Configure(filenames, typeNames, funcNames string, errors ...Error) {
+ ck.filters = append(ck.filters, filter{filenames, typeNames, funcNames, errors})
+}
+
+func (ck *QAChecker) Check() {
+ ck.load(".")
+ ck.checkTestees()
+ ck.checkTests()
+ ck.checkOrder()
+ ck.print()
+}
+
+// load loads all type, function and method names from the current package.
+func (ck *QAChecker) load(dir string) {
+
+ fileSet := token.NewFileSet()
+ pkgs, err := parser.ParseDir(fileSet, dir, nil, 0)
+ if err != nil {
+ panic(err)
+ }
+
+ for _, pkgname := range sortedKeys(pkgs) {
+ files := pkgs[pkgname].Files
+
+ for _, filename := range sortedKeys(files) {
+ file := files[filename]
+ for _, decl := range file.Decls {
+ ck.loadDecl(decl, filename)
+ }
+ }
+ }
+
+ ck.relate()
+}
+
+// loadDecl adds a single type or function declaration to the known elements.
+func (ck *QAChecker) loadDecl(decl ast.Decl, filename string) {
+ switch decl := decl.(type) {
+
+ case *ast.GenDecl:
+ for _, spec := range decl.Specs {
+ switch spec := spec.(type) {
+ case *ast.TypeSpec:
+ typeName := spec.Name.Name
+ ck.addCode(code{filename, typeName, "", 0}, nil)
+ }
+ }
+
+ case *ast.FuncDecl:
+ code := ck.parseFuncDecl(filename, decl)
+ ck.addCode(code, decl)
+ }
+}
+
+func (*QAChecker) parseFuncDecl(filename string, decl *ast.FuncDecl) code {
+ typeName := ""
+ if decl.Recv != nil {
+ typeExpr := decl.Recv.List[0].Type.(ast.Expr)
+ if star, ok := typeExpr.(*ast.StarExpr); ok {
+ typeExpr = star.X
+ }
+ typeName = typeExpr.(*ast.Ident).Name
+ }
+
+ funcName := decl.Name.Name
+ return code{filename, typeName, funcName, 0}
+}
+
+func (ck *QAChecker) addCode(code code, decl *ast.FuncDecl) {
+ if code.isTestScope() && code.isFunc() && code.Func == "TestMain" {
+ // This is not a test for Main, but the wrapper function around the tests.
+ // Therefore it is completely ignored.
+ // See https://golang.org/pkg/testing/#hdr-Main.
+ //
+ // Among others, this function is created by
+ // https://github.com/rillig/gobco when measuring the branch
+ // coverage of a package.
+ return
+ }
+
+ if !ck.isRelevant(code.file, code.Type, code.Func, EAll) {
+ return
+ }
+
+ if ck.isTest(code, decl) {
+ ck.addTest(code)
+ } else {
+ ck.addTestee(code)
+ }
+}
+
+func (*QAChecker) isTest(code code, decl *ast.FuncDecl) bool {
+ if !code.isTestScope() || !strings.HasPrefix(code.Func, "Test") {
+ return false
+ }
+ if decl.Type.Params.NumFields() != 1 {
+ return false
+ }
+
+ paramType := decl.Type.Params.List[0].Type
+ if star, ok := paramType.(*ast.StarExpr); ok {
+ paramType = star.X
+ }
+ if sel, ok := paramType.(*ast.SelectorExpr); ok {
+ paramType = sel.Sel
+ }
+
+ paramTypeName := paramType.(*ast.Ident).Name
+ return paramTypeName == "C" || paramTypeName == "T"
+}
+
+func (ck *QAChecker) addTestee(code code) {
+ code.order = ck.nextOrder()
+ ck.testees = append(ck.testees, &testee{code})
+}
+
+func (ck *QAChecker) addTest(code code) {
+ if !strings.HasPrefix(code.Func, "Test_") &&
+ code.Func != "Test" &&
+ ck.addError(
+ EName,
+ code,
+ "Test %q must start with %q.",
+ code.fullName(), "Test_") {
+
+ return
+ }
+
+ parts := strings.SplitN(code.Func, "__", 2)
+ testeeName := strings.TrimPrefix(strings.TrimPrefix(parts[0], "Test"), "_")
+ descr := ""
+ if len(parts) > 1 {
+ if parts[1] == "" &&
+ ck.addError(
+ EName,
+ code,
+ "Test %q must have a nonempty description.",
+ code.fullName()) {
+ return
+ }
+ descr = parts[1]
+ }
+
+ code.order = ck.nextOrder()
+ ck.tests = append(ck.tests, &test{code, testeeName, descr, nil})
+}
+
+func (ck *QAChecker) nextOrder() int {
+ id := ck.order
+ ck.order++
+ return id
+}
+
+// relate connects the tests to their testees.
+func (ck *QAChecker) relate() {
+ testeesByPrefix := make(map[string]*testee)
+ for _, testee := range ck.testees {
+ prefix := join(testee.Type, "_", testee.Func)
+ testeesByPrefix[prefix] = testee
+ }
+
+ for _, test := range ck.tests {
+ test.testee = testeesByPrefix[test.testeeName]
+ }
+}
+
+func (ck *QAChecker) checkTests() {
+ for _, test := range ck.tests {
+ ck.checkTestFile(test)
+ ck.checkTestTestee(test)
+ ck.checkTestDescr(test)
+ }
+}
+
+func (ck *QAChecker) checkTestFile(test *test) {
+ testee := test.testee
+ if testee == nil || testee.file == test.file {
+ return
+ }
+
+ correctTestFile := testee.testFile()
+ if correctTestFile == test.file {
+ return
+ }
+
+ ck.addError(
+ EFile,
+ test.code,
+ "Test %q for %q must be in %s instead of %s.",
+ test.fullName(), testee.fullName(), correctTestFile, test.file)
+}
+
+func (ck *QAChecker) checkTestTestee(test *test) {
+ testee := test.testee
+ if testee != nil || test.testeeName == "" {
+ return
+ }
+
+ testeeName := strings.Replace(test.testeeName, "_", ".", -1)
+ ck.addError(
+ EMissingTestee,
+ test.code,
+ "Missing testee %q for test %q.",
+ testeeName, test.fullName())
+}
+
+// checkTestDescr ensures that the type or function name of the testee
+// does not accidentally end up in the description of the test. This could
+// happen if there is a double underscore instead of a single underscore.
+func (ck *QAChecker) checkTestDescr(test *test) {
+ testee := test.testee
+ if testee == nil || testee.isMethod() || !isCamelCase(test.descr) {
+ return
+ }
+
+ ck.addError(
+ EName,
+ testee.code,
+ "%s: Test description %q must not use CamelCase in the first word.",
+ test.fullName(), test.descr)
+}
+
+func (ck *QAChecker) checkTestees() {
+ ck.checkTesteesTest()
+ ck.checkTesteesMethodsSameFile()
+}
+
+// checkTesteesTest ensures that each testee has a corresponding unit test.
+func (ck *QAChecker) checkTesteesTest() {
+ tested := make(map[*testee]bool)
+ for _, test := range ck.tests {
+ tested[test.testee] = true
+ }
+
+ for _, testee := range ck.testees {
+ if tested[testee] {
+ continue
+ }
+
+ testName := "Test_" + join(testee.Type, "_", testee.Func)
+ ck.addError(
+ EMissingTest,
+ testee.code,
+ "Missing unit test %q for %q.",
+ testName, testee.fullName())
+ }
+}
+
+// checkTesteesMethodsSameFile ensures that all methods of a type are
+// defined in the same file or in the corresponding test file.
+func (ck *QAChecker) checkTesteesMethodsSameFile() {
+ types := map[string]*testee{}
+ for _, testee := range ck.testees {
+ if testee.isType() {
+ types[testee.Type] = testee
+ }
+ }
+
+ for _, testee := range ck.testees {
+ if !testee.isMethod() {
+ continue
+ }
+ method := testee
+
+ typ := types[method.Type]
+ if typ == nil || method.file == typ.file {
+ continue
+ }
+
+ if method.isTestScope() == typ.isTestScope() {
+ ck.addError(
+ EMethodsSameFile,
+ testee.code,
+ "Method %s must be in %s, like its type.",
+ testee.fullName(), typ.file)
+ continue
+ }
+
+ correctFile := typ.testFile()
+ if method.file == correctFile {
+ continue
+ }
+
+ ck.addError(
+ EMethodsSameFile,
+ testee.code,
+ "Method %s must be in %s, corresponding to its type.",
+ testee.fullName(), correctFile)
+ }
+}
+
+// isRelevant checks whether the given error is enabled.
+func (ck *QAChecker) isRelevant(filename, typeName, funcName string, e Error) bool {
+ mask := ^uint64(0)
+ for _, filter := range ck.filters {
+ if matches(filename, filter.filenames) &&
+ matches(typeName, filter.typeNames) &&
+ matches(funcName, filter.funcNames) {
+ mask = ck.errorsMask(mask, filter.errors...)
+ }
+ }
+ return mask&ck.errorsMask(0, e) != 0
+}
+
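+// errorsMask applies the given error configuration to the mask:
+// ENone clears all bits, EAll sets all bits, a negated error clears its
+// bit, and any other error sets its bit.
+// For example, errorsMask(0, EMissingTest) sets only the EMissingTest bit.
+//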
+func (ck *QAChecker) errorsMask(mask uint64, errors ...Error) uint64 {
+ for _, err := range errors {
+ if err == ENone {
+ mask = 0
+ } else if err == EAll {
+ mask = ^uint64(0)
+ } else if err < 0 {
+ mask &= ^(uint64(1) << -uint(err))
+ } else {
+ mask |= uint64(1) << uint(err)
+ }
+ }
+ return mask
+}
+
+// checkOrder ensures that the tests appear in the same order as their
+// counterparts in the main code.
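+//
+// For example, if the main code defines Parse before Check, the test file
+// must list Test_Parse before Test_Check.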
+func (ck *QAChecker) checkOrder() {
+ maxOrderByFile := make(map[string]*test)
+
+ for _, test := range ck.tests {
+ testee := test.testee
+ if testee == nil {
+ continue
+ }
+
+ maxOrder := maxOrderByFile[testee.file]
+ if maxOrder == nil || testee.order > maxOrder.testee.order {
+ maxOrderByFile[testee.file] = test
+ }
+
+ if maxOrder != nil && testee.order < maxOrder.testee.order {
+ insertBefore := maxOrder
+ for _, before := range ck.tests {
+ if before.file == test.file && before.testee != nil && before.testee.order > testee.order {
+ insertBefore = before
+ break
+ }
+ }
+
+ ck.addError(
+ EOrder,
+ test.code,
+ "Test %q must be ordered before %q.",
+ test.fullName(), insertBefore.fullName())
+ }
+ }
+}
+
+func (ck *QAChecker) addError(e Error, c code, format string, args ...interface{}) bool {
+ relevant := ck.isRelevant(c.file, c.Type, c.Func, e)
+ if relevant {
+ ck.errors = append(ck.errors, fmt.Sprintf(format, args...))
+ }
+ return relevant
+}
+
+func (ck *QAChecker) print() {
+ for _, msg := range ck.errors {
+ _, _ = fmt.Fprintln(ck.out, msg)
+ }
+
+ n := len(ck.errors)
+ if n > 1 {
+ ck.errorf("%d errors.", n)
+ } else if n == 1 {
+ ck.errorf("%d error.", n)
+ }
+}
+
+type filter struct {
+ filenames string
+ typeNames string
+ funcNames string
+ errors []Error
+}
+
+// testee is an element of the source code that can be tested.
+type testee struct {
+ code
+}
+
+type test struct {
+ code
+
+ testeeName string // The method name without the "Test_" prefix and description
+ descr string // The part after the "__" in the method name
+ testee *testee
+}
+
+// code is either a type, a function or a method.
+type code struct {
+ file string // the file containing the code
+ Type string // the type, e.g. MkLine
+ Func string // the function or method name, e.g. Warnf
+ order int // the relative order in the file
+}
+
+func (c *code) fullName() string { return join(c.Type, ".", c.Func) }
+func (c *code) isFunc() bool { return c.Type == "" }
+func (c *code) isType() bool { return c.Func == "" }
+func (c *code) isMethod() bool { return c.Type != "" && c.Func != "" }
+
+func (c *code) isTestScope() bool {
+ return strings.HasSuffix(c.file, "_test.go")
+}
+
+func (c *code) testFile() string {
+ if strings.HasSuffix(c.file, "_test.go") {
+ return c.file
+ }
+ return strings.TrimSuffix(c.file, ".go") + "_test.go"
+}
+
+func isCamelCase(str string) bool {
+ for i := 0; i+1 < len(str); i++ {
+ if str[i] == '_' {
+ return false
+ }
+ if unicode.IsLower(rune(str[i])) && unicode.IsUpper(rune(str[i+1])) {
+ return true
+ }
+ }
+ return false
+}
+
+func join(a, sep, b string) string {
+ if a == "" || b == "" {
+ sep = ""
+ }
+ return a + sep + b
+}
+
+func matches(subj string, pattern string) bool {
+ ok, err := path.Match(pattern, subj)
+ if err != nil {
+ panic(err)
+ }
+ return ok
+}
+
+// sortedKeys returns the sorted keys from an arbitrary map.
+func sortedKeys(m interface{}) []string {
+ var keys []string
+ for _, key := range reflect.ValueOf(m).MapKeys() {
+ keys = append(keys, key.Interface().(string))
+ }
+ sort.Strings(keys)
+ return keys
+}
Index: pkgsrc/pkgtools/pkglint/files/intqa/qa_test.go
diff -u /dev/null pkgsrc/pkgtools/pkglint/files/intqa/qa_test.go:1.1
--- /dev/null Wed Nov 27 22:10:08 2019
+++ pkgsrc/pkgtools/pkglint/files/intqa/qa_test.go Wed Nov 27 22:10:07 2019
@@ -0,0 +1,722 @@
+package intqa
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "gopkg.in/check.v1"
+ "io/ioutil"
+ "path"
+ "testing"
+)
+
+type Suite struct {
+ c *check.C
+ ck *QAChecker
+ summary string
+}
+
+func Test(t *testing.T) {
+ check.Suite(&Suite{})
+ check.TestingT(t)
+}
+
+func (s *Suite) Init(c *check.C) *QAChecker {
+ errorf := func(format string, args ...interface{}) {
+ s.summary = fmt.Sprintf(format, args...)
+ }
+
+ s.c = c
+ s.ck = NewQAChecker(errorf)
+ s.ck.out = ioutil.Discard
+ return s.ck
+}
+
+func (s *Suite) TearDownTest(c *check.C) {
+ s.c = c
+ // The collected testees and tests are not verified to have been checked,
+ // since that would create too much boilerplate code.
+ s.CheckErrors(nil...)
+ s.CheckSummary("")
+}
+
+func (s *Suite) CheckTestees(testees ...*testee) {
+ s.c.Check(s.ck.testees, check.DeepEquals, testees)
+ s.ck.testees = nil
+}
+
+func (*Suite) newTestee(filename, typeName, funcName string, order int) *testee {
+ return &testee{code{filename, typeName, funcName, order}}
+}
+
+func (s *Suite) CheckTests(tests ...*test) {
+ s.c.Check(s.ck.tests, check.DeepEquals, tests)
+ s.ck.tests = nil
+}
+
+func (*Suite) newTest(filename, typeName, funcName string, order int, testeeName, descr string, testee *testee) *test {
+ c := code{filename, typeName, funcName, order}
+ return &test{c, testeeName, descr, testee}
+}
+
+func (s *Suite) CheckErrors(errors ...string) {
+ s.c.Check(s.ck.errors, check.DeepEquals, errors)
+ s.ck.errors = nil
+}
+
+func (s *Suite) CheckSummary(summary string) {
+ s.c.Check(s.summary, check.Equals, summary)
+ s.summary = ""
+}
+
+func (s *Suite) Test_NewQAChecker(c *check.C) {
+ ck := s.Init(c)
+
+ c.Check(ck.isRelevant("*_test.go", "Suite", "SetUpTest", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("*_test.go", "Suite", "SetUpTest", EMissingTest), check.Equals, false)
+
+ c.Check(ck.isRelevant("*_test.go", "Suite", "TearDownTest", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("*_test.go", "Suite", "TearDownTest", EMissingTest), check.Equals, false)
+}
+
+func (s *Suite) Test_QAChecker_Configure(c *check.C) {
+ ck := s.Init(c)
+
+ ck.Configure("*", "*", "*", ENone) // overwrite initialization from Suite.Init
+
+ c.Check(ck.isRelevant("", "", "", EAll), check.Equals, false)
+ c.Check(ck.isRelevant("", "", "", EMissingTestee), check.Equals, false)
+ c.Check(ck.isRelevant("", "", "", EMissingTest), check.Equals, false)
+
+ ck.Configure("*", "*", "*", EAll)
+
+ c.Check(ck.isRelevant("", "", "", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("", "", "", EMissingTestee), check.Equals, true)
+ c.Check(ck.isRelevant("", "", "", EMissingTest), check.Equals, true)
+
+ ck.Configure("*", "*", "*", -EMissingTestee)
+
+ c.Check(ck.isRelevant("", "", "", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("", "", "", EMissingTestee), check.Equals, false)
+ c.Check(ck.isRelevant("", "", "", EMissingTest), check.Equals, true)
+
+ ck.Configure("*", "*", "*", ENone, EMissingTest)
+
+ c.Check(ck.isRelevant("", "", "", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("", "", "", EMissingTestee), check.Equals, false)
+ c.Check(ck.isRelevant("", "", "", EMissingTest), check.Equals, true)
+
+ ck.Configure("*", "*", "*", EAll, -EMissingTest)
+
+ c.Check(ck.isRelevant("", "", "", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("", "", "", EMissingTestee), check.Equals, true)
+ c.Check(ck.isRelevant("", "", "", EMissingTest), check.Equals, false)
+}
+
+func (s *Suite) Test_QAChecker_Configure__ignore_single_function(c *check.C) {
+ ck := s.Init(c)
+
+ ck.Configure("*", "*", "*", EAll)
+
+ // The intention of this rule is that this particular function is ignored.
+ // Everything else from that file is still processed.
+ ck.Configure("*_test.go", "", "TestMain", ENone)
+
+ c.Check(ck.isRelevant("file_test.go", "", "", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("file_test.go", "*", "*", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("file_test.go", "*", "Other", EAll), check.Equals, true)
+ c.Check(ck.isRelevant("file_test.go", "", "TestMain", EAll), check.Equals, false)
+ c.Check(ck.isRelevant("file_test.go", "*", "TestMain", EAll), check.Equals, true)
+}
+
+func (s *Suite) Test_QAChecker_Check(c *check.C) {
+ ck := s.Init(c)
+
+ ck.Configure("*", "*", "", -EMissingTest)
+ ck.Configure("*", "Suite", "*", -EMissingTest)
+
+ ck.Check()
+
+ s.CheckErrors(
+ "Missing unit test \"Test_QAChecker_addCode\" for \"QAChecker.addCode\".",
+ "Missing unit test \"Test_QAChecker_relate\" for \"QAChecker.relate\".",
+ "Missing unit test \"Test_QAChecker_isRelevant\" for \"QAChecker.isRelevant\".")
+ s.CheckSummary("3 errors.")
+}
+
+func (s *Suite) Test_QAChecker_load__filtered_nothing(c *check.C) {
+ ck := s.Init(c)
+
+ ck.Configure("*", "*", "*", ENone)
+
+ ck.load(".")
+
+ s.CheckTestees(
+ nil...)
+ s.CheckTests(
+ nil...)
+}
+
+func (s *Suite) Test_QAChecker_load__filtered_only_Value(c *check.C) {
+ ck := s.Init(c)
+
+ ck.Configure("*", "*", "*", ENone)
+ ck.Configure("*", "Value", "*", EAll)
+
+ ck.load(".")
+
+ s.CheckTestees(
+ s.newTestee("qa_test.go", "Value", "", 0),
+ s.newTestee("qa_test.go", "Value", "Method", 1))
+ s.CheckTests(
+ nil...)
+}
+
+func (s *Suite) Test_QAChecker_load__panic(c *check.C) {
+ ck := s.Init(c)
+
+ c.Check(
+ func() { ck.load("does-not-exist") },
+ check.PanicMatches,
+ `^open does-not-exist\b.*`)
+}
+
+func (s *Suite) Test_QAChecker_loadDecl(c *check.C) {
+ ck := s.Init(c)
+
+ load := func(filename, decl string) {
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, filename, "package p; "+decl, 0)
+ c.Assert(err, check.IsNil)
+ ck.loadDecl(file.Decls[0], filename)
+ }
+
+ load("file_test.go", "type TypeName int")
+
+ s.CheckTestees(
+ s.newTestee("file_test.go", "TypeName", "", 0))
+
+ // The freestanding TestMain function is ignored by a hardcoded rule,
+ // independently of the configuration.
+ load("file_test.go", "func TestMain() {}")
+
+ s.CheckTestees(
+ nil...)
+ s.CheckTests(
+ nil...)
+
+ // The TestMain method on a type is relevant, but violates the naming rule.
+ // Therefore it is ignored.
+ load("file_test.go", "func (Suite) TestMain(*check.C) {}")
+
+ s.CheckTestees(
+ nil...)
+ s.CheckTests(
+ nil...)
+ s.CheckErrors(
+ "Test \"Suite.TestMain\" must start with \"Test_\".")
+
+ // The above error can be disabled, and then the method is handled
+ // like any other test method.
+ ck.Configure("*", "Suite", "*", -EName)
+ load("file_test.go", "func (Suite) TestMain(*check.C) {}")
+
+ s.CheckTestees(
+ nil...)
+ s.CheckTests(
+ s.newTest("file_test.go", "Suite", "TestMain", 1, "Main", "", nil))
+
+ // There is no special handling for TestMain method with a description.
+ load("file_test.go", "func (Suite) TestMain__descr(*check.C) {}")
+
+ s.CheckTestees(
+ nil...)
+ s.CheckTests(
+ s.newTest("file_test.go", "Suite", "TestMain__descr", 2, "Main", "descr", nil))
+ s.CheckErrors(
+ nil...)
+}
+
+func (s *Suite) Test_QAChecker_parseFuncDecl(c *check.C) {
+ _ = s.Init(c)
+
+ testFunc := func(filename, decl, typeName, funcName string) {
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, filename, "package p; "+decl, 0)
+ c.Assert(err, check.IsNil)
+ fn := file.Decls[0].(*ast.FuncDecl)
+ actual := (*QAChecker).parseFuncDecl(nil, filename, fn)
+ c.Check(actual, check.Equals, code{filename, typeName, funcName, 0})
+ }
+
+ testFunc("f_test.go", "func (t Type) Test() {}",
+ "Type", "Test")
+ testFunc("f_test.go", "func (t Type) Test_Type_Method() {}",
+ "Type", "Test_Type_Method")
+ testFunc("f_test.go", "func Test() {}",
+ "", "Test")
+ testFunc("f_test.go", "func Test_Type_Method() {}",
+ "", "Test_Type_Method")
+}
+
+func (s *Suite) Test_QAChecker_isTest(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(filename, typeName, funcName string, isTest bool) {
+ code := code{filename, typeName, funcName, 0}
+ c.Check((*QAChecker).isTest(nil, code, nil), check.Equals, isTest)
+ }
+
+ testFunc := func(filename, decl string, isTest bool) {
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, filename, "package p; "+decl, 0)
+ c.Assert(err, check.IsNil)
+ fn := file.Decls[0].(*ast.FuncDecl)
+ code := (*QAChecker).parseFuncDecl(nil, filename, fn)
+ c.Check((*QAChecker).isTest(nil, code, fn), check.Equals, isTest)
+ }
+
+ test("f.go", "Type", "", false)
+ test("f.go", "", "Func", false)
+ test("f.go", "Type", "Method", false)
+ test("f.go", "Type", "Test", false)
+ test("f.go", "Type", "Test_Type_Method", false)
+ test("f.go", "", "Test_Type_Method", false)
+
+ testFunc("f_test.go", "func (t Type) Test(c *check.C) {}", true)
+ testFunc("f_test.go", "func (t Type) Test_Type_Method(c *check.C) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(c *check.C) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(c *C) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(c C) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(t *testing.T) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(X) {}", false)
+ testFunc("f_test.go", "func Test_Type_Method(int) {}", false)
+}
+
+func (s *Suite) Test_QAChecker_addTestee(c *check.C) {
+ ck := s.Init(c)
+
+ ck.addTestee(code{"filename.go", "Type", "Method", 0})
+
+ s.CheckTestees(
+ s.newTestee("filename.go", "Type", "Method", 0))
+}
+
+func (s *Suite) Test_QAChecker_addTest(c *check.C) {
+ ck := s.Init(c)
+
+ ck.addTest(code{"filename.go", "Type", "Method", 0})
+
+ s.CheckTests(
+ nil...)
+ s.CheckErrors(
+ "Test \"Type.Method\" must start with \"Test_\".")
+}
+
+func (s *Suite) Test_QAChecker_addTest__empty_description(c *check.C) {
+ ck := s.Init(c)
+
+ ck.addTest(code{"f_test.go", "Suite", "Test_Method__", 0})
+
+ s.CheckErrors(
+ "Test \"Suite.Test_Method__\" must have a nonempty description.")
+
+ // The test is not registered and thus cannot complain about its missing
+ // testee.
+ ck.checkTests()
+
+ s.CheckErrors(
+ nil...)
+}
+
+func (s *Suite) Test_QAChecker_addTest__suppressed_empty_description(c *check.C) {
+ ck := s.Init(c)
+
+ ck.Configure("*", "*", "*", -EName)
+ ck.addTest(code{"f_test.go", "Suite", "Test_Method__", 0})
+
+ s.CheckErrors(
+ nil...)
+
+ // Since there was no error above, the test is added normally
+ // and can complain about its missing testee.
+ ck.checkTests()
+
+ s.CheckErrors(
+ "Missing testee \"Method\" for test \"Suite.Test_Method__\".")
+}
+
+func (s *Suite) Test_QAChecker_nextOrder(c *check.C) {
+ ck := s.Init(c)
+
+ c.Check(ck.nextOrder(), check.Equals, 0)
+ c.Check(ck.nextOrder(), check.Equals, 1)
+ c.Check(ck.nextOrder(), check.Equals, 2)
+}
+
+func (s *Suite) Test_QAChecker_checkTests(c *check.C) {
+ ck := s.Init(c)
+
+ ck.tests = append(ck.tests,
+ s.newTest("wrong_test.go", "", "Test_Func", 0, "Func", "",
+ s.newTestee("source.go", "", "Func", 1)))
+
+ ck.checkTests()
+
+ s.CheckErrors(
+ "Test \"Test_Func\" for \"Func\" " +
+ "must be in source_test.go instead of wrong_test.go.")
+}
+
+func (s *Suite) Test_QAChecker_checkTestFile__global(c *check.C) {
+ ck := s.Init(c)
+
+ ck.checkTestFile(&test{
+ code{"demo_test.go", "Suite", "Test__Global", 0},
+ "",
+ "",
+ &testee{code{"other.go", "", "Global", 0}}})
+
+ s.CheckErrors(
+ "Test \"Suite.Test__Global\" for \"Global\" " +
+ "must be in other_test.go instead of demo_test.go.")
+}
+
+func (s *Suite) Test_QAChecker_checkTestTestee__global(c *check.C) {
+ ck := s.Init(c)
+
+ ck.checkTestTestee(&test{
+ code{"demo_test.go", "Suite", "Test__Global", 0},
+ "",
+ "",
+ nil})
+
+ s.CheckErrors(
+ nil...)
+}
+
+func (s *Suite) Test_QAChecker_checkTestTestee__no_testee(c *check.C) {
+ ck := s.Init(c)
+
+ ck.checkTestTestee(&test{
+ code{"demo_test.go", "Suite", "Test_Missing", 0},
+ "Missing",
+ "",
+ nil})
+
+ s.CheckErrors(
+ "Missing testee \"Missing\" for test \"Suite.Test_Missing\".")
+}
+
+func (s *Suite) Test_QAChecker_checkTestTestee__testee_exists(c *check.C) {
+ ck := s.Init(c)
+
+ ck.checkTestTestee(&test{
+ code{"demo_test.go", "Suite", "Test_Missing", 0},
+ "Missing",
+ "",
+ &testee{}})
+
+ s.CheckErrors(
+ nil...)
+}
+
+func (s *Suite) Test_QAChecker_checkTestDescr__camel_case(c *check.C) {
+ ck := s.Init(c)
+
+ ck.checkTestDescr(&test{
+ code{"demo_test.go", "Suite", "Test_Missing__CamelCase", 0},
+ "Missing",
+ "CamelCase",
+ &testee{}})
+
+ s.CheckErrors(
+ "Suite.Test_Missing__CamelCase: Test description \"CamelCase\" " +
+ "must not use CamelCase in the first word.")
+}
+
+func (s *Suite) Test_QAChecker_checkTestees(c *check.C) {
+ ck := s.Init(c)
+
+ ck.testees = []*testee{s.newTestee("s.go", "", "Func", 0)}
+ ck.tests = nil // force an error
+
+ ck.checkTestees()
+
+ s.CheckErrors(
+ "Missing unit test \"Test_Func\" for \"Func\".")
+}
+
+func (s *Suite) Test_QAChecker_checkTesteesTest(c *check.C) {
+ ck := s.Init(c)
+
+ ck.addTestee(code{"demo.go", "Type", "", 0})
+ ck.addTestee(code{"demo.go", "", "Func", 0})
+ ck.addTestee(code{"demo.go", "Type", "Method", 0})
+ ck.addTestee(code{"demo.go", "OkType", "", 0})
+ ck.addTestee(code{"demo.go", "", "OkFunc", 0})
+ ck.addTestee(code{"demo.go", "OkType", "Method", 0})
+ ck.addTest(code{"demo_test.go", "", "Test_OkType", 0})
+ ck.addTest(code{"demo_test.go", "", "Test_OkFunc", 0})
+ ck.addTest(code{"demo_test.go", "", "Test_OkType_Method", 0})
+ ck.relate()
+
+ ck.checkTesteesTest()
+
+ s.CheckErrors(
+ "Missing unit test \"Test_Type\" for \"Type\".",
+ "Missing unit test \"Test_Func\" for \"Func\".",
+ "Missing unit test \"Test_Type_Method\" for \"Type.Method\".")
+}
+
+func (s *Suite) Test_QAChecker_checkTesteesMethodsSameFile(c *check.C) {
+ ck := s.Init(c)
+
+ ck.addTestee(code{"main.go", "Main", "", 0})
+ ck.addTestee(code{"main.go", "Main", "MethodOk", 1})
+ ck.addTestee(code{"other.go", "Main", "MethodWrong", 2})
+ ck.addTestee(code{"main_test.go", "Main", "MethodOkTest", 3})
+ ck.addTestee(code{"other_test.go", "Main", "MethodWrongTest", 4})
+ ck.addTestee(code{"main_test.go", "T", "", 100})
+ ck.addTestee(code{"main_test.go", "T", "MethodOk", 101})
+ ck.addTestee(code{"other_test.go", "T", "MethodWrong", 102})
+
+ ck.checkTesteesMethodsSameFile()
+
+ s.CheckErrors(
+ "Method Main.MethodWrong must be in main.go, like its type.",
+ "Method Main.MethodWrongTest must be in main_test.go, "+
+ "corresponding to its type.",
+ "Method T.MethodWrong must be in main_test.go, like its type.")
+}
+
+func (s *Suite) Test_QAChecker_errorsMask(c *check.C) {
+ ck := s.Init(c)
+
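+ // The expected values below document the mask layout: EAll sets every
+ // bit, ENone clears the whole mask regardless of its previous value,
+ // and an individual error such as EMissingTest adds only its own bit
+ // (bit 3, hence 8) to the bits that are already set.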
+ c.Check(ck.errorsMask(0, EAll), check.Equals, ^uint64(0))
+ c.Check(ck.errorsMask(12345, ENone), check.Equals, uint64(0))
+ c.Check(ck.errorsMask(12345, ENone, EMissingTest), check.Equals, uint64(8))
+ c.Check(ck.errorsMask(2, EMissingTest), check.Equals, uint64(10))
+}
+
+func (s *Suite) Test_QAChecker_checkOrder(c *check.C) {
+ ck := s.Init(c)
+
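+ // Within one test file, the tests must follow the declaration order of
+ // their testees. A test whose testee is declared earlier than the testee
+ // of a preceding test is reported, together with the test that it has to
+ // be moved before.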
+ ck.addTestee(code{"f.go", "T", "", 10})
+ ck.addTestee(code{"f.go", "T", "M1", 11})
+ ck.addTestee(code{"f.go", "T", "M2", 12})
+ ck.addTestee(code{"f.go", "T", "M3", 13})
+ ck.addTest(code{"a_test.go", "S", "Test_A", 98}) // different file, is skipped
+ ck.addTest(code{"f_test.go", "S", "Test_Missing", 99}) // missing testee, is skipped
+ ck.addTest(code{"f_test.go", "S", "Test_T_M1", 100}) // maxTestee = 11
+ ck.addTest(code{"f_test.go", "S", "Test_T_M2", 101}) // maxTestee = 12
+ ck.addTest(code{"f_test.go", "S", "Test_T", 102}) // testee 10 < maxTestee 12: insert before first [.testee > testee 10] == T_M1
+ ck.addTest(code{"f_test.go", "S", "Test_T_M3", 103}) // maxTestee = 13
+ ck.addTest(code{"f_test.go", "S", "Test_T__1", 104}) // testee < maxTestee: insert before first [testee > 10]
+ ck.addTest(code{"f_test.go", "S", "Test_T__2", 105}) // testee < maxTestee: insert before first [testee > 10]
+ ck.addTest(code{"f_test.go", "S", "Test_T_M2__1", 106}) // testee < maxTestee: insert before first [testee > 12] == T_M3
+ ck.relate()
+
+ ck.checkOrder()
+
+ s.CheckErrors(
+ "Test \"S.Test_T\" must be ordered before \"S.Test_T_M1\".",
+ "Test \"S.Test_T__1\" must be ordered before \"S.Test_T_M1\".",
+ "Test \"S.Test_T__2\" must be ordered before \"S.Test_T_M1\".",
+ "Test \"S.Test_T_M2__1\" must be ordered before \"S.Test_T_M3\".")
+}
+
+func (s *Suite) Test_QAChecker_addError(c *check.C) {
+ ck := s.Init(c)
+
+ ck.Configure("ignored*", "*", "*", -EName)
+ ok1 := ck.addError(EName, code{"ignored.go", "", "Func", 0}, "E1")
+ ok2 := ck.addError(EName, code{"reported.go", "", "Func", 0}, "E2")
+
+ c.Check(ok1, check.Equals, false)
+ c.Check(ok2, check.Equals, true)
+ s.CheckErrors(
+ "E2")
+}
+
+func (s *Suite) Test_QAChecker_print__empty(c *check.C) {
+ var out bytes.Buffer
+ ck := s.Init(c)
+ ck.out = &out
+
+ ck.print()
+
+ c.Check(out.String(), check.Equals, "")
+}
+
+func (s *Suite) Test_QAChecker_print__1_error(c *check.C) {
+ var out bytes.Buffer
+ ck := s.Init(c)
+ ck.out = &out
+ ck.addError(EName, code{}, "1")
+
+ ck.print()
+
+ c.Check(out.String(), check.Equals, "1\n")
+ s.CheckErrors("1")
+ s.CheckSummary("1 error.")
+}
+
+func (s *Suite) Test_QAChecker_print__2_errors(c *check.C) {
+ var out bytes.Buffer
+ ck := s.Init(c)
+ ck.out = &out
+ ck.addError(EName, code{}, "1")
+ ck.addError(EName, code{}, "2")
+
+ ck.print()
+
+ c.Check(out.String(), check.Equals, "1\n2\n")
+ s.CheckErrors("1", "2")
+ s.CheckSummary("2 errors.")
+}
+
+func (s *Suite) Test_code_fullName(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(typeName, funcName, fullName string) {
+ code := code{"filename", typeName, funcName, 0}
+ c.Check(code.fullName(), check.Equals, fullName)
+ }
+
+ test("Type", "", "Type")
+ test("", "Func", "Func")
+ test("Type", "Method", "Type.Method")
+}
+
+func (s *Suite) Test_code_isFunc(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(typeName, funcName string, isFunc bool) {
+ code := code{"filename", typeName, funcName, 0}
+ c.Check(code.isFunc(), check.Equals, isFunc)
+ }
+
+ test("Type", "", false)
+ test("", "Func", true)
+ test("Type", "Method", false)
+}
+
+func (s *Suite) Test_code_isType(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(typeName, funcName string, isType bool) {
+ code := code{"filename", typeName, funcName, 0}
+ c.Check(code.isType(), check.Equals, isType)
+ }
+
+ test("Type", "", true)
+ test("", "Func", false)
+ test("Type", "Method", false)
+}
+
+func (s *Suite) Test_code_isMethod(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(typeName, funcName string, isMethod bool) {
+ code := code{"filename", typeName, funcName, 0}
+ c.Check(code.isMethod(), check.Equals, isMethod)
+ }
+
+ test("Type", "", false)
+ test("", "Func", false)
+ test("Type", "Method", true)
+}
+
+func (s *Suite) Test_code_isTestScope(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(filename string, isTestScope bool) {
+ code := code{filename, "", "", 0}
+ c.Check(code.isTestScope(), check.Equals, isTestScope)
+ }
+
+ test("f.go", false)
+ test("test.go", false)
+ test("_test.go", true)
+ test("file_test.go", true)
+ test("file_linux_test.go", true)
+}
+
+func (s *Suite) Test_code_testFile(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(filename string, testFile string) {
+ code := code{filename, "", "", 0}
+ c.Check(code.testFile(), check.Equals, testFile)
+ }
+
+ test("f.go", "f_test.go")
+ test("test.go", "test_test.go")
+ test("_test.go", "_test.go")
+ test("file_test.go", "file_test.go")
+ test("file_linux_test.go", "file_linux_test.go")
+}
+
+func (s *Suite) Test_isCamelCase(c *check.C) {
+ _ = s.Init(c)
+
+ c.Check(isCamelCase(""), check.Equals, false)
+ c.Check(isCamelCase("Word"), check.Equals, false)
+ c.Check(isCamelCase("Ada_Case"), check.Equals, false)
+ c.Check(isCamelCase("snake_case"), check.Equals, false)
+ c.Check(isCamelCase("CamelCase"), check.Equals, true)
+
+ // After the first underscore of the description, any CamelCase
+ // is ignored because there is no danger of confusing the method
+ // name with the description.
+ c.Check(isCamelCase("Word_CamelCase"), check.Equals, false)
+}
+
+func (s *Suite) Test_join(c *check.C) {
+ _ = s.Init(c)
+
+ c.Check(join("", " and ", ""), check.Equals, "")
+ c.Check(join("one", " and ", ""), check.Equals, "one")
+ c.Check(join("", " and ", "two"), check.Equals, "two")
+ c.Check(join("one", " and ", "two"), check.Equals, "one and two")
+}
+
+func (s *Suite) Test_matches(c *check.C) {
+ _ = s.Init(c)
+
+ c.Check(matches("*", "*"), check.Equals, true)
+ c.Check(matches("anything", "*"), check.Equals, true)
+ c.Check(matches("*", "anything"), check.Equals, false)
+ c.Check(func() { matches("any", "[") }, check.Panics, path.ErrBadPattern)
+}
+
+func (s *Suite) Test_sortedKeys(c *check.C) {
+ _ = s.Init(c)
+
+ m := make(map[string]uint8)
+ m["first"] = 1
+ m["second"] = 2
+ m["third"] = 3
+ m["fourth"] = 4
+
+ c.Check(
+ sortedKeys(m),
+ check.DeepEquals,
+ []string{"first", "fourth", "second", "third"})
+}
+
+func (s *Suite) Test_Value_Method(c *check.C) {
+ _ = s.Init(c)
+
+ // Just for code coverage of checkTestFile, to have a piece of code
+ // that lives in the same file as its test.
+}
+
+type Value struct{}
+
+// Method has no star on the receiver,
+// for code coverage of QAChecker.loadDecl.
+func (Value) Method() {}