Snap for 8383211 from f10932f763d058b0dcb3acfb795c869996fef47b to build-tools-release

Change-Id: I0969e0c20848f202469acc85c3681437b8b62fdc
diff --git a/METADATA b/METADATA
index 8316c34..caf2398 100644
--- a/METADATA
+++ b/METADATA
@@ -1,8 +1,5 @@
 name: "golang-x-tools"
-description:
-    "This subrepository holds the source for various packages and tools that "
-    "support the Go programming language"
-
+description: "This subrepository holds the source for various packages and tools that support the Go programming language"
 third_party {
   url {
     type: HOMEPAGE
@@ -12,7 +9,11 @@
     type: GIT
     value: "https://go.googlesource.com/tools/"
   }
-  version: "3e0d083b858b3fdb7d095b5a3deb184aa0a5d35e"
-  last_upgrade_date { year: 2021 month: 8 day: 27 }
+  version: "e693fb417253d14786976bd29a456961aa8b6343"
   license_type: NOTICE
+  last_upgrade_date {
+    year: 2022
+    month: 3
+    day: 29
+  }
 }
diff --git a/README.md b/README.md
index 789747b..71a945a 100644
--- a/README.md
+++ b/README.md
@@ -9,15 +9,14 @@
 distributions.
 
 Others, including the Go `guru` and the test coverage tool, can be fetched with
-`go get`.
+`go install`.
 
 Packages include a type-checker for Go and an implementation of the
 Static Single Assignment form (SSA) representation for Go programs.
 
 ## Download/Install
 
-The easiest way to install is to run `go get -u golang.org/x/tools/...`. You can
-also manually git clone the repository to `$GOPATH/src/golang.org/x/tools`.
+The easiest way to install is to run `go install golang.org/x/tools/...@latest`.
 
 ## JS/CSS Formatting
 
diff --git a/cmd/callgraph/main.go b/cmd/callgraph/main.go
index f74c278..f83be0e 100644
--- a/cmd/callgraph/main.go
+++ b/cmd/callgraph/main.go
@@ -166,7 +166,7 @@
 
 func doCallgraph(dir, gopath, algo, format string, tests bool, args []string) error {
 	if len(args) == 0 {
-		fmt.Fprintln(os.Stderr, Usage)
+		fmt.Fprint(os.Stderr, Usage)
 		return nil
 	}
 
diff --git a/cmd/callgraph/main_test.go b/cmd/callgraph/main_test.go
index f486def..7e838f7 100644
--- a/cmd/callgraph/main_test.go
+++ b/cmd/callgraph/main_test.go
@@ -34,6 +34,10 @@
 }
 
 func TestCallgraph(t *testing.T) {
+	if b := os.Getenv("GO_BUILDER_NAME"); b == "windows-arm64-10" {
+		t.Skipf("skipping due to suspected file corruption bug on %s builder (https://go.dev/issue/50706)", b)
+	}
+
 	testenv.NeedsTool(t, "go")
 
 	gopath, err := filepath.Abs("testdata")
diff --git a/cmd/cover/cover.go b/cmd/cover/cover.go
index e093364..42a7e37 100644
--- a/cmd/cover/cover.go
+++ b/cmd/cover/cover.go
@@ -42,8 +42,8 @@
 `
 
 func usage() {
-	fmt.Fprintln(os.Stderr, usageMessage)
-	fmt.Fprintln(os.Stderr, "Flags:")
+	fmt.Fprint(os.Stderr, usageMessage)
+	fmt.Fprintln(os.Stderr, "\nFlags:")
 	flag.PrintDefaults()
 	fmt.Fprintln(os.Stderr, "\n  Only one of -html, -func, or -mode may be set.")
 	os.Exit(2)
diff --git a/cmd/eg/eg.go b/cmd/eg/eg.go
index 6463ac4..1629b80 100644
--- a/cmd/eg/eg.go
+++ b/cmd/eg/eg.go
@@ -59,7 +59,8 @@
 	args := flag.Args()
 
 	if *helpFlag {
-		fmt.Fprint(os.Stderr, eg.Help)
+		help := eg.Help // hide %s from vet
+		fmt.Fprint(os.Stderr, help)
 		os.Exit(2)
 	}
 
diff --git a/cmd/file2fuzz/main.go b/cmd/file2fuzz/main.go
index f0c8939..350ed0a 100644
--- a/cmd/file2fuzz/main.go
+++ b/cmd/file2fuzz/main.go
@@ -9,7 +9,7 @@
 //
 //	file2fuzz [-o output] [input...]
 //
-// The defualt behavior is to read input from stdin and write the converted
+// The default behavior is to read input from stdin and write the converted
 // output to stdout. If any position arguments are provided stdin is ignored
 // and the arguments are assumed to be input files to convert.
 //
diff --git a/cmd/file2fuzz/main_test.go b/cmd/file2fuzz/main_test.go
index 55d824c..fe2c103 100644
--- a/cmd/file2fuzz/main_test.go
+++ b/cmd/file2fuzz/main_test.go
@@ -5,67 +5,41 @@
 package main
 
 import (
-	"fmt"
 	"io/ioutil"
 	"os"
 	"os/exec"
 	"path/filepath"
-	"runtime"
 	"strings"
 	"sync"
 	"testing"
 )
 
-// The setup for this test is mostly cribbed from x/exp/txtar.
+func TestMain(m *testing.M) {
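+	// The test binary doubles as the file2fuzz binary: the file2fuzz helper
+	// below re-executes this executable with GO_FILE2FUZZ_TEST_IS_FILE2FUZZ
+	// set, in which case we run main() instead of the test suite.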
+	if os.Getenv("GO_FILE2FUZZ_TEST_IS_FILE2FUZZ") != "" {
+		main()
+		os.Exit(0)
+	}
 
-var buildBin struct {
+	os.Exit(m.Run())
+}
+
+var f2f struct {
 	once sync.Once
-	name string
+	path string
 	err  error
 }
 
-func binPath(t *testing.T) string {
-	t.Helper()
-	if _, err := exec.LookPath("go"); err != nil {
-		t.Skipf("cannot build file2fuzz binary: %v", err)
-	}
-
-	buildBin.once.Do(func() {
-		exe, err := ioutil.TempFile("", "file2fuzz-*.exe")
-		if err != nil {
-			buildBin.err = err
-			return
-		}
-		exe.Close()
-		buildBin.name = exe.Name()
-
-		cmd := exec.Command("go", "build", "-o", buildBin.name, ".")
-		out, err := cmd.CombinedOutput()
-		if err != nil {
-			buildBin.err = fmt.Errorf("%s: %v\n%s", strings.Join(cmd.Args, " "), err, out)
-		}
-	})
-
-	if buildBin.err != nil {
-		if runtime.GOOS == "android" {
-			t.Skipf("skipping test after failing to build file2fuzz binary: go_android_exec may have failed to copy needed dependencies (see https://golang.org/issue/37088)")
-		}
-		t.Fatal(buildBin.err)
-	}
-	return buildBin.name
-}
-
-func TestMain(m *testing.M) {
-	os.Exit(m.Run())
-	if buildBin.name != "" {
-		os.Remove(buildBin.name)
-	}
-}
-
 func file2fuzz(t *testing.T, dir string, args []string, stdin string) (string, bool) {
-	t.Helper()
-	cmd := exec.Command(binPath(t), args...)
+	f2f.once.Do(func() {
+		f2f.path, f2f.err = os.Executable()
+	})
+	if f2f.err != nil {
+		t.Fatal(f2f.err)
+	}
+
+	cmd := exec.Command(f2f.path, args...)
 	cmd.Dir = dir
+	cmd.Env = append(os.Environ(), "PWD="+dir, "GO_FILE2FUZZ_TEST_IS_FILE2FUZZ=1")
 	if stdin != "" {
 		cmd.Stdin = strings.NewReader(stdin)
 	}
diff --git a/cmd/fiximports/main_test.go b/cmd/fiximports/main_test.go
index 9d2c94c..bbc4a2e 100644
--- a/cmd/fiximports/main_test.go
+++ b/cmd/fiximports/main_test.go
@@ -245,6 +245,9 @@
 
 // TestDryRun tests that the -n flag suppresses calls to writeFile.
 func TestDryRun(t *testing.T) {
+	if os.Getenv("GO_BUILDER_NAME") == "plan9-arm" {
+		t.Skipf("skipping test that times out on plan9-arm; see https://go.dev/issue/50775")
+	}
 	testenv.NeedsTool(t, "go")
 
 	*dryrun = true
diff --git a/cmd/getgo/.gitignore b/cmd/getgo/.gitignore
index d4984ab..47fe984 100644
--- a/cmd/getgo/.gitignore
+++ b/cmd/getgo/.gitignore
@@ -1,3 +1,2 @@
 build
-testgetgo
 getgo
diff --git a/cmd/getgo/main_test.go b/cmd/getgo/main_test.go
index 0c0e8b9..fc28c5d 100644
--- a/cmd/getgo/main_test.go
+++ b/cmd/getgo/main_test.go
@@ -13,50 +13,27 @@
 	"io/ioutil"
 	"os"
 	"os/exec"
-	"runtime"
 	"testing"
 )
 
-const (
-	testbin = "testgetgo"
-)
-
-var (
-	exeSuffix string // ".exe" on Windows
-)
-
-func init() {
-	if runtime.GOOS == "windows" {
-		exeSuffix = ".exe"
-	}
-}
-
-// TestMain creates a getgo command for testing purposes and
-// deletes it after the tests have been run.
 func TestMain(m *testing.M) {
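+	// The test binary re-executes itself as the getgo command: doRun sets
+	// GO_GETGO_TEST_IS_GETGO in the child's environment, and this branch then
+	// dispatches to main() instead of running the tests.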
+	if os.Getenv("GO_GETGO_TEST_IS_GETGO") != "" {
+		main()
+		os.Exit(0)
+	}
+
 	if os.Getenv("GOGET_INTEGRATION") == "" {
 		fmt.Fprintln(os.Stderr, "main_test: Skipping integration tests with GOGET_INTEGRATION unset")
 		return
 	}
 
-	args := []string{"build", "-tags", testbin, "-o", testbin + exeSuffix}
-	out, err := exec.Command("go", args...).CombinedOutput()
-	if err != nil {
-		fmt.Fprintf(os.Stderr, "building %s failed: %v\n%s", testbin, err, out)
-		os.Exit(2)
-	}
-
 	// Don't let these environment variables confuse the test.
 	os.Unsetenv("GOBIN")
 	os.Unsetenv("GOPATH")
 	os.Unsetenv("GIT_ALLOW_PROTOCOL")
 	os.Unsetenv("PATH")
 
-	r := m.Run()
-
-	os.Remove(testbin + exeSuffix)
-
-	os.Exit(r)
+	os.Exit(m.Run())
 }
 
 func createTmpHome(t *testing.T) string {
@@ -72,12 +49,18 @@
 // doRun runs the test getgo command, recording stdout and stderr and
 // returning exit status.
 func doRun(t *testing.T, args ...string) error {
+	exe, err := os.Executable()
+	if err != nil {
+		t.Fatal(err)
+	}
+	t.Helper()
+
+	t.Logf("running getgo %v", args)
 	var stdout, stderr bytes.Buffer
-	t.Logf("running %s %v", testbin, args)
-	cmd := exec.Command("./"+testbin+exeSuffix, args...)
+	cmd := exec.Command(exe, args...)
 	cmd.Stdout = &stdout
 	cmd.Stderr = &stderr
-	cmd.Env = os.Environ()
+	cmd.Env = append(os.Environ(), "GO_GETGO_TEST_IS_GETGO=1")
 	status := cmd.Run()
 	if stdout.Len() > 0 {
 		t.Log("standard output:")
diff --git a/cmd/godoc/doc.go b/cmd/godoc/doc.go
index 6dda278..279b2b1 100644
--- a/cmd/godoc/doc.go
+++ b/cmd/godoc/doc.go
@@ -52,13 +52,6 @@
 		Go root directory
 	-http=addr
 		HTTP service address (e.g., '127.0.0.1:6060' or just ':6060')
-	-analysis=type,pointer
-		comma-separated list of analyses to perform
-		"type": display identifier resolution, type info, method sets,
-			'implements', and static callees
-		"pointer": display channel peers, callers and dynamic callees
-			(significantly slower)
-		See https://golang.org/lib/godoc/analysis/help.html for details.
 	-templates=""
 		directory containing alternate template files; if set,
 		the directory may provide alternative template files
@@ -115,5 +108,7 @@
 See "Godoc: documenting Go code" for how to write good comments for godoc:
 https://golang.org/doc/articles/godoc_documenting_go_code.html
 
+Deprecated: godoc cannot select what version of a package is displayed.
+Instead, use golang.org/x/pkgsite/cmd/pkgsite.
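+
+For example (illustrative; consult the pkgsite documentation for details):
+
+	go install golang.org/x/pkgsite/cmd/pkgsite@latest
+	pkgsite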
 */
 package main // import "golang.org/x/tools/cmd/godoc"
diff --git a/cmd/godoc/godoc_test.go b/cmd/godoc/godoc_test.go
index ac6bacd..76568c3 100644
--- a/cmd/godoc/godoc_test.go
+++ b/cmd/godoc/godoc_test.go
@@ -5,11 +5,9 @@
 package main_test
 
 import (
-	"bufio"
 	"bytes"
 	"fmt"
 	"go/build"
-	"io"
 	"io/ioutil"
 	"net"
 	"net/http"
@@ -479,135 +477,3 @@
 		t.Errorf("stderr contains 'go mod download', is that intentional?\nstderr=%q", stderr.String())
 	}
 }
-
-// Basic integration test for godoc -analysis=type (via HTTP interface).
-func TestTypeAnalysis(t *testing.T) {
-	bin, cleanup := buildGodoc(t)
-	defer cleanup()
-	testTypeAnalysis(t, packagestest.GOPATH, bin)
-	// TODO(golang.org/issue/34473): Add support for type, pointer
-	// analysis in module mode, then enable its test coverage here.
-}
-func testTypeAnalysis(t *testing.T, x packagestest.Exporter, bin string) {
-	if runtime.GOOS == "plan9" {
-		t.Skip("skipping test on plan9 (issue #11974)") // see comment re: Plan 9 below
-	}
-
-	// Write a fake GOROOT/GOPATH.
-	// TODO(golang.org/issue/34473): This test uses import paths without a dot in first
-	// path element. This is not viable in module mode; import paths will need to change.
-	e := packagestest.Export(t, x, []packagestest.Module{
-		{
-			Name: "app",
-			Files: map[string]interface{}{
-				"main.go": `
-package main
-import "lib"
-func main() { print(lib.V) }
-`,
-			},
-		},
-		{
-			Name: "lib",
-			Files: map[string]interface{}{
-				"lib.go": `
-package lib
-type T struct{}
-const C = 3
-var V T
-func (T) F() int { return C }
-`,
-			},
-		},
-	})
-	goroot := filepath.Join(e.Temp(), "goroot")
-	if err := os.Mkdir(goroot, 0755); err != nil {
-		t.Fatalf("os.Mkdir(%q) failed: %v", goroot, err)
-	}
-	defer e.Cleanup()
-
-	// Start the server.
-	addr := serverAddress(t)
-	cmd := exec.Command(bin, fmt.Sprintf("-http=%s", addr), "-analysis=type")
-	cmd.Dir = e.Config.Dir
-	// Point to an empty GOROOT directory to speed things up
-	// by not doing type analysis for the entire real GOROOT.
-	// TODO(golang.org/issue/34473): This test optimization may not be viable in module mode.
-	cmd.Env = append(e.Config.Env, fmt.Sprintf("GOROOT=%s", goroot))
-	cmd.Stdout = os.Stderr
-	stderr, err := cmd.StderrPipe()
-	if err != nil {
-		t.Fatal(err)
-	}
-	cmd.Args[0] = "godoc"
-	if err := cmd.Start(); err != nil {
-		t.Fatalf("failed to start godoc: %s", err)
-	}
-	defer killAndWait(cmd)
-	waitForServerReady(t, cmd, addr)
-
-	// Wait for type analysis to complete.
-	reader := bufio.NewReader(stderr)
-	for {
-		s, err := reader.ReadString('\n') // on Plan 9 this fails
-		if err != nil {
-			t.Fatal(err)
-		}
-		fmt.Fprint(os.Stderr, s)
-		if strings.Contains(s, "Type analysis complete.") {
-			break
-		}
-	}
-	go io.Copy(os.Stderr, reader)
-
-	t0 := time.Now()
-
-	// Make an HTTP request and check for a regular expression match.
-	// The patterns are very crude checks that basic type information
-	// has been annotated onto the source view.
-tryagain:
-	for _, test := range []struct{ url, pattern string }{
-		{"/src/lib/lib.go", "L2.*package .*Package docs for lib.*/lib"},
-		{"/src/lib/lib.go", "L3.*type .*type info for T.*struct"},
-		{"/src/lib/lib.go", "L5.*var V .*type T struct"},
-		{"/src/lib/lib.go", "L6.*func .*type T struct.*T.*return .*const C untyped int.*C"},
-
-		{"/src/app/main.go", "L2.*package .*Package docs for app"},
-		{"/src/app/main.go", "L3.*import .*Package docs for lib.*lib"},
-		{"/src/app/main.go", "L4.*func main.*package lib.*lib.*var lib.V lib.T.*V"},
-	} {
-		url := fmt.Sprintf("http://%s%s", addr, test.url)
-		resp, err := http.Get(url)
-		if err != nil {
-			t.Errorf("GET %s failed: %s", url, err)
-			continue
-		}
-		body, err := ioutil.ReadAll(resp.Body)
-		resp.Body.Close()
-		if err != nil {
-			t.Errorf("GET %s: failed to read body: %s (response: %v)", url, err, resp)
-			continue
-		}
-
-		if !bytes.Contains(body, []byte("Static analysis features")) {
-			// Type analysis results usually become available within
-			// ~4ms after godoc startup (for this input on my machine).
-			if elapsed := time.Since(t0); elapsed > 500*time.Millisecond {
-				t.Fatalf("type analysis results still unavailable after %s", elapsed)
-			}
-			time.Sleep(10 * time.Millisecond)
-			goto tryagain
-		}
-
-		match, err := regexp.Match(test.pattern, body)
-		if err != nil {
-			t.Errorf("regexp.Match(%q) failed: %s", test.pattern, err)
-			continue
-		}
-		if !match {
-			// This is a really ugly failure message.
-			t.Errorf("GET %s: body doesn't match %q, got:\n%s",
-				url, test.pattern, string(body))
-		}
-	}
-}
diff --git a/cmd/godoc/main.go b/cmd/godoc/main.go
index 8780f8b..352bb4b 100644
--- a/cmd/godoc/main.go
+++ b/cmd/godoc/main.go
@@ -25,7 +25,6 @@
 	"flag"
 	"fmt"
 	"go/build"
-	exec "golang.org/x/sys/execabs"
 	"io"
 	"log"
 	"net/http"
@@ -38,8 +37,9 @@
 	"runtime"
 	"strings"
 
+	exec "golang.org/x/sys/execabs"
+
 	"golang.org/x/tools/godoc"
-	"golang.org/x/tools/godoc/analysis"
 	"golang.org/x/tools/godoc/static"
 	"golang.org/x/tools/godoc/vfs"
 	"golang.org/x/tools/godoc/vfs/gatefs"
@@ -59,8 +59,6 @@
 	// file-based index
 	writeIndex = flag.Bool("write_index", false, "write index to a file; the file name must be specified with -index_files")
 
-	analysisFlag = flag.String("analysis", "", `comma-separated list of analyses to perform when in GOPATH mode (supported: type, pointer). See https://golang.org/lib/godoc/analysis/help.html`)
-
 	// network
 	httpAddr = flag.String("http", defaultAddr, "HTTP service address")
 
@@ -208,28 +206,22 @@
 	if goModFile != "" {
 		fmt.Printf("using module mode; GOMOD=%s\n", goModFile)
 
-		if *analysisFlag != "" {
-			fmt.Fprintln(os.Stderr, "The -analysis flag is supported only in GOPATH mode at this time.")
-			fmt.Fprintln(os.Stderr, "See https://golang.org/issue/34473.")
-			usage()
-		}
-
 		// Detect whether to use vendor mode or not.
-		mainMod, vendorEnabled, err := gocommand.VendorEnabled(context.Background(), gocommand.Invocation{}, &gocommand.Runner{})
+		vendorEnabled, mainModVendor, err := gocommand.VendorEnabled(context.Background(), gocommand.Invocation{}, &gocommand.Runner{})
 		if err != nil {
 			fmt.Fprintf(os.Stderr, "failed to determine if vendoring is enabled: %v", err)
 			os.Exit(1)
 		}
 		if vendorEnabled {
 			// Bind the root directory of the main module.
-			fs.Bind(path.Join("/src", mainMod.Path), gatefs.New(vfs.OS(mainMod.Dir), fsGate), "/", vfs.BindAfter)
+			fs.Bind(path.Join("/src", mainModVendor.Path), gatefs.New(vfs.OS(mainModVendor.Dir), fsGate), "/", vfs.BindAfter)
 
 			// Bind the vendor directory.
 			//
 			// Note that in module mode, vendor directories in locations
 			// other than the main module's root directory are ignored.
 			// See https://golang.org/ref/mod#vendoring.
-			vendorDir := filepath.Join(mainMod.Dir, "vendor")
+			vendorDir := filepath.Join(mainModVendor.Dir, "vendor")
 			fs.Bind("/src", gatefs.New(vfs.OS(vendorDir), fsGate), "/", vfs.BindAfter)
 
 		} else {
@@ -266,20 +258,6 @@
 		}
 	}
 
-	var typeAnalysis, pointerAnalysis bool
-	if *analysisFlag != "" {
-		for _, a := range strings.Split(*analysisFlag, ",") {
-			switch a {
-			case "type":
-				typeAnalysis = true
-			case "pointer":
-				pointerAnalysis = true
-			default:
-				log.Fatalf("unknown analysis: %s", a)
-			}
-		}
-	}
-
 	var corpus *godoc.Corpus
 	if goModFile != "" {
 		corpus = godoc.NewCorpus(moduleFS{fs})
@@ -376,11 +354,6 @@
 		go corpus.RunIndexer()
 	}
 
-	// Start type/pointer analysis.
-	if typeAnalysis || pointerAnalysis {
-		go analysis.Run(pointerAnalysis, &corpus.Analysis)
-	}
-
 	// Start http server.
 	if *verbose {
 		log.Println("starting HTTP server")
diff --git a/cmd/goimports/doc.go b/cmd/goimports/doc.go
index f344d80..5a5b900 100644
--- a/cmd/goimports/doc.go
+++ b/cmd/goimports/doc.go
@@ -7,7 +7,7 @@
 Command goimports updates your Go import lines,
 adding missing ones and removing unreferenced ones.
 
-     $ go get golang.org/x/tools/cmd/goimports
+     $ go install golang.org/x/tools/cmd/goimports@latest
 
 In addition to fixing imports, goimports also formats
 your code in the same style as gofmt so it can be used
diff --git a/cmd/gomvpkg/main.go b/cmd/gomvpkg/main.go
index 20f6111..5de1e44 100644
--- a/cmd/gomvpkg/main.go
+++ b/cmd/gomvpkg/main.go
@@ -83,7 +83,7 @@
 	}
 
 	if *helpFlag || *fromFlag == "" || *toFlag == "" {
-		fmt.Println(Usage)
+		fmt.Print(Usage)
 		return
 	}
 
diff --git a/cmd/gorename/gorename_test.go b/cmd/gorename/gorename_test.go
index 2928051..30b8796 100644
--- a/cmd/gorename/gorename_test.go
+++ b/cmd/gorename/gorename_test.go
@@ -331,8 +331,8 @@
 		bin += ".exe"
 	}
 	cmd := exec.Command("go", "build", "-o", bin)
-	if err := cmd.Run(); err != nil {
-		t.Fatalf("Building gorename: %v", err)
+	if out, err := cmd.CombinedOutput(); err != nil {
+		t.Fatalf("Building gorename: %v\n%s", err, out)
 	}
 	return tmp, bin, func() { os.RemoveAll(tmp) }
 }
diff --git a/cmd/gorename/main.go b/cmd/gorename/main.go
index 03e9958..e59abd7 100644
--- a/cmd/gorename/main.go
+++ b/cmd/gorename/main.go
@@ -46,7 +46,7 @@
 	}
 
 	if *helpFlag || (*offsetFlag == "" && *fromFlag == "" && *toFlag == "") {
-		fmt.Println(rename.Usage)
+		fmt.Print(rename.Usage)
 		return
 	}
 
diff --git a/cmd/gotype/gotype.go b/cmd/gotype/gotype.go
index dbb2626..22fe4aa 100644
--- a/cmd/gotype/gotype.go
+++ b/cmd/gotype/gotype.go
@@ -167,7 +167,8 @@
 `
 
 func usage() {
-	fmt.Fprintln(os.Stderr, usageString)
+	fmt.Fprint(os.Stderr, usageString)
+	fmt.Fprintln(os.Stderr)
 	flag.PrintDefaults()
 	os.Exit(2)
 }
diff --git a/cmd/goyacc/yacc.go b/cmd/goyacc/yacc.go
index 848717e..70d01f0 100644
--- a/cmd/goyacc/yacc.go
+++ b/cmd/goyacc/yacc.go
@@ -51,6 +51,7 @@
 	"fmt"
 	"go/format"
 	"io/ioutil"
+	"math"
 	"os"
 	"strconv"
 	"strings"
@@ -2157,7 +2158,7 @@
 	if !lflag {
 		fmt.Fprintf(ftable, "\n//line yacctab:1")
 	}
-	fmt.Fprintf(ftable, "\nvar %sExca = [...]int{\n", prefix)
+	var actions []int
 
 	if len(errors) > 0 {
 		stateTable = make([]Row, nstate)
@@ -2230,10 +2231,11 @@
 				}
 			}
 		}
-		wract(i)
+		actions = addActions(actions, i)
 	}
 
-	fmt.Fprintf(ftable, "}\n")
+	arrayOutColumns("Exca", actions, 2, false)
+	fmt.Fprintf(ftable, "\n")
 	ftable.WriteRune('\n')
 	fmt.Fprintf(ftable, "const %sPrivate = %v\n", prefix, PRIVATE)
 }
@@ -2278,7 +2280,7 @@
 // output state i
 // temp1 has the actions, lastred the default
 //
-func wract(i int) {
+func addActions(act []int, i int) []int {
 	var p, p1 int
 
 	// find the best choice for lastred
@@ -2351,18 +2353,19 @@
 				continue
 			}
 			if flag == 0 {
-				fmt.Fprintf(ftable, "\t-1, %v,\n", i)
+				act = append(act, -1, i)
 			}
 			flag++
-			fmt.Fprintf(ftable, "\t%v, %v,\n", p, p1)
+			act = append(act, p, p1)
 			zzexcp++
 		}
 	}
 	if flag != 0 {
 		defact[i] = -2
-		fmt.Fprintf(ftable, "\t-2, %v,\n", lastred)
+		act = append(act, -2, lastred)
 	}
 	optst[i] = os
+	return act
 }
 
 //
@@ -2855,7 +2858,7 @@
 		}
 	}
 	arout("Chk", temp1, nstate)
-	arout("Def", defact, nstate)
+	arrayOutColumns("Def", defact[:nstate], 10, false)
 
 	// put out token translation tables
 	// table 1 has 0-256
@@ -2903,8 +2906,7 @@
 
 	// table 3 has everything else
 	ftable.WriteRune('\n')
-	fmt.Fprintf(ftable, "var %sTok3 = [...]int{\n\t", prefix)
-	c = 0
+	var v []int
 	for i = 1; i <= ntokens; i++ {
 		j = tokset[i].value
 		if j >= 0 && j < 256 {
@@ -2914,19 +2916,11 @@
 			continue
 		}
 
-		if c%5 != 0 {
-			ftable.WriteRune(' ')
-		}
-		fmt.Fprintf(ftable, "%d, %d,", j, i)
-		c++
-		if c%5 == 0 {
-			fmt.Fprint(ftable, "\n\t")
-		}
+		v = append(v, j, i)
 	}
-	if c%5 != 0 {
-		ftable.WriteRune(' ')
-	}
-	fmt.Fprintf(ftable, "%d,\n}\n", 0)
+	v = append(v, 0)
+	arout("Tok3", v, len(v))
+	fmt.Fprintf(ftable, "\n")
 
 	// Custom error messages.
 	fmt.Fprintf(ftable, "\n")
@@ -3013,21 +3007,65 @@
 	}
 }
 
-func arout(s string, v []int, n int) {
+func minMax(v []int) (min, max int) {
+	if len(v) == 0 {
+		return
+	}
+	min = v[0]
+	max = v[0]
+	for _, i := range v {
+		if i < min {
+			min = i
+		}
+		if i > max {
+			max = i
+		}
+	}
+	return
+}
+
+// return the smaller integral base type to store the values in v
+func minType(v []int, allowUnsigned bool) (typ string) {
+	typ = "int"
+	typeLen := 8
+	min, max := minMax(v)
+	checkType := func(name string, size, minType, maxType int) {
+		if min >= minType && max <= maxType && typeLen > size {
+			typ = name
+			typeLen = size
+		}
+	}
+	checkType("int32", 4, math.MinInt32, math.MaxInt32)
+	checkType("int16", 2, math.MinInt16, math.MaxInt16)
+	checkType("int8", 1, math.MinInt8, math.MaxInt8)
+	if allowUnsigned {
+		// Do not check for uint32: not worth it, and it won't compile on 32 bit systems
+		checkType("uint16", 2, 0, math.MaxUint16)
+		checkType("uint8", 1, 0, math.MaxUint8)
+	}
+	return
+}
+
+func arrayOutColumns(s string, v []int, columns int, allowUnsigned bool) {
 	s = prefix + s
 	ftable.WriteRune('\n')
-	fmt.Fprintf(ftable, "var %v = [...]int{", s)
-	for i := 0; i < n; i++ {
-		if i%10 == 0 {
+	minType := minType(v, allowUnsigned)
+	fmt.Fprintf(ftable, "var %v = [...]%s{", s, minType)
+	for i, val := range v {
+		if i%columns == 0 {
 			fmt.Fprintf(ftable, "\n\t")
 		} else {
 			ftable.WriteRune(' ')
 		}
-		fmt.Fprintf(ftable, "%d,", v[i])
+		fmt.Fprintf(ftable, "%d,", val)
 	}
 	fmt.Fprintf(ftable, "\n}\n")
 }
 
+func arout(s string, v []int, n int) {
+	arrayOutColumns(s, v[:n], 10, true)
+}
+
 //
 // output the summary on y.output
 //
@@ -3332,9 +3370,9 @@
 	expected := make([]int, 0, 4)
 
 	// Look for shiftable tokens.
-	base := $$Pact[state]
+	base := int($$Pact[state])
 	for tok := TOKSTART; tok-1 < len($$Toknames); tok++ {
-		if n := base + tok; n >= 0 && n < $$Last && $$Chk[$$Act[n]] == tok {
+		if n := base + tok; n >= 0 && n < $$Last && int($$Chk[int($$Act[n])]) == tok {
 			if len(expected) == cap(expected) {
 				return res
 			}
@@ -3344,13 +3382,13 @@
 
 	if $$Def[state] == -2 {
 		i := 0
-		for $$Exca[i] != -1 || $$Exca[i+1] != state {
+		for $$Exca[i] != -1 || int($$Exca[i+1]) != state {
 			i += 2
 		}
 
 		// Look for tokens that we accept or reduce.
 		for i += 2; $$Exca[i] >= 0; i += 2 {
-			tok := $$Exca[i]
+			tok := int($$Exca[i])
 			if tok < TOKSTART || $$Exca[i+1] == 0 {
 				continue
 			}
@@ -3381,30 +3419,30 @@
 	token = 0
 	char = lex.Lex(lval)
 	if char <= 0 {
-		token = $$Tok1[0]
+		token = int($$Tok1[0])
 		goto out
 	}
 	if char < len($$Tok1) {
-		token = $$Tok1[char]
+		token = int($$Tok1[char])
 		goto out
 	}
 	if char >= $$Private {
 		if char < $$Private+len($$Tok2) {
-			token = $$Tok2[char-$$Private]
+			token = int($$Tok2[char-$$Private])
 			goto out
 		}
 	}
 	for i := 0; i < len($$Tok3); i += 2 {
-		token = $$Tok3[i+0]
+		token = int($$Tok3[i+0])
 		if token == char {
-			token = $$Tok3[i+1]
+			token = int($$Tok3[i+1])
 			goto out
 		}
 	}
 
 out:
 	if token == 0 {
-		token = $$Tok2[1] /* unknown char */
+		token = int($$Tok2[1]) /* unknown char */
 	}
 	if $$Debug >= 3 {
 		__yyfmt__.Printf("lex %s(%d)\n", $$Tokname(token), uint(char))
@@ -3459,7 +3497,7 @@
 	$$S[$$p].yys = $$state
 
 $$newstate:
-	$$n = $$Pact[$$state]
+	$$n = int($$Pact[$$state])
 	if $$n <= $$Flag {
 		goto $$default /* simple state */
 	}
@@ -3470,8 +3508,8 @@
 	if $$n < 0 || $$n >= $$Last {
 		goto $$default
 	}
-	$$n = $$Act[$$n]
-	if $$Chk[$$n] == $$token { /* valid shift */
+	$$n = int($$Act[$$n])
+	if int($$Chk[$$n]) == $$token { /* valid shift */
 		$$rcvr.char = -1
 		$$token = -1
 		$$VAL = $$rcvr.lval
@@ -3484,7 +3522,7 @@
 
 $$default:
 	/* default state action */
-	$$n = $$Def[$$state]
+	$$n = int($$Def[$$state])
 	if $$n == -2 {
 		if $$rcvr.char < 0 {
 			$$rcvr.char, $$token = $$lex1($$lex, &$$rcvr.lval)
@@ -3493,18 +3531,18 @@
 		/* look through exception table */
 		xi := 0
 		for {
-			if $$Exca[xi+0] == -1 && $$Exca[xi+1] == $$state {
+			if $$Exca[xi+0] == -1 && int($$Exca[xi+1]) == $$state {
 				break
 			}
 			xi += 2
 		}
 		for xi += 2; ; xi += 2 {
-			$$n = $$Exca[xi+0]
+			$$n = int($$Exca[xi+0])
 			if $$n < 0 || $$n == $$token {
 				break
 			}
 		}
-		$$n = $$Exca[xi+1]
+		$$n = int($$Exca[xi+1])
 		if $$n < 0 {
 			goto ret0
 		}
@@ -3526,10 +3564,10 @@
 
 			/* find a state where "error" is a legal shift action */
 			for $$p >= 0 {
-				$$n = $$Pact[$$S[$$p].yys] + $$ErrCode
+				$$n = int($$Pact[$$S[$$p].yys]) + $$ErrCode
 				if $$n >= 0 && $$n < $$Last {
-					$$state = $$Act[$$n] /* simulate a shift of "error" */
-					if $$Chk[$$state] == $$ErrCode {
+					$$state = int($$Act[$$n]) /* simulate a shift of "error" */
+					if int($$Chk[$$state]) == $$ErrCode {
 						goto $$stack
 					}
 				}
@@ -3565,7 +3603,7 @@
 	$$pt := $$p
 	_ = $$pt // guard against "declared and not used"
 
-	$$p -= $$R2[$$n]
+	$$p -= int($$R2[$$n])
 	// $$p is now the index of $0. Perform the default action. Iff the
 	// reduced production is ε, $1 is possibly out of range.
 	if $$p+1 >= len($$S) {
@@ -3576,16 +3614,16 @@
 	$$VAL = $$S[$$p+1]
 
 	/* consult goto table to find next state */
-	$$n = $$R1[$$n]
-	$$g := $$Pgo[$$n]
+	$$n = int($$R1[$$n])
+	$$g := int($$Pgo[$$n])
 	$$j := $$g + $$S[$$p].yys + 1
 
 	if $$j >= $$Last {
-		$$state = $$Act[$$g]
+		$$state = int($$Act[$$g])
 	} else {
-		$$state = $$Act[$$j]
-		if $$Chk[$$state] != -$$n {
-			$$state = $$Act[$$g]
+		$$state = int($$Act[$$j])
+		if int($$Chk[$$state]) != -$$n {
+			$$state = int($$Act[$$g])
 		}
 	}
 	// dummy call; replaced with literal code
diff --git a/cmd/guru/guru.go b/cmd/guru/guru.go
index 8dea3b5..2eafca6 100644
--- a/cmd/guru/guru.go
+++ b/cmd/guru/guru.go
@@ -126,8 +126,7 @@
 // and their dependencies.
 func setupPTA(prog *ssa.Program, lprog *loader.Program, ptaLog io.Writer, reflection bool) (*pointer.Config, error) {
 	// For each initial package (specified on the command line),
-	// if it has a main function, analyze that,
-	// otherwise analyze its tests, if any.
+	// analyze the package if it has a main function.
 	var mains []*ssa.Package
 	for _, info := range lprog.InitialPackages() {
 		p := prog.Package(info.Pkg)
@@ -135,8 +134,6 @@
 		// Add package to the pointer analysis scope.
 		if p.Pkg.Name() == "main" && p.Func("main") != nil {
 			mains = append(mains, p)
-		} else if main := prog.CreateTestMainPackage(p); main != nil {
-			mains = append(mains, main)
 		}
 	}
 	if mains == nil {
diff --git a/cmd/guru/guru_test.go b/cmd/guru/guru_test.go
index 0699db9..d446f01 100644
--- a/cmd/guru/guru_test.go
+++ b/cmd/guru/guru_test.go
@@ -215,6 +215,9 @@
 	}
 
 	for _, output := range outputs {
+		// Replace occurrences of interface{} with any, for consistent output
+		// across go 1.18 and earlier.
+		output = strings.ReplaceAll(output, "interface{}", "any")
 		fmt.Fprintf(out, "%s\n", output)
 	}
 
diff --git a/cmd/guru/main.go b/cmd/guru/main.go
index 8e4af00..4fde4d2 100644
--- a/cmd/guru/main.go
+++ b/cmd/guru/main.go
@@ -105,8 +105,8 @@
 `
 
 func printHelp() {
-	fmt.Fprintln(os.Stderr, helpMessage)
-	fmt.Fprintln(os.Stderr, "Flags:")
+	fmt.Fprint(os.Stderr, helpMessage)
+	fmt.Fprintln(os.Stderr, "\nFlags:")
 	flag.PrintDefaults()
 }
 
diff --git a/cmd/guru/testdata/src/implements/main.golden b/cmd/guru/testdata/src/implements/main.golden
index 1077c98..71d00ce 100644
--- a/cmd/guru/testdata/src/implements/main.golden
+++ b/cmd/guru/testdata/src/implements/main.golden
@@ -13,7 +13,7 @@
 	implements F
 
 -------- @implements slice --------
-slice type []int implements only interface{}
+slice type []int implements only any
 
 -------- @implements C --------
 pointer type *C
diff --git a/cmd/guru/testdata/src/pointsto/main.golden b/cmd/guru/testdata/src/pointsto/main.golden
index 7b12b2a..40a830f 100644
--- a/cmd/guru/testdata/src/pointsto/main.golden
+++ b/cmd/guru/testdata/src/pointsto/main.golden
@@ -81,7 +81,7 @@
 
 Error: pointer analysis wants an expression of reference type; got ()
 -------- @pointsto var-ref-s-f --------
-this interface{} may contain these dynamic types:
+this any may contain these dynamic types:
 	chan bool, may point to:
 		makechan
 
diff --git a/cmd/guru/testdata/src/reflection/main.golden b/cmd/guru/testdata/src/reflection/main.golden
index 4782132..2a84071 100644
--- a/cmd/guru/testdata/src/reflection/main.golden
+++ b/cmd/guru/testdata/src/reflection/main.golden
@@ -8,7 +8,7 @@
 		makemap
 
 -------- @pointsto p1 --------
-this interface{} may contain these dynamic types:
+this any may contain these dynamic types:
 	*bool, may point to:
 		reflection.b
 	*int, may point to:
diff --git a/cmd/signature-fuzzer/README.md b/cmd/signature-fuzzer/README.md
new file mode 100644
index 0000000..a7de540
--- /dev/null
+++ b/cmd/signature-fuzzer/README.md
@@ -0,0 +1,159 @@
+# signature-fuzzer
+
+This directory contains utilities for fuzz testing of Go function signatures, for use in developing/testing a Go compiler.
+
+The basic idea of the fuzzer is that it emits source code for a stand-alone Go program; this generated program is a series of pairs of functions, a "Caller" function and a "Checker" function. The signature of the Checker function is generated randomly (random number of parameters and returns, each with randomly chosen types). The "Caller" func contains invocations of the "Checker" function, each passing randomly chosen values to the params of the "Checker", and then verifies that the expected values are returned correctly. The "Checker" function in turn has code to verify that it received the expected parameter values (more details below).
+
+There are three main parts to the fuzzer: a generator package, a driver package, and a runner package.
+
+The "generator" contains the guts of the fuzzer, the bits that actually emit the random code.
+
+The "driver" is a stand-alone program that invokes the generator to create a single test program. It is not terribly useful on its own (since it doesn't actually build or run the generated program), but it is handy for debugging the generator or looking at examples of the emitted code.
+
+The "runner" is a more complete test harness; it repeatedly runs the generator to create a new test program, builds the test program, then runs it (checking for errors along the way). If at any point a build or test fails, the "runner" harness attempts a minimization process to try to narrow down the failure to a single package and/or function.
+
+## What the generated code looks like
+
+Generated Go functions will have an "interesting" set of signatures (mix of
+arrays, scalars, structs), intended to pick out corner cases and odd bits in the
+Go compiler's code that handles function calls and returns.
+
+The first generated file is genChecker.go, which contains functions that look something
+like this (simplified):
+
+```
+type StructF4S0 struct {
+F0 float64
+F1 int16
+F2 uint16
+}
+
+// 0 returns 2 params
+func Test4(p0 int8, p1 StructF4S0)  {
+  c0 := int8(-1)
+  if p0 != c0 {
+    NoteFailure(4, "parm", 0)
+  }
+  c1 := StructF4S0{float64(2), int16(-3), uint16(4)}
+  if p1 != c1 {
+    NoteFailure(4, "parm", 1)
+  }
+  return
+}
+```
+
+Here the test generator has randomly selected 0 return values and 2 params, then randomly generated types for the params.
+
+The generator then emits code on the calling side into the file "genCaller.go", which might look like:
+
+```
+func Caller4() {
+var p0 int8
+p0 = int8(-1)
+var p1 StructF4S0
+p1 = StructF4S0{float64(2), int16(-3), uint16(4)}
+// 0 returns 2 params
+Test4(p0, p1)
+}
+```
+
+The generator then emits some utility functions (ex: NoteFailure) and a main routine that cycles through all of the tests.
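+
+A minimal, hypothetical sketch of that glue code, flattened into a single package for brevity (the real genUtils.go and genMain.go split this across packages and report failures in more detail):
+
+```
+package main
+
+import "fmt"
+
+// NoteFailure reports a mismatch detected by a Checker function.
+func NoteFailure(fidx int, kind string, idx int) {
+	fmt.Printf("Error: fail =Test%d= %s %d\n", fidx, kind, idx)
+}
+
+func main() {
+	fmt.Println("starting main")
+	Caller4() // one call per generated Caller function (see genCaller*.go)
+	fmt.Println("finished 15 tests")
+}
+```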
+
+## Trying a single run of the generator
+
+To generate a set of source files just to see what they look like, you can build and run the test generator as follows. This creates a new output directory (here /tmp/sigfuzzTest) containing the generated test files:
+
+```
+$ git clone https://golang.org/x/tools
+$ cd tools/cmd/signature-fuzzer/fuzz-driver
+$ go build .
+$ ./fuzz-driver -numpkgs 3 -numfcns 5 -seed 12345 -outdir /tmp/sigfuzzTest -pkgpath foobar
+$ cd /tmp/sigfuzzTest
+$ find . -type f -print
+./genCaller1/genCaller1.go
+./genUtils/genUtils.go
+./genChecker1/genChecker1.go
+./genChecker0/genChecker0.go
+./genCaller2/genCaller2.go
+./genCaller0/genCaller0.go
+./genMain.go
+./go.mod
+./genChecker2/genChecker2.go
+$
+```
+
+You can build and run the generated files in the usual way:
+
+```
+$ cd /tmp/sigfuzzTest
+$ go build .
+$ ./foobar
+starting main
+finished 15 tests
+$
+
+```
+
+## Example usage for the test runner
+
+The test runner orchestrates multiple runs of the fuzzer, iteratively emitting code, building it, and testing the resulting binary. To use the runner, build and invoke it with a specific number of iterations; it will select a new random seed on each invocation. The runner will terminate as soon as it finds a failure. Example:
+
+```
+$ git clone https://golang.org/x/tools
+$ cd tools/cmd/signature-fuzzer/fuzz-runner
+$ go build .
+$ ./fuzz-runner -numit=3
+... begin iteration 0 with current seed 67104558
+starting main
+finished 1000 tests
+... begin iteration 1 with current seed 67104659
+starting main
+finished 1000 tests
+... begin iteration 2 with current seed 67104760
+starting main
+finished 1000 tests
+$
+```
+
+If the runner encounters a failure, it will try to perform test-case "minimization", that is, attempt to isolate the failure to a specific package and function:
+
+```
+$ cd tools/cmd/signature-fuzzer/fuzz-runner
+$ go build .
+$ ./fuzz-runner -n=10
+... begin iteration 0 with current seed 40661762
+Error: fail [reflect] |20|3|1| =Checker3.Test1= return 1
+error executing cmd ./fzTest: exit status 1
+... starting minimization for failed directory /tmp/fuzzrun1005327337/fuzzTest
+package minimization succeeded: found bad pkg 3
+function minimization succeeded: found bad fcn 1
+$
+```
+
+Here the runner has generated a failure, minimized it down to a single function and package, and left the resulting program in the output directory /tmp/fuzzrun1005327337/fuzzTest.
+
+## Limitations, future work
+
+No support yet for variadic functions.
+
+The set of generated types is still a bit thin; it has fairly limited support for interface values, and doesn't include channels.
+
+Todos:
+
+- better interface value coverage
+
+- implement testing of reflect.MakeFunc
+
+- extend to work with generic code of various types
+
+- extend to work in a debugging scenario (e.g. instead of just emitting code,
+  emit a script of debugger commands to run the program with expected
+  responses from the debugger)
+
+- rework things so that instead of always checking all of a given parameter
+  value, we sometimes skip over elements (or just check the length of a slice
+  or string as opposed to looking at its value)
+
+- consider adding runtime.GC() calls at some points in the generated code
+
diff --git a/cmd/signature-fuzzer/fuzz-driver/driver.go b/cmd/signature-fuzzer/fuzz-driver/driver.go
new file mode 100644
index 0000000..f61ca4b
--- /dev/null
+++ b/cmd/signature-fuzzer/fuzz-driver/driver.go
@@ -0,0 +1,168 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Stand-alone driver for emitting function-signature test code.  This
+// program is mainly just a wrapper around the code that lives in the
+// fuzz-generator package; it is useful for generating a specific bad
+// code scenario for a given seed, or for doing development on the
+// fuzzer, but for doing actual fuzz testing, better to use
+// fuzz-runner.
+
+package main
+
+import (
+	"flag"
+	"fmt"
+	"log"
+	"math/rand"
+	"os"
+	"time"
+
+	generator "golang.org/x/tools/cmd/signature-fuzzer/internal/fuzz-generator"
+)
+
+// Basic options
+var numfcnflag = flag.Int("numfcns", 10, "Number of test func pairs to emit in each package")
+var numpkgflag = flag.Int("numpkgs", 1, "Number of test packages to emit")
+var seedflag = flag.Int64("seed", -1, "Random seed")
+var tagflag = flag.String("tag", "gen", "Prefix name of go files/pkgs to generate")
+var outdirflag = flag.String("outdir", "", "Output directory for generated files")
+var pkgpathflag = flag.String("pkgpath", "gen", "Base package path for generated files")
+
+// Options used for test case minimization.
+var fcnmaskflag = flag.String("fcnmask", "", "Mask containing list of fcn numbers to emit")
+var pkmaskflag = flag.String("pkgmask", "", "Mask containing list of pkg numbers to emit")
+
+// Options used to control which features are used in the generated code.
+var reflectflag = flag.Bool("reflect", true, "Include testing of reflect.Call.")
+var deferflag = flag.Bool("defer", true, "Include testing of defer stmts.")
+var recurflag = flag.Bool("recur", true, "Include testing of recursive calls.")
+var takeaddrflag = flag.Bool("takeaddr", true, "Include functions that take the address of their parameters and results.")
+var methodflag = flag.Bool("method", true, "Include testing of method calls.")
+var inlimitflag = flag.Int("inmax", -1, "Max number of input params.")
+var outlimitflag = flag.Int("outmax", -1, "Max number of output params.")
+var pragmaflag = flag.String("pragma", "", "Tag generated test routines with pragma //go:<value>.")
+var maxfailflag = flag.Int("maxfail", 10, "Maximum runtime failures before test self-terminates")
+var stackforceflag = flag.Bool("forcestackgrowth", true, "Use hooks to force stack growth.")
+
+// Debugging options
+var verbflag = flag.Int("v", 0, "Verbose trace output level")
+
+// Debugging/testing options. These tell the generator to emit "bad" code so as to
+// test the logic for detecting errors and/or minimization (in the fuzz runner).
+var emitbadflag = flag.Int("emitbad", 0, "[Testing only] force generator to emit 'bad' code.")
+var selbadpkgflag = flag.Int("badpkgidx", 0, "[Testing only] select index of bad package (used with -emitbad)")
+var selbadfcnflag = flag.Int("badfcnidx", 0, "[Testing only] select index of bad function (used with -emitbad)")
+
+// Misc options
+var goimpflag = flag.Bool("goimports", false, "Run 'goimports' on generated code.")
+var randctlflag = flag.Int("randctl", generator.RandCtlChecks|generator.RandCtlPanic, "Wraprand control flag")
+
+func verb(vlevel int, s string, a ...interface{}) {
+	if *verbflag >= vlevel {
+		fmt.Printf(s, a...)
+		fmt.Printf("\n")
+	}
+}
+
+func usage(msg string) {
+	if len(msg) > 0 {
+		fmt.Fprintf(os.Stderr, "error: %s\n", msg)
+	}
+	fmt.Fprintf(os.Stderr, "usage: fuzz-driver [flags]\n\n")
+	flag.PrintDefaults()
+	fmt.Fprintf(os.Stderr, "Example:\n\n")
+	fmt.Fprintf(os.Stderr, "  fuzz-driver -numpkgs=23 -numfcns=19 -seed 10101 -outdir gendir\n\n")
+	fmt.Fprintf(os.Stderr, "  \tgenerates a Go program with 437 test cases (23 packages, each \n")
+	fmt.Fprintf(os.Stderr, "  \twith 19 functions, for a total of 437 funcs total) into a set of\n")
+	fmt.Fprintf(os.Stderr, "  \tsub-directories in 'gendir', using random see 10101\n")
+
+	os.Exit(2)
+}
+
+func setupTunables() {
+	tunables := generator.DefaultTunables()
+	if !*reflectflag {
+		tunables.DisableReflectionCalls()
+	}
+	if !*deferflag {
+		tunables.DisableDefer()
+	}
+	if !*recurflag {
+		tunables.DisableRecursiveCalls()
+	}
+	if !*takeaddrflag {
+		tunables.DisableTakeAddr()
+	}
+	if !*methodflag {
+		tunables.DisableMethodCalls()
+	}
+	if *inlimitflag != -1 {
+		tunables.LimitInputs(*inlimitflag)
+	}
+	if *outlimitflag != -1 {
+		tunables.LimitOutputs(*outlimitflag)
+	}
+	generator.SetTunables(tunables)
+}
+
+func main() {
+	log.SetFlags(0)
+	log.SetPrefix("fuzz-driver: ")
+	flag.Parse()
+	generator.Verbctl = *verbflag
+	if *outdirflag == "" {
+		usage("select an output directory with -o flag")
+	}
+	verb(1, "in main verblevel=%d", *verbflag)
+	if *seedflag == -1 {
+		// user has not selected a specific seed -- pick one.
+		now := time.Now()
+		*seedflag = now.UnixNano() % 123456789
+		verb(0, "selected seed: %d", *seedflag)
+	}
+	rand.Seed(*seedflag)
+	if flag.NArg() != 0 {
+		usage("unknown extra arguments")
+	}
+	verb(1, "tag is %s", *tagflag)
+
+	fcnmask, err := generator.ParseMaskString(*fcnmaskflag, "fcn")
+	if err != nil {
+		usage(fmt.Sprintf("mangled fcn mask arg: %v", err))
+	}
+	pkmask, err := generator.ParseMaskString(*pkmaskflag, "pkg")
+	if err != nil {
+		usage(fmt.Sprintf("mangled pkg mask arg: %v", err))
+	}
+	verb(2, "pkg mask is %v", pkmask)
+	verb(2, "fn mask is %v", fcnmask)
+
+	verb(1, "starting generation")
+	setupTunables()
+	config := generator.GenConfig{
+		PkgPath:          *pkgpathflag,
+		Tag:              *tagflag,
+		OutDir:           *outdirflag,
+		NumTestPackages:  *numpkgflag,
+		NumTestFunctions: *numfcnflag,
+		Seed:             *seedflag,
+		Pragma:           *pragmaflag,
+		FcnMask:          fcnmask,
+		PkgMask:          pkmask,
+		MaxFail:          *maxfailflag,
+		ForceStackGrowth: *stackforceflag,
+		RandCtl:          *randctlflag,
+		RunGoImports:     *goimpflag,
+		EmitBad:          *emitbadflag,
+		BadPackageIdx:    *selbadpkgflag,
+		BadFuncIdx:       *selbadfcnflag,
+	}
+	errs := generator.Generate(config)
+	if errs != 0 {
+		log.Fatal("errors during generation")
+	}
+	verb(1, "... files written to directory %s", *outdirflag)
+	verb(1, "leaving main")
+}
diff --git a/cmd/signature-fuzzer/fuzz-driver/drv_test.go b/cmd/signature-fuzzer/fuzz-driver/drv_test.go
new file mode 100644
index 0000000..7de74c6
--- /dev/null
+++ b/cmd/signature-fuzzer/fuzz-driver/drv_test.go
@@ -0,0 +1,73 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"testing"
+
+	"golang.org/x/tools/internal/testenv"
+)
+
+// buildDriver builds the fuzz-driver executable, returning its path.
+func buildDriver(t *testing.T) string {
+	t.Helper()
+	if runtime.GOOS == "android" {
+		t.Skipf("the dependencies are not available on android")
+		return ""
+	}
+	bindir := filepath.Join(t.TempDir(), "bin")
+	err := os.Mkdir(bindir, os.ModePerm)
+	if err != nil {
+		t.Fatal(err)
+	}
+	binary := filepath.Join(bindir, "driver")
+	if runtime.GOOS == "windows" {
+		binary += ".exe"
+	}
+	cmd := exec.Command("go", "build", "-o", binary)
+	if err := cmd.Run(); err != nil {
+		t.Fatalf("Building fuzz-driver: %v", err)
+	}
+	return binary
+}
+
+func TestEndToEndIntegration(t *testing.T) {
+	testenv.NeedsTool(t, "go")
+	td := t.TempDir()
+
+	// Build the fuzz-driver binary.
+	// Note: if more tests are added to this package, move this to single setup fcn, so
+	// that we don't have to redo the build each time.
+	binary := buildDriver(t)
+
+	// Kick off a run.
+	gendir := filepath.Join(td, "gen")
+	args := []string{"-numfcns", "3", "-numpkgs", "1", "-seed", "101", "-outdir", gendir}
+	c := exec.Command(binary, args...)
+	b, err := c.CombinedOutput()
+	if err != nil {
+		t.Fatalf("error invoking fuzz-driver: %v\n%s", err, b)
+	}
+
+	found := ""
+	walker := func(path string, info os.FileInfo, err error) error {
+		found = found + ":" + info.Name()
+		return nil
+	}
+
+	// Make sure it emits something.
+	err2 := filepath.Walk(gendir, walker)
+	if err2 != nil {
+		t.Fatalf("error from filepath.Walk: %v", err2)
+	}
+	const expected = ":gen:genCaller0:genCaller0.go:genChecker0:genChecker0.go:genMain.go:genUtils:genUtils.go:go.mod"
+	if found != expected {
+		t.Errorf("walk of generated code: got %s want %s", found, expected)
+	}
+}
diff --git a/cmd/signature-fuzzer/fuzz-runner/rnr_test.go b/cmd/signature-fuzzer/fuzz-runner/rnr_test.go
new file mode 100644
index 0000000..2bab5b4
--- /dev/null
+++ b/cmd/signature-fuzzer/fuzz-runner/rnr_test.go
@@ -0,0 +1,145 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"fmt"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/internal/testenv"
+)
+
+func canRace(t *testing.T) bool {
+	_, err := exec.Command("go", "run", "-race", "./testdata/himom.go").CombinedOutput()
+	return err == nil
+}
+
+// buildRunner builds the fuzz-runner executable, returning its path.
+func buildRunner(t *testing.T) string {
+	bindir := filepath.Join(t.TempDir(), "bin")
+	err := os.Mkdir(bindir, os.ModePerm)
+	if err != nil {
+		t.Fatal(err)
+	}
+	binary := filepath.Join(bindir, "runner")
+	if runtime.GOOS == "windows" {
+		binary += ".exe"
+	}
+	cmd := exec.Command("go", "build", "-o", binary)
+	if err := cmd.Run(); err != nil {
+		t.Fatalf("Building fuzz-runner: %v", err)
+	}
+	return binary
+}
+
+// TestRunner builds the binary, then kicks off a collection of sub-tests that invoke it.
+func TestRunner(t *testing.T) {
+	testenv.NeedsTool(t, "go")
+	if runtime.GOOS == "android" {
+		t.Skipf("the dependencies are not available on android")
+	}
+	binaryPath := buildRunner(t)
+
+	// Sub-tests using the binary built above.
+	t.Run("Basic", func(t *testing.T) { testBasic(t, binaryPath) })
+	t.Run("Race", func(t *testing.T) { testRace(t, binaryPath) })
+	t.Run("Minimization1", func(t *testing.T) { testMinimization1(t, binaryPath) })
+	t.Run("Minimization2", func(t *testing.T) { testMinimization2(t, binaryPath) })
+}
+
+func testBasic(t *testing.T, binaryPath string) {
+	t.Parallel()
+	args := []string{"-numit=1", "-numfcns=1", "-numpkgs=1", "-seed=103", "-cleancache=0"}
+	c := exec.Command(binaryPath, args...)
+	b, err := c.CombinedOutput()
+	t.Logf("%s\n", b)
+	if err != nil {
+		t.Fatalf("error invoking fuzz-runner: %v", err)
+	}
+}
+
+func testRace(t *testing.T, binaryPath string) {
+	t.Parallel()
+	// For this test to work, the current test platform has to support the
+	// race detector. Check to see if that is the case by running a very
+	// simple Go program through it.
+	if !canRace(t) {
+		t.Skip("current platform does not appear to support the race detector")
+	}
+
+	args := []string{"-v=1", "-numit=1", "-race", "-numfcns=3", "-numpkgs=3", "-seed=987", "-cleancache=0"}
+	c := exec.Command(binaryPath, args...)
+	b, err := c.CombinedOutput()
+	t.Logf("%s\n", b)
+	if err != nil {
+		t.Fatalf("error invoking fuzz-runner: %v", err)
+	}
+}
+
+func testMinimization1(t *testing.T, binaryPath string) {
+	if binaryPath == "" {
+		t.Skipf("No runner binary")
+	}
+	t.Parallel()
+	// Fire off the runner passing it -emitbad=1, so that the generated code
+	// contains illegal Go code (which will force the build to fail). Verify that
+	// it does fail, that the error reflects the nature of the failure, and that
+	// we can minimize the error down to a single package.
+	args := []string{"-emitbad=1", "-badfcnidx=2", "-badpkgidx=2",
+		"-forcetmpclean", "-cleancache=0",
+		"-numit=1", "-numfcns=3", "-numpkgs=3", "-seed=909"}
+	invocation := fmt.Sprintf("%s %v", binaryPath, args)
+	c := exec.Command(binaryPath, args...)
+	b, err := c.CombinedOutput()
+	t.Logf("%s\n", b)
+	if err == nil {
+		t.Fatalf("unexpected pass of fuzz-runner (invocation %q): %v", invocation, err)
+	}
+	result := string(b)
+	if !strings.Contains(result, "syntax error") {
+		t.Fatalf("-emitbad=1 did not trigger syntax error (invocation %q): output: %s", invocation, result)
+	}
+	if !strings.Contains(result, "package minimization succeeded: found bad pkg 2") {
+		t.Fatalf("failed to minimize package (invocation %q): output: %s", invocation, result)
+	}
+	if !strings.Contains(result, "function minimization succeeded: found bad fcn 2") {
+		t.Fatalf("failed to minimize package (invocation %q): output: %s", invocation, result)
+	}
+}
+
+func testMinimization2(t *testing.T, binaryPath string) {
+	if binaryPath == "" {
+		t.Skipf("No runner binary")
+	}
+	t.Parallel()
+	// Fire off the runner passing it -emitbad=2, so that the
+	// generated code forces a runtime error. Verify that it does
+	// fail, and that the error is reflective.
+	args := []string{"-emitbad=2", "-badfcnidx=1", "-badpkgidx=1",
+		"-forcetmpclean", "-cleancache=0",
+		"-numit=1", "-numfcns=3", "-numpkgs=3", "-seed=55909"}
+	invocation := fmt.Sprintf("%s %v", binaryPath, args)
+	c := exec.Command(binaryPath, args...)
+	b, err := c.CombinedOutput()
+	t.Logf("%s\n", b)
+	if err == nil {
+		t.Fatalf("unexpected pass of fuzz-runner (invocation %q): %v", invocation, err)
+	}
+	result := string(b)
+	if !strings.Contains(result, "Error: fail") || !strings.Contains(result, "Checker1.Test1") {
+		t.Fatalf("-emitbad=2 did not trigger runtime error (invocation %q): output: %s", invocation, result)
+	}
+	if !strings.Contains(result, "package minimization succeeded: found bad pkg 1") {
+		t.Fatalf("failed to minimize package (invocation %q): output: %s", invocation, result)
+	}
+	if !strings.Contains(result, "function minimization succeeded: found bad fcn 1") {
+		t.Fatalf("failed to minimize package (invocation %q): output: %s", invocation, result)
+	}
+}
diff --git a/cmd/signature-fuzzer/fuzz-runner/runner.go b/cmd/signature-fuzzer/fuzz-runner/runner.go
new file mode 100644
index 0000000..4e5b413
--- /dev/null
+++ b/cmd/signature-fuzzer/fuzz-runner/runner.go
@@ -0,0 +1,443 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Program for performing test runs using "fuzz-driver".
+// Main loop iteratively runs "fuzz-driver" to create a corpus,
+// then builds and runs the code. If a failure in the run is
+// detected, then a testcase minimization phase kicks in.
+
+package main
+
+import (
+	"flag"
+	"fmt"
+	"io/ioutil"
+	"log"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"strconv"
+	"strings"
+	"time"
+
+	generator "golang.org/x/tools/cmd/signature-fuzzer/internal/fuzz-generator"
+)
+
+const pkName = "fzTest"
+
+// Basic options
+var verbflag = flag.Int("v", 0, "Verbose trace output level")
+var loopitflag = flag.Int("numit", 10, "Number of main loop iterations to run")
+var seedflag = flag.Int64("seed", -1, "Random seed")
+var execflag = flag.Bool("execdriver", false, "Exec fuzz-driver binary instead of invoking generator directly")
+var numpkgsflag = flag.Int("numpkgs", 50, "Number of test packages")
+var numfcnsflag = flag.Int("numfcns", 20, "Number of test functions per package.")
+
+// Debugging/testing options. These tell the generator to emit "bad" code so as to
+// test the logic for detecting errors and/or minimization.
+var emitbadflag = flag.Int("emitbad", -1, "[Testing only] force generator to emit 'bad' code.")
+var selbadpkgflag = flag.Int("badpkgidx", 0, "[Testing only] select index of bad package (used with -emitbad)")
+var selbadfcnflag = flag.Int("badfcnidx", 0, "[Testing only] select index of bad function (used with -emitbad)")
+var forcetmpcleanflag = flag.Bool("forcetmpclean", false, "[Testing only] force cleanup of temp dir")
+var cleancacheflag = flag.Bool("cleancache", true, "[Testing only] don't clean the go cache")
+var raceflag = flag.Bool("race", false, "[Testing only] build generated code with -race")
+
+func verb(vlevel int, s string, a ...interface{}) {
+	if *verbflag >= vlevel {
+		fmt.Printf(s, a...)
+		fmt.Printf("\n")
+	}
+}
+
+func warn(s string, a ...interface{}) {
+	fmt.Fprintf(os.Stderr, s, a...)
+	fmt.Fprintf(os.Stderr, "\n")
+}
+
+func fatal(s string, a ...interface{}) {
+	fmt.Fprintf(os.Stderr, s, a...)
+	fmt.Fprintf(os.Stderr, "\n")
+	os.Exit(1)
+}
+
+type config struct {
+	generator.GenConfig
+	tmpdir       string
+	gendir       string
+	buildOutFile string
+	runOutFile   string
+	gcflags      string
+	nerrors      int
+}
+
+func usage(msg string) {
+	if len(msg) > 0 {
+		fmt.Fprintf(os.Stderr, "error: %s\n", msg)
+	}
+	fmt.Fprintf(os.Stderr, "usage: fuzz-runner [flags]\n\n")
+	flag.PrintDefaults()
+	fmt.Fprintf(os.Stderr, "Example:\n\n")
+	fmt.Fprintf(os.Stderr, "  fuzz-runner -numit=500 -numpkgs=11 -numfcns=13 -seed=10101\n\n")
+	fmt.Fprintf(os.Stderr, "  \tRuns 500 rounds of test case generation\n")
+	fmt.Fprintf(os.Stderr, "  \tusing random see 10101, in each round emitting\n")
+	fmt.Fprintf(os.Stderr, "  \t11 packages each with 13 function pairs.\n")
+
+	os.Exit(2)
+}
+
+// docmd executes the specified command in the dir given and pipes the
+// output to stderr. return status is 0 if command passed, 1
+// otherwise.
+func docmd(cmd []string, dir string) int {
+	verb(2, "docmd: %s", strings.Join(cmd, " "))
+	c := exec.Command(cmd[0], cmd[1:]...)
+	if dir != "" {
+		c.Dir = dir
+	}
+	b, err := c.CombinedOutput()
+	st := 0
+	if err != nil {
+		warn("error executing cmd %s: %v",
+			strings.Join(cmd, " "), err)
+		st = 1
+	}
+	os.Stderr.Write(b)
+	return st
+}
+
+// docmdout forks and execs command 'cmd' in dir 'dir', redirecting
+// stderr and stdout from the execution to file 'outfile'.
+func docmdout(cmd []string, dir string, outfile string) int {
+	of, err := os.OpenFile(outfile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644)
+	if err != nil {
+		fatal("opening outputfile %s: %v", outfile, err)
+	}
+	c := exec.Command(cmd[0], cmd[1:]...)
+	defer of.Close()
+	if dir != "" {
+		verb(2, "setting cmd.Dir to %s", dir)
+		c.Dir = dir
+	}
+	verb(2, "docmdout: %s > %s", strings.Join(cmd, " "), outfile)
+	c.Stdout = of
+	c.Stderr = of
+	err = c.Run()
+	st := 0
+	if err != nil {
+		warn("error executing cmd %s: %v",
+			strings.Join(cmd, " "), err)
+		st = 1
+	}
+	return st
+}
+
+// gen is the main hook for kicking off code generation. For
+// non-minimization runs, 'singlepk' and 'singlefn' will both be -1
+// (indicating that we want all functions and packages to be
+// generated).  If 'singlepk' is set to a non-negative value, then
+// code generation will be restricted to the single package with that
+// index (as a try at minimization), similarly with 'singlefn'
+// restricting the codegen to a single specified function.
+func (c *config) gen(singlepk int, singlefn int) {
+
+	// clean the output dir
+	verb(2, "cleaning outdir %s", c.gendir)
+	if err := os.RemoveAll(c.gendir); err != nil {
+		fatal("error cleaning gen dir %s: %v", c.gendir, err)
+	}
+
+	// emit code into the output dir. Here we either invoke the
+	// generator directly, or invoke fuzz-driver if -execflag is
+	// set.  If the code generation process itself fails, this is
+	// typically a bug in the fuzzer itself, so it gets reported
+	// as a fatal error.
+	if *execflag {
+		args := []string{"fuzz-driver",
+			"-numpkgs", strconv.Itoa(c.NumTestPackages),
+			"-numfcns", strconv.Itoa(c.NumTestFunctions),
+			"-seed", strconv.Itoa(int(c.Seed)),
+			"-outdir", c.OutDir,
+			"-pkgpath", pkName,
+			"-maxfail", strconv.Itoa(c.MaxFail)}
+		if singlepk != -1 {
+			args = append(args, "-pkgmask", strconv.Itoa(singlepk))
+		}
+		if singlefn != -1 {
+			args = append(args, "-fcnmask", strconv.Itoa(singlefn))
+		}
+		if *emitbadflag != 0 {
+			args = append(args, "-emitbad", strconv.Itoa(*emitbadflag),
+				"-badpkgidx", strconv.Itoa(*selbadpkgflag),
+				"-badfcnidx", strconv.Itoa(*selbadfcnflag))
+		}
+		verb(1, "invoking fuzz-driver with args: %v", args)
+		st := docmd(args, "")
+		if st != 0 {
+			fatal("fatal error: generation failed, cmd was: %v", args)
+		}
+	} else {
+		if singlepk != -1 {
+			c.PkgMask = map[int]int{singlepk: 1}
+		}
+		if singlefn != -1 {
+			c.FcnMask = map[int]int{singlefn: 1}
+		}
+		verb(1, "invoking generator.Generate with config: %v", c.GenConfig)
+		errs := generator.Generate(c.GenConfig)
+		if errs != 0 {
+			log.Fatal("errors during generation")
+		}
+	}
+}
+
+// action performs a selected action/command in the generated code dir.
+func (c *config) action(cmd []string, outfile string, emitout bool) int {
+	st := docmdout(cmd, c.gendir, outfile)
+	if emitout {
+		content, err := ioutil.ReadFile(outfile)
+		if err != nil {
+			log.Fatal(err)
+		}
+		fmt.Fprintf(os.Stderr, "%s", content)
+	}
+	return st
+}
+
+func binaryName() string {
+	if runtime.GOOS == "windows" {
+		return pkName + ".exe"
+	} else {
+		return "./" + pkName
+	}
+}
+
+// build builds a generated corpus of Go code. If 'emitout' is set, then dump out the
+// results of the build after it completes (during minimization emitout is set to false,
+// since there is no need to see repeated errors).
+func (c *config) build(emitout bool) int {
+	// Issue a build of the generated code.
+	c.buildOutFile = filepath.Join(c.tmpdir, "build.err.txt")
+	cmd := []string{"go", "build", "-o", binaryName()}
+	if c.gcflags != "" {
+		cmd = append(cmd, "-gcflags=all="+c.gcflags)
+	}
+	if *raceflag {
+		cmd = append(cmd, "-race")
+	}
+	cmd = append(cmd, ".")
+	verb(1, "build command is: %v", cmd)
+	return c.action(cmd, c.buildOutFile, emitout)
+}
+
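+// For reference, when both a -gcflags value and the -race flag are in
+// effect, the command slice assembled by build above ends up as:
+//
+//	[]string{"go", "build", "-o", binaryName(), "-gcflags=all=" + c.gcflags, "-race", "."}
+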
+// run invokes a binary built from a generated corpus of Go code. If
+// 'emitout' is set, then dump out the results of the run after it
+// completes.
+func (c *config) run(emitout bool) int {
+	// Issue a run of the generated code.
+	c.runOutFile = filepath.Join(c.tmpdir, "run.err.txt")
+	cmd := []string{filepath.Join(c.gendir, binaryName())}
+	verb(1, "run command is: %v", cmd)
+	return c.action(cmd, c.runOutFile, emitout)
+}
+
+type minimizeMode int
+
+const (
+	minimizeBuildFailure minimizeMode = iota
+	minimizeRuntimeFailure
+)
+
+// minimize tries to minimize a failing scenario down to a single
+// package and/or function if possible. This is done using an
+// iterative search. Here 'minimizeMode' tells us whether we're
+// looking for a compile-time error or a runtime error.
+func (c *config) minimize(mode minimizeMode) int {
+
+	verb(0, "... starting minimization for failed directory %s", c.gendir)
+
+	foundPkg := -1
+	foundFcn := -1
+
+	// Locate bad package. Uses brute-force linear search, could do better...
+	for pidx := 0; pidx < c.NumTestPackages; pidx++ {
+		verb(1, "minimization: trying package %d", pidx)
+		c.gen(pidx, -1)
+		st := c.build(false)
+		if mode == minimizeBuildFailure {
+			if st != 0 {
+				// Found.
+				foundPkg = pidx
+				c.nerrors++
+				break
+			}
+		} else {
+			if st != 0 {
+				warn("run minimization: unexpected build failure while searching for bad pkg")
+				return 1
+			}
+			st := c.run(false)
+			if st != 0 {
+				// Found.
+				c.nerrors++
+				verb(1, "run minimization found bad package: %d", pidx)
+				foundPkg = pidx
+				break
+			}
+		}
+	}
+	if foundPkg == -1 {
+		verb(0, "** minimization failed, could not locate bad package")
+		return 1
+	}
+	warn("package minimization succeeded: found bad pkg %d", foundPkg)
+
+	// clean unused packages
+	for pidx := 0; pidx < c.NumTestPackages; pidx++ {
+		if pidx != foundPkg {
+			chp := filepath.Join(c.gendir, fmt.Sprintf("%s%s%d", c.Tag, generator.CheckerName, pidx))
+			if err := os.RemoveAll(chp); err != nil {
+				fatal("failed to clean pkg subdir %s: %v", chp, err)
+			}
+			clp := filepath.Join(c.gendir, fmt.Sprintf("%s%s%d", c.Tag, generator.CallerName, pidx))
+			if err := os.RemoveAll(clp); err != nil {
+				fatal("failed to clean pkg subdir %s: %v", clp, err)
+			}
+		}
+	}
+
+	// Locate bad function. Again, brute force.
+	for fidx := 0; fidx < c.NumTestFunctions; fidx++ {
+		c.gen(foundPkg, fidx)
+		st := c.build(false)
+		if mode == minimizeBuildFailure {
+			if st != 0 {
+				// Found.
+				verb(1, "build minimization found bad function: %d", fidx)
+				foundFcn = fidx
+				break
+			}
+		} else {
+			if st != 0 {
+				warn("run minimization: unexpected build failure while searching for bad fcn")
+				return 1
+			}
+			st := c.run(false)
+			if st != 0 {
+				// Found.
+				verb(1, "run minimization found bad function: %d", fidx)
+				foundFcn = fidx
+				break
+			}
+		}
+		// not the function we want ... continue the hunt
+	}
+	if foundFcn == -1 {
+		verb(0, "** function minimization failed, could not locate bad function")
+		return 1
+	}
+	warn("function minimization succeeded: found bad fcn %d", foundFcn)
+
+	return 0
+}
+
+// cleanTemp removes the temp dir we've been working with.
+func (c *config) cleanTemp() {
+	if !*forcetmpcleanflag {
+		if c.nerrors != 0 {
+			verb(1, "preserving temp dir %s", c.tmpdir)
+			return
+		}
+	}
+	verb(1, "cleaning temp dir %s", c.tmpdir)
+	os.RemoveAll(c.tmpdir)
+}
+
+// perform is the top level driver routine for the program, containing the
+// main loop. Each iteration of the loop performs a generate/build/run
+// sequence, and then updates the seed afterwards if no failure is found.
+// If a failure is detected, we try to minimize it and then return without
+// attempting any additional tests.
+func (c *config) perform() int {
+	defer c.cleanTemp()
+
+	// Main loop
+	for iter := 0; iter < *loopitflag; iter++ {
+		if iter != 0 && iter%50 == 0 {
+			// Note: cleaning the Go cache periodically is
+			// pretty much a requirement if you want to do
+			// things like overnight runs of the fuzzer,
+			// but it is also a very unfriendly thing to
+			// do if we're executing as part of a unit
+			// test run (in which case there may be other
+			// tests running in parallel with this
+			// one). Check the "cleancache" flag before
+			// doing this.
+			if *cleancacheflag {
+				docmd([]string{"go", "clean", "-cache"}, "")
+			}
+		}
+		verb(0, "... begin iteration %d with current seed %d", iter, c.Seed)
+		c.gen(-1, -1)
+		st := c.build(true)
+		if st != 0 {
+			c.minimize(minimizeBuildFailure)
+			return 1
+		}
+		st = c.run(true)
+		if st != 0 {
+			c.minimize(minimizeRuntimeFailure)
+			return 1
+		}
+		// update seed so that we get different code on the next iter.
+		c.Seed += 101
+	}
+	return 0
+}
+
+func main() {
+	log.SetFlags(0)
+	log.SetPrefix("fuzz-runner: ")
+	flag.Parse()
+	if flag.NArg() != 0 {
+		usage("unknown extra arguments")
+	}
+	verb(1, "in main, verblevel=%d", *verbflag)
+
+	tmpdir, err := ioutil.TempDir("", "fuzzrun")
+	if err != nil {
+		fatal("creation of tempdir failed: %v", err)
+	}
+	gendir := filepath.Join(tmpdir, "fuzzTest")
+
+	// select starting seed
+	if *seedflag == -1 {
+		now := time.Now()
+		*seedflag = now.UnixNano() % 123456789
+	}
+
+	// set up params for this run
+	c := &config{
+		GenConfig: generator.GenConfig{
+			NumTestPackages:  *numpkgsflag, // 100
+			NumTestFunctions: *numfcnsflag, // 20
+			Seed:             *seedflag,
+			OutDir:           gendir,
+			Pragma:           "-maxfail=9999",
+			PkgPath:          pkName,
+			EmitBad:          *emitbadflag,
+			BadPackageIdx:    *selbadpkgflag,
+			BadFuncIdx:       *selbadfcnflag,
+		},
+		tmpdir: tmpdir,
+		gendir: gendir,
+	}
+
+	// kick off the main loop.
+	st := c.perform()
+
+	// done
+	verb(1, "leaving main, num errors=%d", c.nerrors)
+	os.Exit(st)
+}
diff --git a/cmd/signature-fuzzer/fuzz-runner/testdata/himom.go b/cmd/signature-fuzzer/fuzz-runner/testdata/himom.go
new file mode 100644
index 0000000..5ba783d
--- /dev/null
+++ b/cmd/signature-fuzzer/fuzz-runner/testdata/himom.go
@@ -0,0 +1,9 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+func main() {
+	println("hi mom!")
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/arrayparm.go b/cmd/signature-fuzzer/internal/fuzz-generator/arrayparm.go
new file mode 100644
index 0000000..32ccf7e
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/arrayparm.go
@@ -0,0 +1,108 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+	"fmt"
+)
+
+// arrayparm describes a parameter of array type; it implements the
+// "parm" interface.
+type arrayparm struct {
+	aname     string
+	qname     string
+	nelements uint8
+	eltype    parm
+	slice     bool
+	isBlank
+	addrTakenHow
+	isGenValFunc
+	skipCompare
+}
+
+func (p arrayparm) IsControl() bool {
+	return false
+}
+
+func (p arrayparm) TypeName() string {
+	return p.aname
+}
+
+func (p arrayparm) QualName() string {
+	return p.qname
+}
+
+func (p arrayparm) Declare(b *bytes.Buffer, prefix string, suffix string, caller bool) {
+	n := p.aname
+	if caller {
+		n = p.qname
+	}
+	b.WriteString(fmt.Sprintf("%s %s%s", prefix, n, suffix))
+}
+
+func (p arrayparm) String() string {
+	return fmt.Sprintf("%s %d-element array of %s", p.aname, p.nelements, p.eltype.String())
+}
+
+func (p arrayparm) GenValue(s *genstate, f *funcdef, value int, caller bool) (string, int) {
+	var buf bytes.Buffer
+
+	verb(5, "arrayparm.GenValue(%d)", value)
+
+	n := p.aname
+	if caller {
+		n = p.qname
+	}
+	buf.WriteString(fmt.Sprintf("%s{", n))
+	for i := 0; i < int(p.nelements); i++ {
+		var valstr string
+		valstr, value = s.GenValue(f, p.eltype, value, caller)
+		writeCom(&buf, i)
+		buf.WriteString(valstr)
+	}
+	buf.WriteString("}")
+	return buf.String(), value
+}
+
+func (p arrayparm) GenElemRef(elidx int, path string) (string, parm) {
+	ene := p.eltype.NumElements()
+	verb(4, "begin GenElemRef(%d,%s) on %s ene %d", elidx, path, p.String(), ene)
+
+	// For empty arrays, convention is to return empty string
+	if ene == 0 {
+		return "", &p
+	}
+
+	// Find slot within array of element of interest
+	slot := elidx / ene
+
+	// If this is the element we're interested in, return it
+	if ene == 1 {
+		verb(4, "hit scalar element")
+		epath := fmt.Sprintf("%s[%d]", path, slot)
+		if path == "_" || p.IsBlank() {
+			epath = "_"
+		}
+		return epath, p.eltype
+	}
+
+	verb(4, "recur slot=%d GenElemRef(%d,...)", slot, elidx-(slot*ene))
+
+	// Otherwise our victim is somewhere inside the slot
+	ppath := fmt.Sprintf("%s[%d]", path, slot)
+	if p.IsBlank() {
+		ppath = "_"
+	}
+	return p.eltype.GenElemRef(elidx-(slot*ene), ppath)
+}
+
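+// Worked example (illustrative): for an arrayparm with nelements=3 whose
+// element type is a scalar (NumElements() == 1, non-blank), GenElemRef(2, "left")
+// computes slot = 2/1 = 2 and returns ("left[2]", eltype); for composite
+// element types it instead recurses into the selected slot.
+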
+func (p arrayparm) NumElements() int {
+	return p.eltype.NumElements() * int(p.nelements)
+}
+
+func (p arrayparm) HasPointer() bool {
+	return p.eltype.HasPointer() || p.slice
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go b/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go
new file mode 100644
index 0000000..4bd5bab
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go
@@ -0,0 +1,322 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"testing"
+
+	"golang.org/x/tools/internal/testenv"
+)
+
+func mkGenState() *genstate {
+
+	return &genstate{
+		GenConfig: GenConfig{
+			Tag:              "gen",
+			OutDir:           "/tmp",
+			NumTestPackages:  1,
+			NumTestFunctions: 10,
+		},
+		ipref:       "foo/",
+		derefFuncs:  make(map[string]string),
+		assignFuncs: make(map[string]string),
+		allocFuncs:  make(map[string]string),
+		globVars:    make(map[string]string),
+	}
+}
+
+func TestBasic(t *testing.T) {
+	checkTunables(tunables)
+	s := mkGenState()
+	for i := 0; i < 1000; i++ {
+		s.wr = NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic)
+		fp := s.GenFunc(i, i)
+		var buf bytes.Buffer
+		b := &buf
+		wr := NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic)
+		s.wr = wr
+		s.emitCaller(fp, b, i)
+		s.wr = NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic)
+		s.emitChecker(fp, b, i, true)
+		wr.Check(s.wr)
+	}
+	if s.errs != 0 {
+		t.Errorf("%d errors during Generate", s.errs)
+	}
+}
+
+func TestMoreComplicated(t *testing.T) {
+	saveit := tunables
+	defer func() { tunables = saveit }()
+
+	checkTunables(tunables)
+	s := mkGenState()
+	for i := 0; i < 10000; i++ {
+		s.wr = NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic)
+		fp := s.GenFunc(i, i)
+		var buf bytes.Buffer
+		b := &buf
+		wr := NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic)
+		s.wr = wr
+		s.emitCaller(fp, b, i)
+		verb(1, "finished iter %d caller", i)
+		s.wr = NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic)
+		s.emitChecker(fp, b, i, true)
+		verb(1, "finished iter %d checker", i)
+		wr.Check(s.wr)
+		if s.errs != 0 {
+			t.Errorf("%d errors during Generate iter %d", s.errs, i)
+		}
+	}
+}
+
+func TestIsBuildable(t *testing.T) {
+	testenv.NeedsTool(t, "go")
+	if runtime.GOOS == "android" {
+		t.Skipf("the dependencies are not available on android")
+	}
+
+	td := t.TempDir()
+	verb(1, "generating into temp dir %s", td)
+	checkTunables(tunables)
+	pack := filepath.Base(td)
+	s := GenConfig{
+		Tag:              "x",
+		OutDir:           td,
+		PkgPath:          pack,
+		NumTestFunctions: 10,
+		NumTestPackages:  10,
+		MaxFail:          10,
+		RandCtl:          RandCtlChecks | RandCtlPanic,
+	}
+	errs := Generate(s)
+	if errs != 0 {
+		t.Errorf("%d errors during Generate", errs)
+	}
+
+	verb(1, "building %s\n", td)
+
+	cmd := exec.Command("go", "run", ".")
+	cmd.Dir = td
+	coutput, cerr := cmd.CombinedOutput()
+	if cerr != nil {
+		t.Errorf("go run command failed: %s\n", string(coutput))
+	}
+	verb(1, "output is: %s\n", string(coutput))
+}
+
+// TestExhaustive does a series of code generation runs, starting with
+// (relatively) simple code and then getting progressively more
+// complex (more params, deeper structs, turning on additional
+// features such as address-taken vars and reflect testing). The
+// intent here is mainly to ensure that the tester still works if you
+// turn things on and off, e.g. that each feature is separately
+// controllable and not linked to other things.
+func TestExhaustive(t *testing.T) {
+	testenv.NeedsTool(t, "go")
+	if runtime.GOOS == "android" {
+		t.Skipf("the dependencies are not available on android")
+	}
+
+	if testing.Short() {
+		t.Skip("skipping test in short mode.")
+	}
+
+	td := t.TempDir()
+	verb(1, "generating into temp dir %s", td)
+
+	scenarios := []struct {
+		name     string
+		adjuster func()
+	}{
+		{
+			"minimal",
+			func() {
+				tunables.nParmRange = 3
+				tunables.nReturnRange = 3
+				tunables.structDepth = 1
+				tunables.recurPerc = 0
+				tunables.methodPerc = 0
+				tunables.doReflectCall = false
+				tunables.doDefer = false
+				tunables.takeAddress = false
+				tunables.doFuncCallValues = false
+				tunables.doSkipCompare = false
+				checkTunables(tunables)
+			},
+		},
+		{
+			"moreparms",
+			func() {
+				tunables.nParmRange = 15
+				tunables.nReturnRange = 7
+				tunables.structDepth = 3
+				checkTunables(tunables)
+			},
+		},
+		{
+			"addrecur",
+			func() {
+				tunables.recurPerc = 20
+				checkTunables(tunables)
+			},
+		},
+		{
+			"addmethod",
+			func() {
+				tunables.methodPerc = 25
+				tunables.pointerMethodCallPerc = 30
+				checkTunables(tunables)
+			},
+		},
+		{
+			"addtakeaddr",
+			func() {
+				tunables.takeAddress = true
+				tunables.takenFraction = 20
+				checkTunables(tunables)
+			},
+		},
+		{
+			"addreflect",
+			func() {
+				tunables.doReflectCall = true
+				checkTunables(tunables)
+			},
+		},
+		{
+			"adddefer",
+			func() {
+				tunables.doDefer = true
+				checkTunables(tunables)
+			},
+		},
+		{
+			"addfuncval",
+			func() {
+				tunables.doFuncCallValues = true
+				checkTunables(tunables)
+			},
+		},
+		{
+			"addskipcompare",
+			func() {
+				tunables.doSkipCompare = true
+				checkTunables(tunables)
+			},
+		},
+	}
+
+	// Loop over scenarios and make sure each one works properly.
+	for i, s := range scenarios {
+		t.Logf("running %s\n", s.name)
+		s.adjuster()
+		os.RemoveAll(td)
+		pack := filepath.Base(td)
+		c := GenConfig{
+			Tag:              "x",
+			OutDir:           td,
+			PkgPath:          pack,
+			NumTestFunctions: 10,
+			NumTestPackages:  10,
+			Seed:             int64(i + 9),
+			MaxFail:          10,
+			RandCtl:          RandCtlChecks | RandCtlPanic,
+		}
+		errs := Generate(c)
+		if errs != 0 {
+			t.Errorf("%d errors during scenarios %q Generate", errs, s.name)
+		}
+		cmd := exec.Command("go", "run", ".")
+		cmd.Dir = td
+		coutput, cerr := cmd.CombinedOutput()
+		if cerr != nil {
+			t.Fatalf("run failed for scenario %q:  %s\n", s.name, string(coutput))
+		}
+		verb(1, "output is: %s\n", string(coutput))
+	}
+}
+
+func TestEmitBadBuildFailure(t *testing.T) {
+	testenv.NeedsTool(t, "go")
+	if runtime.GOOS == "android" {
+		t.Skipf("the dependencies are not available on android")
+	}
+
+	td := t.TempDir()
+	verb(1, "generating into temp dir %s", td)
+
+	checkTunables(tunables)
+	pack := filepath.Base(td)
+	s := GenConfig{
+		Tag:              "x",
+		OutDir:           td,
+		PkgPath:          pack,
+		NumTestFunctions: 10,
+		NumTestPackages:  10,
+		MaxFail:          10,
+		RandCtl:          RandCtlChecks | RandCtlPanic,
+		EmitBad:          1,
+	}
+	errs := Generate(s)
+	if errs != 0 {
+		t.Errorf("%d errors during Generate", errs)
+	}
+
+	cmd := exec.Command("go", "build", ".")
+	cmd.Dir = td
+	coutput, cerr := cmd.CombinedOutput()
+	if cerr == nil {
+		t.Errorf("go build command passed, expected failure. output: %s\n", string(coutput))
+	}
+}
+
+func TestEmitBadRunFailure(t *testing.T) {
+	testenv.NeedsTool(t, "go")
+	if runtime.GOOS == "android" {
+		t.Skipf("the dependencies are not available on android")
+	}
+
+	td := t.TempDir()
+	verb(1, "generating into temp dir %s", td)
+
+	checkTunables(tunables)
+	pack := filepath.Base(td)
+	s := GenConfig{
+		Tag:              "x",
+		OutDir:           td,
+		PkgPath:          pack,
+		NumTestFunctions: 10,
+		NumTestPackages:  10,
+		MaxFail:          10,
+		RandCtl:          RandCtlChecks | RandCtlPanic,
+		EmitBad:          2,
+	}
+	errs := Generate(s)
+	if errs != 0 {
+		t.Errorf("%d errors during Generate", errs)
+	}
+
+	// build
+	cmd := exec.Command("go", "build", ".")
+	cmd.Dir = td
+	coutput, cerr := cmd.CombinedOutput()
+	if cerr != nil {
+		t.Fatalf("build failed: %s\n", string(coutput))
+	}
+
+	// run
+	cmd = exec.Command("./" + pack)
+	cmd.Dir = td
+	coutput, cerr = cmd.CombinedOutput()
+	if cerr == nil {
+		t.Fatalf("run passed, expected failure -- run output: %s", string(coutput))
+	}
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/generator.go b/cmd/signature-fuzzer/internal/fuzz-generator/generator.go
new file mode 100644
index 0000000..bbe53fb
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/generator.go
@@ -0,0 +1,2269 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This package generates source code for a stand-alone Go program
+// useful for function signature fuzzing. The generated program is a
+// series of function pairs, a "Caller" function and a "Checker"
+// function. The signature of the Checker function is generated
+// randomly (random number of parameters and returns, each with
+// randomly chosen types). The "Caller" func contains invocations of
+// the "Checker" function, each passing randomly chosen values to the
+// params of the "Checker", then the caller verifies that expected
+// values are returned correctly.  The "Checker" function in turn has
+// code to verify that the expected values arrive correctly, and so
+// on.
+//
+// The main exported items of interest for this package are:
+//
+// - the Generate function, which takes a GenConfig object and emits
+//   code according to the config's specification
+//
+// - the GenConfig struct, which is basically a large collection of
+//   knobs/switches to control the mechanics of how/where code is
+//   generated
+//
+// - the TunableParams struct, which controls the nature of the
+//   generated code (for example, the maximum number of function
+//   parameters, etc), and the SetTunables func which tells the
+//   package what tunable parameters to use.
+
+// Notes for posterity:
+// - many parts of this package would have been better off being written
+//   using text/template instead of generating code directly; perhaps
+//   at some point it could be converted over (big job).
+// - for the various 'fractions' fields in the TunableParams struct,
+//   it would be good to have a named type of some sort, with methods
+//   for managing things like checking to make sure values sum to 100.
+
+package generator
+
+import (
+	"bytes"
+	"crypto/sha1"
+	"errors"
+	"fmt"
+	"html/template"
+	"log"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"strconv"
+	"strings"
+)
+
+// GenConfig contains configuration parameters relating to the
+// mechanics of the code generation, e.g. how many packages/functions
+// to emit, path to a directory into which we place the generated
+// code, prefixes/package names for the generated code, and so on.
+type GenConfig struct {
+	// Tag is a string prefix prepended to functions within
+	// the generated code.
+	Tag string
+
+	// Output directory into which we'll emit generated code.
+	// This will be created if it does not exist.
+	OutDir string
+
+	// Package path prefix given to the generated code.
+	PkgPath string
+
+	// Number of test packages created within the generated corpus.
+	// Each test package is essentially an independent collection of
+	// generated code; the point of having multiple packages is to
+	// be able to get faster builds (more parallelism), and to avoid
+	// the compile time issues that crop up with 'giant' packages.
+	NumTestPackages int
+
+	// Number of test function pairs within each generated test package.
+	// Each pair consists of a "caller" function and a "callee" function.
+	NumTestFunctions int
+
+	// Seed for random number generator.
+	Seed int64
+
+	// Pragma is a "//go:..." compiler directive to apply to the
+	// callee function as part of a generated function pair.
+	Pragma string
+
+	// Function and package mask used for minimization purposes.
+	// If a given mask is non-nil, then the generator will only
+	// emit code for a given func or package if its index is
+	// present in the mask map.
+	FcnMask map[int]int
+	PkgMask map[int]int
+
+	// Maximum number of failures to encounter before bailing out.
+	MaxFail int
+
+	// ForceStackGrowth if set tells the generator to insert
+	// calls to runtime.gcTestMoveStackOnNextCall at various points
+	// in the generated code.
+	ForceStackGrowth bool
+
+	// Random number generator control flag (debugging)
+	RandCtl int
+
+	// Tells the generator to run "goimports" on the emitted code.
+	RunGoImports bool
+
+	// Debugging/testing hook. If set to 1, emit code that will cause the
+	// build to fail; if set to 2, emit code that will cause a test to fail.
+	EmitBad int
+
+	// If EmitBad above is set, then these can be used to select the ID of
+	// a specific bad func/package.
+	BadPackageIdx int
+	BadFuncIdx    int
+}
+
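+// Minimal illustrative use of this configuration (mirroring the setup in
+// gen_test.go; the Tag, OutDir, PkgPath and Seed values are placeholders):
+//
+//	cfg := GenConfig{
+//		Tag:              "x",
+//		OutDir:           "/tmp/fuzzTest",
+//		PkgPath:          "fuzzTest",
+//		NumTestPackages:  10,
+//		NumTestFunctions: 10,
+//		Seed:             12345,
+//		MaxFail:          10,
+//		RandCtl:          RandCtlChecks | RandCtlPanic,
+//	}
+//	if errs := Generate(cfg); errs != 0 {
+//		// ... report generation errors ...
+//	}
+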
+const CallerName = "Caller"
+const CheckerName = "Checker"
+
+// TunableParams contains configuration parameters that control the
+// flavor of code generated for a given test function. This includes
+// things like the number of params/returns, the percentages of types
+// (int, struct, etc) of the params/returns, and so on.
+type TunableParams struct {
+	// between 0 and N params
+	nParmRange uint8
+
+	// between 0 and N returns
+	nReturnRange uint8
+
+	// structs have between 0 and N members
+	nStructFields uint8
+
+	// arrays/slices have between 0 and N elements
+	nArrayElements uint8
+
+	// fraction of slices vs arrays. This is a value between 0 and 100 (0 meaning
+	// no slices [only arrays] and 100 meaning all slices, no arrays).
+	sliceFraction uint8
+
+	// Controls how often "int" vars wind up as 8/16/32/64, should
+	// add up to 100. Ex: 100 0 0 0 means all ints are 8 bit, 25
+	// 25 25 25 means equal likelihood of all types.
+	intBitRanges [4]uint8
+
+	// Similar to the above but for 32/64 float types
+	floatBitRanges [2]uint8
+
+	// Similar to the above but for unsigned, signed ints.
+	unsignedRanges [2]uint8
+
+	// Percentage of params, struct fields that should be "_". Ranges
+	// from 0 to 100.
+	blankPerc uint8
+
+	// How deeply structs are allowed to be nested (ranges from 0 to N).
+	structDepth uint8
+
+	// Fraction of param and return types assigned to each of:
+	// struct/array/map/pointer/int/float/complex/byte/string at the
+	// top level. If nesting precludes using a struct, other types
+	// are chosen instead according to the same proportions. The sum
+	// of typeFractions values should add up to 100.
+	typeFractions [9]uint8
+
+	// Percentage of the time we'll emit recursive calls, from 0 to 100.
+	recurPerc uint8
+
+	// Percentage of time that we turn the test function into a method,
+	// and if it is a method, fraction of time that we use a pointer
+	// method call vs value method call. Each range from 0 to 100.
+	methodPerc            uint8
+	pointerMethodCallPerc uint8
+
+	// If true, test reflect.Call path as well.
+	doReflectCall bool
+
+	// If true, then randomly take addresses of params/returns.
+	takeAddress bool
+
+	// Fraction of the time that any params/returns are address taken.
+	// Ranges from 0 to 100.
+	takenFraction uint8
+
+	// For a given address-taken param or return, controls the
+	// manner in which the indirect read or write takes
+	// place. This is a set of percentages for
+	// not/simple/passed/heap, where "not" means not address
+	// taken, "simple" means a simple read or write, "passed"
+	// means that the address is passed to a well-behaved
+	// function, and "heap" means that the address is assigned to
+	// a global. Values in addrFractions should add up to 100.
+	addrFractions [4]uint8
+
+	// If true, then perform testing of go/defer statements.
+	doDefer bool
+
+	// fraction of test functions for which we emit a defer. Ranges from 0 to 100.
+	deferFraction uint8
+
+	// If true, randomly pick between emitting a value by literal
+	// (e.g. "int(1)") vs emitting a call to a function that
+	// will produce the same value (e.g. "myHelperEmitsInt1()").
+	doFuncCallValues bool
+
+	// Fraction of the time that we emit a function call to create
+	// a param value vs emitting a literal. Ranges from 0 to 100.
+	funcCallValFraction uint8
+
+	// If true, randomly decide to not check selected components of
+	// a composite value (e.g. for a struct, check field F1 but not F2).
+	// The intent is to generate partially live values.
+	doSkipCompare bool
+
+	// Fraction of the time that we decide to skip sub-components of
+	// composite values. Ranges from 0 to 100.
+	skipCompareFraction uint8
+}
+
+// SetTunables accepts a TunableParams object, checks to make sure
+// that the settings in it are sane/logical, and applies the
+// parameters for any subsequent calls to the Generate function. This
+// function will issue a fatal error if any of the tunable params are
+// incorrect/insane (for example, a 'percentage' value outside the
+// range of 0-100).
+func SetTunables(t TunableParams) {
+	checkTunables(t)
+	tunables = t
+}
+
+var defaultTypeFractions = [9]uint8{
+	10, // struct
+	10, // array
+	10, // map
+	15, // pointer
+	20, // numeric
+	15, // float
+	5,  // complex
+	5,  // byte
+	10, // string
+}
+
+const (
+	// Indices into typeFractions / defaultTypeFractions.
+	StructTfIdx = iota
+	ArrayTfIdx
+	MapTfIdx
+	PointerTfIdx
+	NumericTfIdx
+	FloatTfIdx
+	ComplexTfIdx
+	ByteTfIdx
+	StringTfIdx
+)
+
+var tunables = TunableParams{
+	nParmRange:            15,
+	nReturnRange:          7,
+	nStructFields:         7,
+	nArrayElements:        5,
+	sliceFraction:         50,
+	intBitRanges:          [4]uint8{30, 20, 20, 30},
+	floatBitRanges:        [2]uint8{50, 50},
+	unsignedRanges:        [2]uint8{50, 50},
+	blankPerc:             15,
+	structDepth:           3,
+	typeFractions:         defaultTypeFractions,
+	recurPerc:             20,
+	methodPerc:            10,
+	pointerMethodCallPerc: 50,
+	doReflectCall:         true,
+	doDefer:               true,
+	takeAddress:           true,
+	doFuncCallValues:      true,
+	takenFraction:         20,
+	deferFraction:         30,
+	funcCallValFraction:   5,
+	doSkipCompare:         true,
+	skipCompareFraction:   10,
+	addrFractions:         [4]uint8{50, 25, 15, 10},
+}
+
+func DefaultTunables() TunableParams {
+	return tunables
+}
+
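+// Illustrative use of the tunables API (a sketch; a caller would typically
+// start from the package defaults and switch selected features off before
+// invoking Generate):
+//
+//	t := DefaultTunables()
+//	t.DisableReflectionCalls()
+//	t.DisableDefer()
+//	SetTunables(t)
+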
+func checkTunables(t TunableParams) {
+	var s int = 0
+
+	for _, v := range t.intBitRanges {
+		s += int(v)
+	}
+	if s != 100 {
+		log.Fatal(errors.New("intBitRanges tunable does not sum to 100"))
+	}
+
+	s = 0
+	for _, v := range t.unsignedRanges {
+		s += int(v)
+	}
+	if s != 100 {
+		log.Fatal(errors.New("unsignedRanges tunable does not sum to 100"))
+	}
+
+	if t.blankPerc > 100 {
+		log.Fatal(errors.New("blankPerc bad value, over 100"))
+	}
+	if t.recurPerc > 100 {
+		log.Fatal(errors.New("recurPerc bad value, over 100"))
+	}
+	if t.methodPerc > 100 {
+		log.Fatal(errors.New("methodPerc bad value, over 100"))
+	}
+	if t.pointerMethodCallPerc > 100 {
+		log.Fatal(errors.New("pointerMethodCallPerc bad value, over 100"))
+	}
+
+	s = 0
+	for _, v := range t.floatBitRanges {
+		s += int(v)
+	}
+	if s != 100 {
+		log.Fatal(errors.New("floatBitRanges tunable does not sum to 100"))
+	}
+
+	s = 0
+	for _, v := range t.typeFractions {
+		s += int(v)
+	}
+	if s != 100 {
+		panic(errors.New("typeFractions tunable does not sum to 100"))
+	}
+
+	s = 0
+	for _, v := range t.addrFractions {
+		s += int(v)
+	}
+	if s != 100 {
+		log.Fatal(errors.New("addrFractions tunable does not sum to 100"))
+	}
+	if t.takenFraction > 100 {
+		log.Fatal(errors.New("takenFraction not between 0 and 100"))
+	}
+	if t.deferFraction > 100 {
+		log.Fatal(errors.New("deferFraction not between 0 and 100"))
+	}
+	if t.sliceFraction > 100 {
+		log.Fatal(errors.New("sliceFraction not between 0 and 100"))
+	}
+	if t.skipCompareFraction > 100 {
+		log.Fatal(errors.New("skipCompareFraction not between 0 and 100"))
+	}
+}
+
+func (t *TunableParams) DisableReflectionCalls() {
+	t.doReflectCall = false
+}
+
+func (t *TunableParams) DisableRecursiveCalls() {
+	t.recurPerc = 0
+}
+
+func (t *TunableParams) DisableMethodCalls() {
+	t.methodPerc = 0
+}
+
+func (t *TunableParams) DisableTakeAddr() {
+	t.takeAddress = false
+}
+
+func (t *TunableParams) DisableDefer() {
+	t.doDefer = false
+}
+
+func (t *TunableParams) LimitInputs(n int) error {
+	if n > 100 {
+		return fmt.Errorf("value %d passed to LimitInputs is too large (max 100)", n)
+	}
+	if n < 0 {
+		return fmt.Errorf("value %d passed to LimitInputs is invalid", n)
+	}
+	t.nParmRange = uint8(n)
+	return nil
+}
+
+func (t *TunableParams) LimitOutputs(n int) error {
+	if n > 100 {
+		return fmt.Errorf("value %d passed to LimitOutputs is too large (max 100)", n)
+	}
+	if n < 0 {
+		return fmt.Errorf("value %d passed to LimitOutputs is invalid", n)
+	}
+	t.nReturnRange = uint8(n)
+	return nil
+}
+
+// ParseMaskString parses a string of the form K:J:...:M-N:Q-R:...:Z,
+// e.g. colon-separated integers or ranges of integers, returning the
+// result in a form suitable for the FcnMask or PkgMask fields in a
+// GenConfig. Here "tag" holds the mask flavor (fcn or pkg) and "arg" is
+// the string argument to be parsed.
+func ParseMaskString(arg string, tag string) (map[int]int, error) {
+	if arg == "" {
+		return nil, nil
+	}
+	verb(1, "%s mask is %s", tag, arg)
+	m := make(map[int]int)
+	ss := strings.Split(arg, ":")
+	for _, s := range ss {
+		if strings.Contains(s, "-") {
+			rng := strings.Split(s, "-")
+			if len(rng) != 2 {
+				return nil, fmt.Errorf("malformed range %s in %s mask arg", s, tag)
+			}
+			i, err := strconv.Atoi(rng[0])
+			if err != nil {
+				return nil, fmt.Errorf("malformed range value %s in %s mask arg", rng[0], tag)
+			}
+			j, err2 := strconv.Atoi(rng[1])
+			if err2 != nil {
+				return nil, fmt.Errorf("malformed range value %s in %s mask arg", rng[1], tag)
+			}
+			for k := i; k < j; k++ {
+				m[k] = 1
+			}
+		} else {
+			i, err := strconv.Atoi(s)
+			if err != nil {
+				return nil, fmt.Errorf("malformed value %s in %s mask arg", s, tag)
+			}
+			m[i] = 1
+		}
+	}
+	return m, nil
+}
+
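+// Example (follows directly from the parsing code above): masks are
+// colon-separated, and the upper bound of a range is exclusive as written,
+// so ParseMaskString("2:5-7", "fcn") yields map[int]int{2: 1, 5: 1, 6: 1}.
+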
+func writeCom(b *bytes.Buffer, i int) {
+	if i != 0 {
+		b.WriteString(", ")
+	}
+}
+
+var Verbctl int = 0
+
+func verb(vlevel int, s string, a ...interface{}) {
+	if Verbctl >= vlevel {
+		fmt.Printf(s, a...)
+		fmt.Printf("\n")
+	}
+}
+
+type funcdef struct {
+	idx         int
+	structdefs  []structparm
+	arraydefs   []arrayparm
+	typedefs    []typedefparm
+	mapdefs     []mapparm
+	mapkeytypes []parm
+	mapkeytmps  []string
+	mapkeyts    string
+	receiver    parm
+	params      []parm
+	returns     []parm
+	values      []int
+	dodefc      uint8
+	dodefp      []uint8
+	rstack      int
+	recur       bool
+	isMethod    bool
+}
+
+type genstate struct {
+	GenConfig
+	ipref string
+	//tag            string
+	//numtpk         int
+	pkidx int
+	errs  int
+	//pragma         string
+	//sforce         bool
+	//randctl        int
+	tunables       TunableParams
+	tstack         []TunableParams
+	derefFuncs     map[string]string
+	newDerefFuncs  []funcdesc
+	assignFuncs    map[string]string
+	newAssignFuncs []funcdesc
+	allocFuncs     map[string]string
+	newAllocFuncs  []funcdesc
+	genvalFuncs    map[string]string
+	newGenvalFuncs []funcdesc
+	globVars       map[string]string
+	newGlobVars    []funcdesc
+	wr             *wraprand
+}
+
+func (s *genstate) intFlavor() string {
+	which := uint8(s.wr.Intn(100))
+	if which < s.tunables.unsignedRanges[0] {
+		return "uint"
+	}
+	return "int"
+}
+
+func (s *genstate) intBits() uint32 {
+	which := uint8(s.wr.Intn(100))
+	var t uint8 = 0
+	var bits uint32 = 8
+	for _, v := range s.tunables.intBitRanges {
+		t += v
+		if which < t {
+			return bits
+		}
+		bits *= 2
+	}
+	return uint32(s.tunables.intBitRanges[3])
+}
+
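+// Worked example for intBits (using the default intBitRanges of
+// {30, 20, 20, 30}): a draw of which=45 misses the first bucket (45 >= 30)
+// but lands in the cumulative second bucket (45 < 50), so it returns 16.
+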
+func (s *genstate) floatBits() uint32 {
+	which := uint8(s.wr.Intn(100))
+	if which < s.tunables.floatBitRanges[0] {
+		return uint32(32)
+	}
+	return uint32(64)
+}
+
+func (s *genstate) genAddrTaken() addrTakenHow {
+	which := uint8(s.wr.Intn(100))
+	res := notAddrTaken
+	var t uint8 = 0
+	for _, v := range s.tunables.addrFractions {
+		t += v
+		if which < t {
+			return res
+		}
+		res++
+	}
+	return notAddrTaken
+}
+
+func (s *genstate) pushTunables() {
+	s.tstack = append(s.tstack, s.tunables)
+}
+
+func (s *genstate) popTunables() {
+	if len(s.tstack) == 0 {
+		panic("tunables stack underflow")
+	}
+	// Pop the most recently pushed settings (LIFO), since push/pop
+	// pairs can nest (e.g. GenMapKeyType -> GenParm).
+	s.tunables = s.tstack[len(s.tstack)-1]
+	s.tstack = s.tstack[:len(s.tstack)-1]
+}
+
+// redistributeFraction accepts a value 'toIncorporate' and updates
+// 'typeFraction' to add in the values from 'toIncorporate' equally to
+// all slots not in 'avoid'. This is done by successively walking
+// through 'typeFraction' adding 1 to each non-avoid slot, then
+// repeating until we've added a total of 'toIncorporate' elements.
+// See precludeSelectedTypes below for more info.
+func (s *genstate) redistributeFraction(toIncorporate uint8, avoid []int) {
+	inavoid := func(j int) bool {
+		for _, k := range avoid {
+			if j == k {
+				return true
+			}
+		}
+		return false
+	}
+
+	doredis := func() {
+		for {
+			for i := range s.tunables.typeFractions {
+				if inavoid(i) {
+					continue
+				}
+				s.tunables.typeFractions[i]++
+				toIncorporate--
+				if toIncorporate == 0 {
+					return
+				}
+			}
+		}
+	}
+	doredis()
+	checkTunables(s.tunables)
+}
+
+// precludeSelectedTypes accepts a set of values (t, t2, ...)
+// corresponding to slots in 'typeFractions', sums up the values from
+// the slots, zeroes out the slots, and finally takes the values and
+// redistributes them equally to the other slots.  For example,
+// suppose 'typeFractions' starts as [10, 10, 10, 15, 20, 15, 5, 5, 10],
+// then we decide we want to eliminate or 'knock out' map types and
+// pointer types (slots 2 and 3 in the array above) going forward.  To
+// restore the invariant that values in 'typeFractions' sum to 100, we
+// take the values from slots 2 and 3 (a total of 25) and evenly
+// distribute those values to the other slots in the array.
+func (s *genstate) precludeSelectedTypes(t int, t2 ...int) {
+	avoid := []int{t}
+	avoid = append(avoid, t2...)
+	f := uint8(0)
+	for _, idx := range avoid {
+		f += s.tunables.typeFractions[idx]
+		s.tunables.typeFractions[idx] = 0
+	}
+	s.redistributeFraction(f, avoid)
+}
+
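+// Concrete example (derived from the code above and defaultTypeFractions):
+// calling s.precludeSelectedTypes(MapTfIdx, PointerTfIdx) with fractions
+// [10 10 10 15 20 15 5 5 10] zeroes slots 2 and 3 (a total of 25) and
+// round-robins that 25 across the remaining slots, yielding
+// [14 14 0 0 24 19 8 8 13], which again sums to 100.
+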
+func (s *genstate) GenMapKeyType(f *funcdef, depth int, pidx int) parm {
+	s.pushTunables()
+	defer s.popTunables()
+	// Maps we can't allow as key types at all; pointers might be
+	// possible but would be too much work to arrange. Avoid slices as well.
+	s.tunables.sliceFraction = 0
+	s.precludeSelectedTypes(MapTfIdx, PointerTfIdx)
+	return s.GenParm(f, depth+1, false, pidx)
+}
+
+func (s *genstate) GenParm(f *funcdef, depth int, mkctl bool, pidx int) parm {
+
+	// Enforce the struct/array/map/pointer nesting depth limit.
+	toodeep := depth >= int(s.tunables.structDepth)
+	if toodeep {
+		s.pushTunables()
+		defer s.popTunables()
+		s.precludeSelectedTypes(StructTfIdx, ArrayTfIdx, MapTfIdx, PointerTfIdx)
+	}
+
+	// Convert tf into a cumulative sum
+	tf := s.tunables.typeFractions
+	sum := uint8(0)
+	for i := 0; i < len(tf); i++ {
+		sum += tf[i]
+		tf[i] = sum
+	}
+
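+	// Example of the bucket selection in the switch below (using
+	// defaultTypeFractions): the cumulative sums are
+	// [10 20 30 45 65 80 85 90 100], so a draw of which=50 falls in
+	// the numeric bucket (45 <= 50 < 65).
+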
+	isblank := uint8(s.wr.Intn(100)) < s.tunables.blankPerc
+	addrTaken := notAddrTaken
+	if depth == 0 && tunables.takeAddress && !isblank {
+		addrTaken = s.genAddrTaken()
+	}
+	isGenValFunc := tunables.doFuncCallValues &&
+		uint8(s.wr.Intn(100)) < s.tunables.funcCallValFraction
+
+	// Make adjusted selection (pick a bucket within tf)
+	which := uint8(s.wr.Intn(100))
+	verb(3, "which=%d", which)
+	var retval parm
+	switch {
+	case which < tf[StructTfIdx]:
+		{
+			if toodeep {
+				panic("should not be here")
+			}
+			var sp structparm
+			ns := len(f.structdefs)
+			sp.sname = fmt.Sprintf("StructF%dS%d", f.idx, ns)
+			sp.qname = fmt.Sprintf("%s.StructF%dS%d",
+				s.checkerPkg(pidx), f.idx, ns)
+			f.structdefs = append(f.structdefs, sp)
+			tnf := int64(s.tunables.nStructFields) / int64(depth+1)
+			nf := int(s.wr.Intn(tnf))
+			for fi := 0; fi < nf; fi++ {
+				fp := s.GenParm(f, depth+1, false, pidx)
+				skComp := tunables.doSkipCompare &&
+					uint8(s.wr.Intn(100)) < s.tunables.skipCompareFraction
+				if skComp && checkableElements(fp) != 0 {
+					fp.SetSkipCompare(SkipAll)
+				}
+				sp.fields = append(sp.fields, fp)
+			}
+			f.structdefs[ns] = sp
+			retval = &sp
+		}
+	case which < tf[ArrayTfIdx]:
+		{
+			if toodeep {
+				panic("should not be here")
+			}
+			var ap arrayparm
+			ns := len(f.arraydefs)
+			nel := uint8(s.wr.Intn(int64(s.tunables.nArrayElements)))
+			issl := uint8(s.wr.Intn(100)) < s.tunables.sliceFraction
+			ap.aname = fmt.Sprintf("ArrayF%dS%dE%d", f.idx, ns, nel)
+			ap.qname = fmt.Sprintf("%s.ArrayF%dS%dE%d", s.checkerPkg(pidx),
+				f.idx, ns, nel)
+			f.arraydefs = append(f.arraydefs, ap)
+			ap.nelements = nel
+			ap.slice = issl
+			ap.eltype = s.GenParm(f, depth+1, false, pidx)
+			ap.eltype.SetBlank(false)
+			skComp := tunables.doSkipCompare &&
+				uint8(s.wr.Intn(100)) < s.tunables.skipCompareFraction
+			if skComp && checkableElements(ap.eltype) != 0 {
+				if issl {
+					ap.SetSkipCompare(SkipPayload)
+				}
+			}
+			f.arraydefs[ns] = ap
+			retval = &ap
+		}
+	case which < tf[MapTfIdx]:
+		{
+			if toodeep {
+				panic("should not be here")
+			}
+			var mp mapparm
+			ns := len(f.mapdefs)
+
+			// append early, since calls below might also append
+			f.mapdefs = append(f.mapdefs, mp)
+			f.mapkeytmps = append(f.mapkeytmps, "")
+			f.mapkeytypes = append(f.mapkeytypes, mp.keytype)
+			mp.aname = fmt.Sprintf("MapF%dM%d", f.idx, ns)
+			if f.mapkeyts == "" {
+				f.mapkeyts = fmt.Sprintf("MapKeysF%d", f.idx)
+			}
+			mp.qname = fmt.Sprintf("%s.MapF%dM%d", s.checkerPkg(pidx),
+				f.idx, ns)
+			mkt := fmt.Sprintf("Mk%dt%d", f.idx, ns)
+			mp.keytmp = mkt
+			mk := s.GenMapKeyType(f, depth+1, pidx)
+			mp.keytype = mk
+			mp.valtype = s.GenParm(f, depth+1, false, pidx)
+			mp.valtype.SetBlank(false)
+			mp.keytype.SetBlank(false)
+			// now update the previously appended placeholders
+			f.mapdefs[ns] = mp
+			f.mapkeytypes[ns] = mk
+			f.mapkeytmps[ns] = mkt
+			retval = &mp
+		}
+	case which < tf[PointerTfIdx]:
+		{
+			if toodeep {
+				panic("should not be here")
+			}
+			pp := mkPointerParm(s.GenParm(f, depth+1, false, pidx))
+			retval = &pp
+		}
+	case which < tf[NumericTfIdx]:
+		{
+			var ip numparm
+			ip.tag = s.intFlavor()
+			ip.widthInBits = s.intBits()
+			if mkctl {
+				ip.ctl = true
+			}
+			retval = &ip
+		}
+	case which < tf[FloatTfIdx]:
+		{
+			var fp numparm
+			fp.tag = "float"
+			fp.widthInBits = s.floatBits()
+			retval = &fp
+		}
+	case which < tf[ComplexTfIdx]:
+		{
+			var fp numparm
+			fp.tag = "complex"
+			fp.widthInBits = s.floatBits() * 2
+			retval = &fp
+		}
+	case which < tf[ByteTfIdx]:
+		{
+			var bp numparm
+			bp.tag = "byte"
+			bp.widthInBits = 8
+			retval = &bp
+		}
+	case which < tf[StringTfIdx]:
+		{
+			var sp stringparm
+			sp.tag = "string"
+			skComp := tunables.doSkipCompare &&
+				uint8(s.wr.Intn(100)) < s.tunables.skipCompareFraction
+			if skComp {
+				sp.SetSkipCompare(SkipPayload)
+			}
+			retval = &sp
+		}
+	default:
+		{
+			// fallback
+			var ip numparm
+			ip.tag = "uint"
+			ip.widthInBits = 8
+			retval = &ip
+		}
+	}
+	if !mkctl {
+		retval.SetBlank(isblank)
+	}
+	retval.SetAddrTaken(addrTaken)
+	retval.SetIsGenVal(isGenValFunc)
+	return retval
+}
+
+func (s *genstate) GenReturn(f *funcdef, depth int, pidx int) parm {
+	return s.GenParm(f, depth, false, pidx)
+}
+
+// GenFunc cooks up the random signature (and other attributes) of a
+// given checker function, returning a funcdef object that describes
+// the new fcn.
+func (s *genstate) GenFunc(fidx int, pidx int) *funcdef {
+	f := new(funcdef)
+	f.idx = fidx
+	numParams := int(s.wr.Intn(int64(1 + int(s.tunables.nParmRange))))
+	numReturns := int(s.wr.Intn(int64(1 + int(s.tunables.nReturnRange))))
+	f.recur = uint8(s.wr.Intn(100)) < s.tunables.recurPerc
+	f.isMethod = uint8(s.wr.Intn(100)) < s.tunables.methodPerc
+	genReceiverType := func() {
+		// Receiver type can't be pointer type. Temporarily update
+		// tunables to eliminate that possibility.
+		s.pushTunables()
+		defer s.popTunables()
+		s.precludeSelectedTypes(PointerTfIdx)
+		target := s.GenParm(f, 0, false, pidx)
+		target.SetBlank(false)
+		f.receiver = s.makeTypedefParm(f, target, pidx)
+		if f.receiver.IsBlank() {
+			f.recur = false
+		}
+	}
+	if f.isMethod {
+		genReceiverType()
+	}
+	needControl := f.recur
+	f.dodefc = uint8(s.wr.Intn(100))
+	pTaken := uint8(s.wr.Intn(100)) < s.tunables.takenFraction
+	for pi := 0; pi < numParams; pi++ {
+		newparm := s.GenParm(f, 0, needControl, pidx)
+		if !pTaken {
+			newparm.SetAddrTaken(notAddrTaken)
+		}
+		if newparm.IsControl() {
+			needControl = false
+		}
+		f.params = append(f.params, newparm)
+		f.dodefp = append(f.dodefp, uint8(s.wr.Intn(100)))
+	}
+	if f.recur && needControl {
+		f.recur = false
+	}
+
+	rTaken := uint8(s.wr.Intn(100)) < s.tunables.takenFraction
+	for ri := 0; ri < numReturns; ri++ {
+		r := s.GenReturn(f, 0, pidx)
+		if !rTaken {
+			r.SetAddrTaken(notAddrTaken)
+		}
+		f.returns = append(f.returns, r)
+	}
+	spw := uint(s.wr.Intn(11))
+	rstack := 1 << spw
+	if rstack < 4 {
+		rstack = 4
+	}
+	f.rstack = rstack
+	return f
+}
+
+func genDeref(p parm) (parm, string) {
+	curp := p
+	star := ""
+	for {
+		if pp, ok := curp.(*pointerparm); ok {
+			star += "*"
+			curp = pp.totype
+		} else {
+			return curp, star
+		}
+	}
+}
+
+func (s *genstate) eqFuncRef(f *funcdef, t parm, caller bool) string {
+	cp := ""
+	if f.mapkeyts != "" {
+		cp = "mkt."
+	} else if caller {
+		cp = s.checkerPkg(s.pkidx) + "."
+	}
+	return cp + "Equal" + t.TypeName()
+}
+
+// emitCompareFunc creates an 'equals' function for a specific
+// generated type (this is basically a way to compare objects that
+// contain pointer fields / pointery things).
+func (s *genstate) emitCompareFunc(f *funcdef, b *bytes.Buffer, p parm) {
+	if !p.HasPointer() {
+		return
+	}
+
+	tn := p.TypeName()
+	b.WriteString(fmt.Sprintf("// equal func for %s\n", tn))
+	b.WriteString("//go:noinline\n")
+	rcvr := ""
+	if f.mapkeyts != "" {
+		rcvr = fmt.Sprintf("(mkt *%s) ", f.mapkeyts)
+	}
+	b.WriteString(fmt.Sprintf("func %sEqual%s(left %s, right %s) bool {\n", rcvr, tn, tn, tn))
+	b.WriteString("  return ")
+	numel := p.NumElements()
+	ncmp := 0
+	for i := 0; i < numel; i++ {
+		lelref, lelparm := p.GenElemRef(i, "left")
+		relref, _ := p.GenElemRef(i, "right")
+		if lelref == "" || lelref == "_" {
+			continue
+		}
+		basep, star := genDeref(lelparm)
+		// Handle *p where p is an empty struct.
+		if basep.NumElements() == 0 {
+			continue
+		}
+		if ncmp != 0 {
+			b.WriteString("  && ")
+		}
+		ncmp++
+		if basep.HasPointer() {
+			efn := s.eqFuncRef(f, basep, false)
+			b.WriteString(fmt.Sprintf(" %s(%s%s, %s%s)", efn, star, lelref, star, relref))
+		} else {
+			b.WriteString(fmt.Sprintf("%s%s == %s%s", star, lelref, star, relref))
+		}
+	}
+	if ncmp == 0 {
+		b.WriteString("true")
+	}
+	b.WriteString("\n}\n\n")
+}
+
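+// Illustrative shape of an emitted equality helper (emitted only for types
+// that contain pointers, e.g. slice-backed arrays; the type name and element
+// references below are made up):
+//
+//	// equal func for ArrayF0S0E2
+//	//go:noinline
+//	func EqualArrayF0S0E2(left ArrayF0S0E2, right ArrayF0S0E2) bool {
+//	  return left[0] == right[0] && left[1] == right[1]
+//	}
+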
+// emitStructAndArrayDefs writes out definitions of the random types
+// we happened to cook up while generating code for a specific
+// function pair.
+func (s *genstate) emitStructAndArrayDefs(f *funcdef, b *bytes.Buffer) {
+	for _, str := range f.structdefs {
+		b.WriteString(fmt.Sprintf("type %s struct {\n", str.sname))
+		for fi, sp := range str.fields {
+			sp.Declare(b, "  "+str.FieldName(fi), "\n", false)
+		}
+		b.WriteString("}\n\n")
+		s.emitCompareFunc(f, b, &str)
+	}
+	for _, a := range f.arraydefs {
+		elems := fmt.Sprintf("%d", a.nelements)
+		if a.slice {
+			elems = ""
+		}
+		b.WriteString(fmt.Sprintf("type %s [%s]%s\n\n", a.aname,
+			elems, a.eltype.TypeName()))
+		s.emitCompareFunc(f, b, &a)
+	}
+	for _, a := range f.mapdefs {
+		b.WriteString(fmt.Sprintf("type %s map[%s]%s\n\n", a.aname,
+			a.keytype.TypeName(), a.valtype.TypeName()))
+		s.emitCompareFunc(f, b, &a)
+	}
+	for _, td := range f.typedefs {
+		b.WriteString(fmt.Sprintf("type %s %s\n\n", td.aname,
+			td.target.TypeName()))
+		s.emitCompareFunc(f, b, &td)
+	}
+	if f.mapkeyts != "" {
+		b.WriteString(fmt.Sprintf("type %s struct {\n", f.mapkeyts))
+		for i := range f.mapkeytypes {
+			f.mapkeytypes[i].Declare(b, "  "+f.mapkeytmps[i], "\n", false)
+		}
+		b.WriteString("}\n\n")
+	}
+}
+
+// GenValue method of genstate wraps the parm method of the same
+// name, but optionally returns a call to a function to produce
+// the value as opposed to a literal value.
+func (s *genstate) GenValue(f *funcdef, p parm, value int, caller bool) (string, int) {
+	var valstr string
+	valstr, value = p.GenValue(s, f, value, caller)
+	if !s.tunables.doFuncCallValues || !p.IsGenVal() || caller {
+		return valstr, value
+	}
+
+	mkInvoc := func(fname string) string {
+		meth := ""
+		if f.mapkeyts != "" {
+			meth = "mkt."
+		}
+		return fmt.Sprintf("%s%s()", meth, fname)
+	}
+
+	b := bytes.NewBuffer(nil)
+	p.Declare(b, "x", "", false)
+	h := sha1.New()
+	h.Write([]byte(valstr))
+	h.Write(b.Bytes())
+	if f.mapkeyts != "" {
+		h.Write([]byte(f.mapkeyts))
+	}
+	h.Write(b.Bytes())
+	bs := h.Sum(nil)
+	hashstr := fmt.Sprintf("%x", bs)
+	b.WriteString(hashstr)
+	tag := b.String()
+	fname, ok := s.genvalFuncs[tag]
+	if ok {
+		return mkInvoc(fname), value
+	}
+
+	fname = fmt.Sprintf("genval_%d", len(s.genvalFuncs))
+	s.newGenvalFuncs = append(s.newGenvalFuncs, funcdesc{p: p, name: fname, tag: tag, payload: valstr})
+	s.genvalFuncs[tag] = fname
+	return mkInvoc(fname), value
+}
+
+func (s *genstate) emitMapKeyTmps(f *funcdef, b *bytes.Buffer, pidx int, value int, caller bool) int {
+	if f.mapkeyts == "" {
+		return value
+	}
+	// map key tmps
+	cp := ""
+	if caller {
+		cp = s.checkerPkg(pidx) + "."
+	}
+	b.WriteString("  var mkt " + cp + f.mapkeyts + "\n")
+	for i, t := range f.mapkeytypes {
+		var keystr string
+		keystr, value = s.GenValue(f, t, value, caller)
+		tname := f.mapkeytmps[i]
+		b.WriteString(fmt.Sprintf("  %s := %s\n", tname, keystr))
+		b.WriteString(fmt.Sprintf("  mkt.%s = %s\n", tname, tname))
+	}
+	return value
+}
+
+func (s *genstate) emitCheckReturnsInCaller(f *funcdef, b *bytes.Buffer, pidx int, reflectCall bool) {
+	cm := f.complexityMeasure()
+	rvalp := func(ri int) string {
+		if reflectCall {
+			return fmt.Sprintf("rr%dv", ri)
+		}
+		return fmt.Sprintf("r%d", ri)
+	}
+	failTag := "\"return\""
+	if reflectCall {
+		failTag = "\"reflect return\""
+	}
+	for ri, rp := range f.returns {
+		if reflectCall {
+			b.WriteString(fmt.Sprintf("  rr%di := rvslice[%d].Interface()\n", ri, ri))
+			b.WriteString(fmt.Sprintf("  rr%dv:= rr%di.(", ri, ri))
+			rp.Declare(b, "", "", true)
+			b.WriteString(")\n")
+		}
+		pfc := ""
+		curp, star := genDeref(rp)
+		// Handle *p where p is an empty struct.
+		if curp.NumElements() == 0 {
+			b.WriteString(fmt.Sprintf("  _, _ = %s, c%d // zero size\n", rvalp(ri), ri))
+			continue
+		}
+		if star != "" {
+			pfc = fmt.Sprintf("ParamFailCount[%d] == 0 && ", pidx)
+		}
+		if curp.HasPointer() {
+			efn := "!" + s.eqFuncRef(f, curp, true)
+			b.WriteString(fmt.Sprintf("  if %s%s(%s%s, %sc%d) {\n", pfc, efn, star, rvalp(ri), star, ri))
+		} else {
+			b.WriteString(fmt.Sprintf("  if %s%s%s != %sc%d {\n", pfc, star, rvalp(ri), star, ri))
+		}
+		b.WriteString(fmt.Sprintf("    NoteFailure(%d, %d, %d, \"%s\", %s, %d, true, uint64(0))\n", cm, pidx, f.idx, s.checkerPkg(pidx), failTag, ri))
+		b.WriteString("  }\n")
+	}
+}
+
+func (s *genstate) emitCaller(f *funcdef, b *bytes.Buffer, pidx int) {
+
+	b.WriteString(fmt.Sprintf("func %s%d(mode string) {\n", CallerName, f.idx))
+
+	b.WriteString(fmt.Sprintf("  BeginFcn(%d)\n", pidx))
+
+	if s.EmitBad == 1 {
+		if s.BadPackageIdx == pidx && s.BadFuncIdx == f.idx {
+			b.WriteString("  bad code here, should cause build failure <<==\n")
+		}
+	}
+
+	var value int = 1
+
+	s.wr.Checkpoint("before mapkeytmps")
+	value = s.emitMapKeyTmps(f, b, pidx, value, true)
+
+	// generate return constants
+	s.wr.Checkpoint("before return constants")
+	for ri, r := range f.returns {
+		rc := fmt.Sprintf("c%d", ri)
+		value = s.emitVarAssign(f, b, r, rc, value, true)
+	}
+
+	// generate param constants
+	s.wr.Checkpoint("before param constants")
+	for pi, p := range f.params {
+		verb(4, "emitCaller gen p%d value=%d", pi, value)
+		if p.IsControl() {
+			_ = uint8(s.wr.Intn(100)) < 50
+			p.Declare(b, fmt.Sprintf("  var p%d ", pi), " = 10\n", true)
+		} else {
+			pc := fmt.Sprintf("p%d", pi)
+			value = s.emitVarAssign(f, b, p, pc, value, true)
+		}
+		f.values = append(f.values, value)
+	}
+
+	// generate receiver constant if applicable
+	if f.isMethod {
+		s.wr.Checkpoint("before receiver constant")
+		f.receiver.Declare(b, "  var rcvr", "\n", true)
+		valstr, value := s.GenValue(f, f.receiver, value, true)
+		b.WriteString(fmt.Sprintf("  rcvr = %s\n", valstr))
+		f.values = append(f.values, value)
+	}
+
+	b.WriteString(fmt.Sprintf("  Mode[%d] = \"\"\n", pidx))
+
+	// calling code
+	b.WriteString(fmt.Sprintf("  // %d returns %d params\n",
+		len(f.returns), len(f.params)))
+	if s.ForceStackGrowth {
+		b.WriteString("  hackStack() // force stack growth on next call\n")
+	}
+	b.WriteString("  if mode == \"normal\" {\n")
+	b.WriteString("  ")
+	for ri := range f.returns {
+		writeCom(b, ri)
+		b.WriteString(fmt.Sprintf("r%d", ri))
+	}
+	if len(f.returns) > 0 {
+		b.WriteString(" := ")
+	}
+	pref := s.checkerPkg(pidx)
+	if f.isMethod {
+		pref = "rcvr"
+	}
+	b.WriteString(fmt.Sprintf("%s.Test%d(", pref, f.idx))
+	for pi := range f.params {
+		writeCom(b, pi)
+		b.WriteString(fmt.Sprintf("p%d", pi))
+	}
+	b.WriteString(")\n")
+
+	// check values returned (normal call case)
+	s.emitCheckReturnsInCaller(f, b, pidx, false /* not a reflect call */)
+	b.WriteString("  }") // end of 'if normal call' block
+	if s.tunables.doReflectCall {
+		b.WriteString("else {\n") // beginning of reflect call block
+		// now make the same call via reflection
+		b.WriteString("  // same call via reflection\n")
+		b.WriteString(fmt.Sprintf("  Mode[%d] = \"reflect\"\n", pidx))
+		if f.isMethod {
+			b.WriteString("  rcv := reflect.ValueOf(rcvr)\n")
+			b.WriteString(fmt.Sprintf("  rc := rcv.MethodByName(\"Test%d\")\n", f.idx))
+		} else {
+			b.WriteString(fmt.Sprintf("  rc := reflect.ValueOf(%s.Test%d)\n",
+				s.checkerPkg(pidx), f.idx))
+		}
+		b.WriteString("  ")
+		if len(f.returns) > 0 {
+			b.WriteString("rvslice := ")
+		}
+		b.WriteString("  rc.Call([]reflect.Value{")
+		for pi := range f.params {
+			writeCom(b, pi)
+			b.WriteString(fmt.Sprintf("reflect.ValueOf(p%d)", pi))
+		}
+		b.WriteString("})\n")
+
+		// check values returned (reflect call case)
+		s.emitCheckReturnsInCaller(f, b, pidx, true /* is a reflect call */)
+		b.WriteString("}\n") // end of reflect call block
+	}
+
+	b.WriteString(fmt.Sprintf("\n  EndFcn(%d)\n", pidx))
+
+	b.WriteString("}\n\n")
+}
+
+func checkableElements(p parm) int {
+	if p.IsBlank() {
+		return 0
+	}
+	sp, isstruct := p.(*structparm)
+	if isstruct {
+		s := 0
+		for fi := range sp.fields {
+			s += checkableElements(sp.fields[fi])
+		}
+		return s
+	}
+	ap, isarray := p.(*arrayparm)
+	if isarray {
+		if ap.nelements == 0 {
+			return 0
+		}
+		return int(ap.nelements) * checkableElements(ap.eltype)
+	}
+	return 1
+}
+
+// funcdesc describes an auto-generated helper function or global
+// variable, such as an allocation function (returns new(T)) or a
+// pointer assignment function (assigns value of T to type *T). Here
+// 'p' is a param type T, 'pp' is a pointer type *T, 'name' is the
+// name within the generated code of the function or variable and
+// 'tag' is a descriptive tag used to look up the entity in a map (so
+// that we don't have to emit multiple copies of a function that
+// assigns int to *int, for example).
+type funcdesc struct {
+	p       parm
+	pp      parm
+	name    string
+	tag     string
+	payload string
+}
+
+func (s *genstate) emitDerefFuncs(b *bytes.Buffer, emit bool) {
+	b.WriteString("// dereference helpers\n")
+	for _, fd := range s.newDerefFuncs {
+		if !emit {
+			b.WriteString(fmt.Sprintf("\n// skip derefunc %s\n", fd.name))
+			delete(s.derefFuncs, fd.tag)
+			continue
+		}
+		b.WriteString("\n//go:noinline\n")
+		b.WriteString(fmt.Sprintf("func %s(", fd.name))
+		fd.pp.Declare(b, "x", "", false)
+		b.WriteString(") ")
+		fd.p.Declare(b, "", "", false)
+		b.WriteString(" {\n")
+		b.WriteString("  return *x\n")
+		b.WriteString("}\n")
+	}
+	s.newDerefFuncs = nil
+}
+
+func (s *genstate) emitAssignFuncs(b *bytes.Buffer, emit bool) {
+	b.WriteString("// assign helpers\n")
+	for _, fd := range s.newAssignFuncs {
+		if !emit {
+			b.WriteString(fmt.Sprintf("\n// skip assignfunc %s\n", fd.name))
+			delete(s.assignFuncs, fd.tag)
+			continue
+		}
+		b.WriteString("\n//go:noinline\n")
+		b.WriteString(fmt.Sprintf("func %s(", fd.name))
+		fd.pp.Declare(b, "x", "", false)
+		b.WriteString(", ")
+		fd.p.Declare(b, "v", "", false)
+		b.WriteString(") {\n")
+		b.WriteString("  *x = v\n")
+		b.WriteString("}\n")
+	}
+	s.newAssignFuncs = nil
+}
+
+func (s *genstate) emitNewFuncs(b *bytes.Buffer, emit bool) {
+	b.WriteString("// 'new' funcs\n")
+	for _, fd := range s.newAllocFuncs {
+		if !emit {
+			b.WriteString(fmt.Sprintf("\n// skip newfunc %s\n", fd.name))
+			delete(s.allocFuncs, fd.tag)
+			continue
+		}
+		b.WriteString("\n//go:noinline\n")
+		b.WriteString(fmt.Sprintf("func %s(", fd.name))
+		fd.p.Declare(b, "i", "", false)
+		b.WriteString(") ")
+		fd.pp.Declare(b, "", "", false)
+		b.WriteString(" {\n")
+		b.WriteString("  x := new(")
+		fd.p.Declare(b, "", "", false)
+		b.WriteString(")\n")
+		b.WriteString("  *x = i\n")
+		b.WriteString("  return x\n")
+		b.WriteString("}\n\n")
+	}
+	s.newAllocFuncs = nil
+}
+
+func (s *genstate) emitGlobalVars(b *bytes.Buffer, emit bool) {
+	b.WriteString("// global vars\n")
+	for _, fd := range s.newGlobVars {
+		if !emit {
+			b.WriteString(fmt.Sprintf("\n// skip gvar %s\n", fd.name))
+			delete(s.globVars, fd.tag)
+			continue
+		}
+		b.WriteString("var ")
+		fd.pp.Declare(b, fd.name, "", false)
+		b.WriteString("\n")
+	}
+	s.newGlobVars = nil
+	b.WriteString("\n")
+}
+
+func (s *genstate) emitGenValFuncs(f *funcdef, b *bytes.Buffer, emit bool) {
+	b.WriteString("// genval helpers\n")
+	for _, fd := range s.newGenvalFuncs {
+		if !emit {
+			b.WriteString(fmt.Sprintf("\n// skip genvalfunc %s\n", fd.name))
+			delete(s.genvalFuncs, fd.tag)
+			continue
+		}
+		b.WriteString("\n//go:noinline\n")
+		rcvr := ""
+		if f.mapkeyts != "" {
+			rcvr = fmt.Sprintf("(mkt *%s) ", f.mapkeyts)
+		}
+		b.WriteString(fmt.Sprintf("func %s%s() ", rcvr, fd.name))
+		fd.p.Declare(b, "", "", false)
+		b.WriteString(" {\n")
+		if f.mapkeyts != "" {
+			contained := containedParms(fd.p)
+			for _, cp := range contained {
+				mp, ismap := cp.(*mapparm)
+				if ismap {
+					b.WriteString(fmt.Sprintf("  %s := mkt.%s\n",
+						mp.keytmp, mp.keytmp))
+					b.WriteString(fmt.Sprintf("  _ = %s\n", mp.keytmp))
+				}
+			}
+		}
+		b.WriteString(fmt.Sprintf("  return %s\n", fd.payload))
+		b.WriteString("}\n")
+	}
+	s.newGenvalFuncs = nil
+}
+
+func (s *genstate) emitAddrTakenHelpers(f *funcdef, b *bytes.Buffer, emit bool) {
+	b.WriteString("// begin addr taken helpers\n")
+	s.emitDerefFuncs(b, emit)
+	s.emitAssignFuncs(b, emit)
+	s.emitNewFuncs(b, emit)
+	s.emitGlobalVars(b, emit)
+	s.emitGenValFuncs(f, b, emit)
+	b.WriteString("// end addr taken helpers\n")
+}
+
+func (s *genstate) genGlobVar(p parm) string {
+	var pp parm
+	ppp := mkPointerParm(p)
+	pp = &ppp
+	b := bytes.NewBuffer(nil)
+	pp.Declare(b, "gv", "", false)
+	tag := b.String()
+	gv, ok := s.globVars[tag]
+	if ok {
+		return gv
+	}
+	gv = fmt.Sprintf("gvar_%d", len(s.globVars))
+	s.newGlobVars = append(s.newGlobVars, funcdesc{pp: pp, p: p, name: gv, tag: tag})
+	s.globVars[tag] = gv
+	return gv
+}
+
+func (s *genstate) genParamDerefFunc(p parm) string {
+	var pp parm
+	ppp := mkPointerParm(p)
+	pp = &ppp
+	b := bytes.NewBuffer(nil)
+	pp.Declare(b, "x", "", false)
+	tag := b.String()
+	f, ok := s.derefFuncs[tag]
+	if ok {
+		return f
+	}
+	f = fmt.Sprintf("deref_%d", len(s.derefFuncs))
+	s.newDerefFuncs = append(s.newDerefFuncs, funcdesc{pp: pp, p: p, name: f, tag: tag})
+	s.derefFuncs[tag] = f
+	return f
+}
+
+func (s *genstate) genAssignFunc(p parm) string {
+	var pp parm
+	ppp := mkPointerParm(p)
+	pp = &ppp
+	b := bytes.NewBuffer(nil)
+	pp.Declare(b, "x", "", false)
+	tag := b.String()
+	f, ok := s.assignFuncs[tag]
+	if ok {
+		return f
+	}
+	f = fmt.Sprintf("retassign_%d", len(s.assignFuncs))
+	s.newAssignFuncs = append(s.newAssignFuncs, funcdesc{pp: pp, p: p, name: f, tag: tag})
+	s.assignFuncs[tag] = f
+	return f
+}
+
+func (s *genstate) genAllocFunc(p parm) string {
+	var pp parm
+	ppp := mkPointerParm(p)
+	pp = &ppp
+	b := bytes.NewBuffer(nil)
+	pp.Declare(b, "x", "", false)
+	tag := b.String()
+	f, ok := s.allocFuncs[tag]
+	if ok {
+		return f
+	}
+	f = fmt.Sprintf("New_%d", len(s.allocFuncs))
+	s.newAllocFuncs = append(s.newAllocFuncs, funcdesc{pp: pp, p: p, name: f, tag: tag})
+	s.allocFuncs[tag] = f
+	return f
+}
+
+func (s *genstate) genParamRef(p parm, idx int) string {
+	switch p.AddrTaken() {
+	case notAddrTaken:
+		return fmt.Sprintf("p%d", idx)
+	case addrTakenSimple, addrTakenHeap:
+		return fmt.Sprintf("(*ap%d)", idx)
+	case addrTakenPassed:
+		f := s.genParamDerefFunc(p)
+		return fmt.Sprintf("%s(ap%d)", f, idx)
+	default:
+		panic("bad")
+	}
+}
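+
+// For example (illustrative sketch), for parameter index 2 of a non-control
+// type, genParamRef yields one of
+//
+//    p2            // notAddrTaken
+//    (*ap2)        // addrTakenSimple, addrTakenHeap
+//    deref_0(ap2)  // addrTakenPassed, via a helper from genParamDerefFunc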
+
+func (s *genstate) genReturnAssign(b *bytes.Buffer, r parm, idx int, val string) {
+	switch r.AddrTaken() {
+	case notAddrTaken:
+		b.WriteString(fmt.Sprintf("  r%d = %s\n", idx, val))
+	case addrTakenSimple, addrTakenHeap:
+		b.WriteString(fmt.Sprintf("  (*ar%d) = %v\n", idx, val))
+	case addrTakenPassed:
+		f := s.genAssignFunc(r)
+		b.WriteString(fmt.Sprintf("  %s(ar%d, %v)\n", f, idx, val))
+	default:
+		panic("bad")
+	}
+}
+
+func (s *genstate) emitParamElemCheck(f *funcdef, b *bytes.Buffer, p parm, pvar string, cvar string, paramidx int, elemidx int) {
+	if p.SkipCompare() == SkipAll {
+		b.WriteString(fmt.Sprintf("  // selective skip of %s\n", pvar))
+		b.WriteString(fmt.Sprintf("  _ = %s\n", cvar))
+		return
+	} else if p.SkipCompare() == SkipPayload {
+		switch p.(type) {
+		case *stringparm, *arrayparm:
+			b.WriteString(fmt.Sprintf("  if len(%s) != len(%s) { // skip payload\n",
+				pvar, cvar))
+		default:
+			panic("should never happen")
+		}
+	} else {
+		basep, star := genDeref(p)
+		// Handle *p where p is an empty struct.
+		if basep.NumElements() == 0 {
+			return
+		}
+		if basep.HasPointer() {
+			efn := s.eqFuncRef(f, basep, false)
+			b.WriteString(fmt.Sprintf("  if !%s(%s%s, %s%s) {\n",
+				efn, star, pvar, star, cvar))
+		} else {
+			b.WriteString(fmt.Sprintf("  if %s%s != %s%s {\n",
+				star, pvar, star, cvar))
+		}
+	}
+	cm := f.complexityMeasure()
+	b.WriteString(fmt.Sprintf("    NoteFailureElem(%d, %d, %d, \"%s\", \"parm\", %d, %d, false, pad[0])\n", cm, s.pkidx, f.idx, s.checkerPkg(s.pkidx), paramidx, elemidx))
+	b.WriteString("    return\n")
+	b.WriteString("  }\n")
+}
+
+func (s *genstate) emitParamChecks(f *funcdef, b *bytes.Buffer, pidx int, value int) (int, bool) {
+	var valstr string
+	haveControl := false
+	dangling := []int{}
+	for pi, p := range f.params {
+		verb(4, "emitting parmcheck p%d numel=%d pt=%s value=%d",
+			pi, p.NumElements(), p.TypeName(), value)
+		// Consume a random value here so that the checker's random
+		// stream stays in step with the caller's (see wraprand.Check).
+		_ = uint8(s.wr.Intn(100)) < 50
+		if p.IsControl() {
+			b.WriteString(fmt.Sprintf("  if %s == 0 {\n",
+				s.genParamRef(p, pi)))
+			s.emitReturn(f, b, false)
+			b.WriteString("  }\n")
+			haveControl = true
+
+		} else if p.IsBlank() {
+			valstr, value = s.GenValue(f, p, value, false)
+			if f.recur {
+				b.WriteString(fmt.Sprintf("  brc%d := %s\n", pi, valstr))
+			} else {
+				b.WriteString(fmt.Sprintf("  _ = %s\n", valstr))
+			}
+		} else {
+			numel := p.NumElements()
+			cel := checkableElements(p)
+			for i := 0; i < numel; i++ {
+				verb(4, "emitting check-code for p%d el %d value=%d", pi, i, value)
+				elref, elparm := p.GenElemRef(i, s.genParamRef(p, pi))
+				valstr, value = s.GenValue(f, elparm, value, false)
+				if elref == "" || elref == "_" || cel == 0 {
+					b.WriteString(fmt.Sprintf("  // blank skip: %s\n", valstr))
+					continue
+				} else {
+					basep, _ := genDeref(elparm)
+					// Handle *p where p is an empty struct.
+					if basep.NumElements() == 0 {
+						continue
+					}
+					cvar := fmt.Sprintf("p%df%dc", pi, i)
+					b.WriteString(fmt.Sprintf("  %s := %s\n", cvar, valstr))
+					s.emitParamElemCheck(f, b, elparm, elref, cvar, pi, i)
+				}
+			}
+			if p.AddrTaken() != notAddrTaken {
+				dangling = append(dangling, pi)
+			}
+		}
+		if value != f.values[pi] {
+			fmt.Fprintf(os.Stderr, "internal error: checker/caller value mismatch after emitting param %d func Test%d pkg %s: caller %d checker %d\n", pi, f.idx, s.checkerPkg(pidx), f.values[pi], value)
+			s.errs++
+		}
+	}
+	for _, pi := range dangling {
+		b.WriteString(fmt.Sprintf("  _ = ap%d // ref\n", pi))
+	}
+
+	// receiver value check
+	if f.isMethod {
+		numel := f.receiver.NumElements()
+		for i := 0; i < numel; i++ {
+			verb(4, "emitting check-code for rcvr el %d value=%d", i, value)
+			elref, elparm := f.receiver.GenElemRef(i, "rcvr")
+			valstr, value = s.GenValue(f, elparm, value, false)
+			if elref == "" || strings.HasPrefix(elref, "_") || f.receiver.IsBlank() {
+				verb(4, "empty skip rcvr el %d", i)
+				continue
+			} else {
+
+				basep, _ := genDeref(elparm)
+				// Handle *p where p is an empty struct.
+				if basep.NumElements() == 0 {
+					continue
+				}
+				cvar := fmt.Sprintf("rcvrf%dc", i)
+				b.WriteString(fmt.Sprintf("  %s := %s\n", cvar, valstr))
+				s.emitParamElemCheck(f, b, elparm, elref, cvar, -1, i)
+			}
+		}
+	}
+
+	return value, haveControl
+}
+
+// emitDeferChecks creates code like
+//
+//     defer func(...args...) {
+//       check arg
+//       check param
+//     }(...)
+//
+// where we randomly choose to either pass a param through to the
+// function literal, or have the param captured by the closure, then
+// check its value in the defer.
+func (s *genstate) emitDeferChecks(f *funcdef, b *bytes.Buffer, pidx int, value int) int {
+
+	if len(f.params) == 0 {
+		return value
+	}
+
+	// make a pass through the params and randomly decide which will be passed into the func.
+	passed := []bool{}
+	for i := range f.params {
+		p := f.dodefp[i] < 50
+		passed = append(passed, p)
+	}
+
+	b.WriteString("  defer func(")
+	pc := 0
+	for pi, p := range f.params {
+		if p.IsControl() || p.IsBlank() {
+			continue
+		}
+		if passed[pi] {
+			writeCom(b, pc)
+			n := fmt.Sprintf("p%d", pi)
+			p.Declare(b, n, "", false)
+			pc++
+		}
+	}
+	b.WriteString(") {\n")
+
+	for pi, p := range f.params {
+		if p.IsControl() || p.IsBlank() {
+			continue
+		}
+		which := "passed"
+		if !passed[pi] {
+			which = "captured"
+		}
+		b.WriteString("  // check parm " + which + "\n")
+		numel := p.NumElements()
+		cel := checkableElements(p)
+		for i := 0; i < numel; i++ {
+			elref, elparm := p.GenElemRef(i, s.genParamRef(p, pi))
+			if elref == "" || elref == "_" || cel == 0 {
+				verb(4, "empty skip p%d el %d", pi, i)
+				continue
+			} else {
+				basep, _ := genDeref(elparm)
+				// Handle *p where p is an empty struct.
+				if basep.NumElements() == 0 {
+					continue
+				}
+				cvar := fmt.Sprintf("p%df%dc", pi, i)
+				s.emitParamElemCheck(f, b, elparm, elref, cvar, pi, i)
+			}
+		}
+	}
+	b.WriteString("  } (")
+	pc = 0
+	for pi, p := range f.params {
+		if p.IsControl() || p.IsBlank() {
+			continue
+		}
+		if passed[pi] {
+			writeCom(b, pc)
+			b.WriteString(fmt.Sprintf("p%d", pi))
+			pc++
+		}
+	}
+	b.WriteString(")\n\n")
+
+	return value
+}
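+
+// Sketch of the emitted code (illustrative, for a single int32 param p0
+// that happens to be passed rather than captured):
+//
+//    defer func(p0 int32) {
+//        // check parm passed
+//        if p0 != p0f0c {
+//            NoteFailureElem(...) // args as in the regular param checks
+//            return
+//        }
+//    } (p0)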
+
+func (s *genstate) emitVarAssign(f *funcdef, b *bytes.Buffer, r parm, rname string, value int, caller bool) int {
+	var valstr string
+	isassign := uint8(s.wr.Intn(100)) < 50
+	if rmp, ismap := r.(*mapparm); ismap && isassign {
+		// emit: m := make(...); m[mkt.k] = v
+		r.Declare(b, "  "+rname+" := make(", ")\n", caller)
+		valstr, value = s.GenValue(f, rmp.valtype, value, caller)
+		b.WriteString(fmt.Sprintf("  %s[mkt.%s] = %s\n",
+			rname, rmp.keytmp, valstr))
+	} else {
+		// emit r = c
+		valstr, value = s.GenValue(f, r, value, caller)
+		b.WriteString(fmt.Sprintf("  %s := %s\n", rname, valstr))
+	}
+	return value
+}
+
+func (s *genstate) emitChecker(f *funcdef, b *bytes.Buffer, pidx int, emit bool) {
+	verb(4, "emitting struct and array defs")
+	s.emitStructAndArrayDefs(f, b)
+	b.WriteString(fmt.Sprintf("// %d returns %d params\n", len(f.returns), len(f.params)))
+	if s.Pragma != "" {
+		b.WriteString("//go:" + s.Pragma + "\n")
+	}
+	b.WriteString("//go:noinline\n")
+
+	b.WriteString("func")
+
+	if f.isMethod {
+		b.WriteString(" (")
+		n := "rcvr"
+		if f.receiver.IsBlank() {
+			n = "_"
+		}
+		f.receiver.Declare(b, n, "", false)
+		b.WriteString(")")
+	}
+
+	b.WriteString(fmt.Sprintf(" Test%d(", f.idx))
+
+	verb(4, "emitting checker p%d/Test%d", pidx, f.idx)
+
+	// params
+	for pi, p := range f.params {
+		writeCom(b, pi)
+		n := fmt.Sprintf("p%d", pi)
+		if p.IsBlank() {
+			n = "_"
+		}
+		p.Declare(b, n, "", false)
+	}
+	b.WriteString(") ")
+
+	// returns
+	if len(f.returns) > 0 {
+		b.WriteString("(")
+	}
+	for ri, r := range f.returns {
+		writeCom(b, ri)
+		r.Declare(b, fmt.Sprintf("r%d", ri), "", false)
+	}
+	if len(f.returns) > 0 {
+		b.WriteString(")")
+	}
+	b.WriteString(" {\n")
+
+	// local storage
+	b.WriteString("  // consume some stack space, so as to trigger morestack\n")
+	b.WriteString(fmt.Sprintf("  var pad [%d]uint64\n", f.rstack))
+	b.WriteString(fmt.Sprintf("  pad[FailCount[%d] & 0x1]++\n", pidx))
+
+	value := 1
+
+	// generate map key tmps
+	s.wr.Checkpoint("before map key temps")
+	value = s.emitMapKeyTmps(f, b, pidx, value, false)
+
+	// generate return constants
+	s.wr.Checkpoint("before return constants")
+	for ri, r := range f.returns {
+		rc := fmt.Sprintf("rc%d", ri)
+		value = s.emitVarAssign(f, b, r, rc, value, false)
+	}
+
+	// Prepare to reference params/returns by address.
+	lists := [][]parm{f.params, f.returns}
+	names := []string{"p", "r"}
+	var aCounts [2]int
+	for i, lst := range lists {
+		for pi, p := range lst {
+			if p.AddrTaken() == notAddrTaken {
+				continue
+			}
+			aCounts[i]++
+			n := names[i]
+			b.WriteString(fmt.Sprintf("  a%s%d := &%s%d\n", n, pi, n, pi))
+			if p.AddrTaken() == addrTakenHeap {
+				gv := s.genGlobVar(p)
+				b.WriteString(fmt.Sprintf("  %s = a%s%d\n", gv, n, pi))
+			}
+		}
+	}
+
+	if s.EmitBad == 2 {
+		if s.BadPackageIdx == pidx && s.BadFuncIdx == f.idx {
+			b.WriteString("  // force runtime failure here (debugging)\n")
+			b.WriteString(fmt.Sprintf("    NoteFailure(%d, %d, %d, \"%s\", \"artificial\", %d, true, uint64(0))\n", f.complexityMeasure(), pidx, f.idx, s.checkerPkg(pidx), 0))
+		}
+	}
+
+	// parameter checking code
+	var haveControl bool
+	s.wr.Checkpoint("before param checks")
+	value, haveControl = s.emitParamChecks(f, b, pidx, value)
+
+	// defer testing
+	if s.tunables.doDefer && f.dodefc < s.tunables.deferFraction {
+		s.wr.Checkpoint("before defer checks")
+		_ = s.emitDeferChecks(f, b, pidx, value)
+	}
+
+	// returns
+	s.emitReturn(f, b, haveControl)
+
+	b.WriteString(fmt.Sprintf("  // %d addr-taken params, %d addr-taken returns\n",
+		aCounts[0], aCounts[1]))
+
+	b.WriteString("}\n\n")
+
+	// emit any new helper funcs referenced by this test function
+	s.emitAddrTakenHelpers(f, b, emit)
+}
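+
+// Sketch of an emitted checker (illustrative only; the constants, indices,
+// and the "<checkerPkg>" name are placeholders): for a function with one
+// int32 param and one int32 return, the output looks roughly like
+//
+//    //go:noinline
+//    func Test1(p0 int32) (r0 int32) {
+//        // consume some stack space, so as to trigger morestack
+//        var pad [16]uint64
+//        pad[FailCount[0] & 0x1]++
+//        rc0 := int32(-13)
+//        p0f0c := int32(42)
+//        if p0 != p0f0c {
+//            NoteFailureElem(2, 0, 1, "<checkerPkg>", "parm", 0, 0, false, pad[0])
+//            return
+//        }
+//        return rc0
+//    }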
+
+// complexityMeasure returns an integer that estimates how complex a
+// given test function is relative to some other function. The more
+// parameters + returns and the more complicated the types of the
+// params/returns, the higher the number returned here. In theory this
+// could be worked into the minimization process (e.g. pick the least
+// complex func that reproduces the failure), but that isn't wired
+// up yet.
+func (f *funcdef) complexityMeasure() int {
+	v := int(0)
+	if f.isMethod {
+		v += f.receiver.NumElements()
+	}
+	for _, p := range f.params {
+		v += p.NumElements()
+	}
+	for _, r := range f.returns {
+		v += r.NumElements()
+	}
+	return v
+}
+
+// emitRecursiveCall generates a recursive call to the test function in question.
+func (s *genstate) emitRecursiveCall(f *funcdef) string {
+	b := bytes.NewBuffer(nil)
+	rcvr := ""
+	if f.isMethod {
+		rcvr = "rcvr."
+	}
+	b.WriteString(fmt.Sprintf(" %sTest%d(", rcvr, f.idx))
+	for pi, p := range f.params {
+		writeCom(b, pi)
+		if p.IsControl() {
+			b.WriteString(fmt.Sprintf(" %s-1", s.genParamRef(p, pi)))
+		} else {
+			if !p.IsBlank() {
+				b.WriteString(fmt.Sprintf(" %s", s.genParamRef(p, pi)))
+			} else {
+				b.WriteString(fmt.Sprintf(" brc%d", pi))
+			}
+		}
+	}
+	b.WriteString(")")
+	return b.String()
+}
+
+// emitReturn generates a return sequence.
+func (s *genstate) emitReturn(f *funcdef, b *bytes.Buffer, doRecursiveCall bool) {
+	// If any of the return values are address-taken, then instead of
+	//
+	//   return x, y, z
+	//
+	// we emit
+	//
+	//   r1 = ...
+	//   r2 = ...
+	//   ...
+	//   return
+	//
+	// Make an initial pass through the returns to see if we need to do this.
+	// Figure out the final return values in the process.
+	indirectReturn := false
+	retvals := []string{}
+	for ri, r := range f.returns {
+		if r.AddrTaken() != notAddrTaken {
+			indirectReturn = true
+		}
+		t := ""
+		if doRecursiveCall {
+			t = "t"
+		}
+		retvals = append(retvals, fmt.Sprintf("rc%s%d", t, ri))
+	}
+
+	// generate the recursive call itself if applicable
+	if doRecursiveCall {
+		b.WriteString("  // recursive call\n  ")
+		if s.ForceStackGrowth {
+			b.WriteString("  hackStack() // force stack growth on next call\n")
+		}
+		rcall := s.emitRecursiveCall(f)
+		if indirectReturn {
+			for ri := range f.returns {
+				writeCom(b, ri)
+				b.WriteString(fmt.Sprintf("  rct%d", ri))
+			}
+			b.WriteString(" := ")
+			b.WriteString(rcall)
+			b.WriteString("\n")
+		} else {
+			if len(f.returns) == 0 {
+				b.WriteString(fmt.Sprintf("%s\n  return\n", rcall))
+			} else {
+				b.WriteString(fmt.Sprintf("  return %s\n", rcall))
+			}
+			return
+		}
+	}
+
+	// now the actual return
+	if indirectReturn {
+		for ri, r := range f.returns {
+			s.genReturnAssign(b, r, ri, retvals[ri])
+		}
+		b.WriteString("  return\n")
+	} else {
+		b.WriteString("  return ")
+		for ri := range f.returns {
+			writeCom(b, ri)
+			b.WriteString(retvals[ri])
+		}
+		b.WriteString("\n")
+	}
+}
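+
+// For example (illustrative sketch), with two returns where r1 is
+// address-taken, the emitted epilogue looks roughly like
+//
+//    r0 = rc0
+//    (*ar1) = rc1
+//    return
+//
+// whereas with no address-taken returns it is simply "return rc0, rc1".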
+
+func (s *genstate) GenPair(calloutfile *os.File, checkoutfile *os.File, fidx int, pidx int, b *bytes.Buffer, seed int64, emit bool) int64 {
+
+	verb(1, "gen fidx %d pidx %d", fidx, pidx)
+
+	checkTunables(tunables)
+	s.tunables = tunables
+
+	// Generate a function with a random number of params and returns
+	s.wr = NewWrapRand(seed, s.RandCtl)
+	s.wr.tag = "genfunc"
+	fp := s.GenFunc(fidx, pidx)
+
+	// Emit caller side
+	wrcaller := NewWrapRand(seed, s.RandCtl)
+	s.wr = wrcaller
+	s.wr.tag = "caller"
+	s.emitCaller(fp, b, pidx)
+	if emit {
+		b.WriteTo(calloutfile)
+	}
+	b.Reset()
+
+	// Emit checker side
+	wrchecker := NewWrapRand(seed, s.RandCtl)
+	s.wr = wrchecker
+	s.wr.tag = "checker"
+	s.emitChecker(fp, b, pidx, emit)
+	if emit {
+		b.WriteTo(checkoutfile)
+	}
+	b.Reset()
+	wrchecker.Check(wrcaller)
+
+	return seed + 1
+}
+
+func (s *genstate) openOutputFile(filename string, pk string, imports []string, ipref string) *os.File {
+	iprefix := func(f string) string {
+		if ipref == "" {
+			return f
+		}
+		return ipref + "/" + f
+	}
+	verb(1, "opening %s", filename)
+	outf, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
+	if err != nil {
+		log.Fatal(err)
+	}
+	haveunsafe := false
+	outf.WriteString(fmt.Sprintf("package %s\n\n", pk))
+	for _, imp := range imports {
+		if imp == "reflect" {
+			outf.WriteString("import \"reflect\"\n")
+			continue
+		}
+		if imp == "unsafe" {
+			outf.WriteString("import _ \"unsafe\"\n")
+			haveunsafe = true
+			continue
+		}
+		if imp == s.utilsPkg() {
+
+			outf.WriteString(fmt.Sprintf("import . \"%s\"\n", iprefix(imp)))
+			continue
+		}
+		outf.WriteString(fmt.Sprintf("import \"%s\"\n", iprefix(imp)))
+	}
+	outf.WriteString("\n")
+	if s.ForceStackGrowth && haveunsafe {
+		outf.WriteString("// Hack: reach into runtime to grab this testing hook.\n")
+		outf.WriteString("//go:linkname hackStack runtime.gcTestMoveStackOnNextCall\n")
+		outf.WriteString("func hackStack()\n\n")
+	}
+	return outf
+}
+
+type miscVals struct {
+	NumTpk   int
+	MaxFail  int
+	NumTests int
+}
+
+const utilsTemplate = `
+
+import (
+  "fmt"
+  "os"
+)
+
+type UtilsType int
+var ParamFailCount [{{.NumTpk}}]int
+var ReturnFailCount [{{.NumTpk}}]int
+var FailCount [{{.NumTpk}}]int
+var Mode [{{.NumTpk}}]string
+
+//go:noinline
+func NoteFailure(cm int, pidx int, fidx int, pkg string, pref string, parmNo int, isret bool, _ uint64) {
+	if isret {
+		if ParamFailCount[pidx] != 0 {
+			return
+		}
+		ReturnFailCount[pidx]++
+	} else {
+		ParamFailCount[pidx]++
+	}
+	fmt.Fprintf(os.Stderr, "Error: fail %s |%d|%d|%d| =%s.Test%d= %s %d\n", Mode, cm, pidx, fidx, pkg, fidx, pref, parmNo)
+
+	if ParamFailCount[pidx]+FailCount[pidx]+ReturnFailCount[pidx] > {{.MaxFail}} {
+		os.Exit(1)
+	}
+}
+
+//go:noinline
+func NoteFailureElem(cm int, pidx int, fidx int, pkg string, pref string, parmNo int, elem int, isret bool, _ uint64) {
+
+	if isret {
+		if ParamFailCount[pidx] != 0 {
+			return
+		}
+		ReturnFailCount[pidx]++
+	} else {
+		ParamFailCount[pidx]++
+	}
+	fmt.Fprintf(os.Stderr, "Error: fail %s |%d|%d|%d| =%s.Test%d= %s %d elem %d\n", Mode, cm, pidx, fidx, pkg, fidx, pref, parmNo, elem)
+
+	if ParamFailCount[pidx]+FailCount[pidx]+ReturnFailCount[pidx] > {{.MaxFail}} {
+		os.Exit(1)
+	}
+}
+
+func BeginFcn(p int) {
+	ParamFailCount[p] = 0
+	ReturnFailCount[p] = 0
+}
+
+func EndFcn(p int) {
+	FailCount[p] += ParamFailCount[p]
+	FailCount[p] += ReturnFailCount[p]
+}
+`
+
+func (s *genstate) emitUtils(outf *os.File, maxfail int, numtpk int) {
+	vals := miscVals{
+		NumTpk:  numtpk,
+		MaxFail: maxfail,
+	}
+	t := template.Must(template.New("utils").Parse(utilsTemplate))
+	err := t.Execute(outf, vals)
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+
+const mainPreamble = `
+
+import (
+	"fmt"
+	"os"
+)
+
+func main() {
+  fmt.Fprintf(os.Stderr, "starting main\n")
+`
+
+func (s *genstate) emitMain(outf *os.File, numit int, fcnmask map[int]int, pkmask map[int]int) {
+	fmt.Fprintf(outf, "%s", mainPreamble)
+	fmt.Fprintf(outf, "  pch := make(chan bool, %d)\n", s.NumTestPackages)
+	for k := 0; k < s.NumTestPackages; k++ {
+		cp := fmt.Sprintf("%s%s%d", s.Tag, CallerName, k)
+		fmt.Fprintf(outf, "  go func(ch chan bool) {\n")
+		for i := 0; i < numit; i++ {
+			if shouldEmitFP(i, k, fcnmask, pkmask) {
+				fmt.Fprintf(outf, "    %s.%s%d(\"normal\")\n", cp, CallerName, i)
+				if s.tunables.doReflectCall {
+					fmt.Fprintf(outf, "    %s.%s%d(\"reflect\")\n", cp, CallerName, i)
+				}
+			}
+		}
+		fmt.Fprintf(outf, "    pch <- true\n")
+		fmt.Fprintf(outf, "  }(pch)\n")
+	}
+	fmt.Fprintf(outf, "  for pidx := 0; pidx < %d; pidx++ {\n", s.NumTestPackages)
+	fmt.Fprintf(outf, "    _ = <- pch\n")
+	fmt.Fprintf(outf, "  }\n")
+	fmt.Fprintf(outf, "  tf := 0\n")
+	fmt.Fprintf(outf, "  for pidx := 0; pidx < %d; pidx++ {\n", s.NumTestPackages)
+	fmt.Fprintf(outf, "    tf += FailCount[pidx]\n")
+	fmt.Fprintf(outf, "  }\n")
+	fmt.Fprintf(outf, "  if tf != 0 {\n")
+	fmt.Fprintf(outf, "    fmt.Fprintf(os.Stderr, \"FAILURES: %%d\\n\", tf)\n")
+	fmt.Fprintf(outf, "    os.Exit(2)\n")
+	fmt.Fprintf(outf, "  }\n")
+	fmt.Fprintf(outf, "  fmt.Fprintf(os.Stderr, \"finished %d tests\\n\")\n", numit*s.NumTestPackages)
+	fmt.Fprintf(outf, "}\n")
+}
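+
+// Sketch of the generated main (illustrative; the caller package and
+// function names shown here are placeholders built from s.Tag and
+// CallerName): with two test packages it looks roughly like
+//
+//    func main() {
+//        fmt.Fprintf(os.Stderr, "starting main\n")
+//        pch := make(chan bool, 2)
+//        go func(ch chan bool) {
+//            <callerPkg0>.<Caller>0("normal")
+//            // ... remaining tests for package 0 ...
+//            pch <- true
+//        }(pch)
+//        // ... a similar goroutine for package 1 ...
+//        for pidx := 0; pidx < 2; pidx++ {
+//            _ = <-pch
+//        }
+//        // sum FailCount across packages; report FAILURES and exit(2) if nonzero
+//    }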
+
+func makeDir(d string) {
+	fi, err := os.Stat(d)
+	if err == nil && fi.IsDir() {
+		return
+	}
+	verb(1, "creating %s", d)
+	if err := os.Mkdir(d, 0777); err != nil {
+		log.Fatal(err)
+	}
+}
+
+func (s *genstate) callerPkg(which int) string {
+	return s.Tag + CallerName + strconv.Itoa(which)
+}
+
+func (s *genstate) callerFile(which int) string {
+	cp := s.callerPkg(which)
+	return filepath.Join(s.OutDir, cp, cp+".go")
+}
+
+func (s *genstate) checkerPkg(which int) string {
+	return s.Tag + CheckerName + strconv.Itoa(which)
+}
+
+func (s *genstate) checkerFile(which int) string {
+	cp := s.checkerPkg(which)
+	return filepath.Join(s.OutDir, cp, cp+".go")
+}
+
+func (s *genstate) utilsPkg() string {
+	return s.Tag + "Utils"
+}
+
+func (s *genstate) beginPackage(pkidx int) {
+	s.pkidx = pkidx
+	s.derefFuncs = make(map[string]string)
+	s.assignFuncs = make(map[string]string)
+	s.allocFuncs = make(map[string]string)
+	s.globVars = make(map[string]string)
+	s.genvalFuncs = make(map[string]string)
+}
+
+func runImports(files []string) {
+	verb(1, "... running goimports")
+	args := make([]string, 0, len(files)+1)
+	args = append(args, "-w")
+	args = append(args, files...)
+	cmd := exec.Command("goimports", args...)
+	coutput, cerr := cmd.CombinedOutput()
+	if cerr != nil {
+		log.Fatalf("goimports command failed: %s", string(coutput))
+	}
+	verb(1, "... goimports run complete")
+}
+
+// shouldEmitFP returns true if we should actually emit code for the function
+// with the specified package + fcn indices. For "regular" runs, fcnmask and pkmask
+// will be empty, meaning we want to emit every function in every package. The
+// fuzz-runner program also tries to do testcase "minimization", which means that it
+// will try to whittle down the set of packages and functions (by running the generator
+// using the fcnmask and pkmask options) to emit only specific packages or functions.
+func shouldEmitFP(fn int, pk int, fcnmask map[int]int, pkmask map[int]int) bool {
+	emitpk := true
+	emitfn := true
+	if len(pkmask) != 0 {
+		emitpk = false
+		if _, ok := pkmask[pk]; ok {
+			emitpk = true
+		}
+	}
+	if len(fcnmask) != 0 {
+		emitfn = false
+		if _, ok := fcnmask[fn]; ok {
+			emitfn = true
+		}
+	}
+	doemit := emitpk && emitfn
+	verb(2, "shouldEmitFP(F=%d,P=%d) returns %v", fn, pk, doemit)
+	return doemit
+}
+
+// Generate is the top level code generation hook for this package.
+// Emits code according to the schema in config object 'c'.
+func Generate(c GenConfig) int {
+	mainpkg := c.Tag + "Main"
+
+	var ipref string
+	if len(c.PkgPath) > 0 {
+		ipref = c.PkgPath
+	}
+
+	s := genstate{
+		GenConfig: c,
+		ipref:     ipref,
+	}
+
+	if s.OutDir != "." {
+		verb(1, "creating %s", s.OutDir)
+		makeDir(s.OutDir)
+	}
+
+	mainimports := []string{}
+	for i := 0; i < s.NumTestPackages; i++ {
+		if shouldEmitFP(-1, i, nil, s.PkgMask) {
+			makeDir(s.OutDir + "/" + s.callerPkg(i))
+			makeDir(s.OutDir + "/" + s.checkerPkg(i))
+			makeDir(s.OutDir + "/" + s.utilsPkg())
+			mainimports = append(mainimports, s.callerPkg(i))
+		}
+	}
+	mainimports = append(mainimports, s.utilsPkg())
+
+	// Emit utils package.
+	verb(1, "emit utils")
+	utilsfile := s.OutDir + "/" + s.utilsPkg() + "/" + s.utilsPkg() + ".go"
+	utilsoutfile := s.openOutputFile(utilsfile, s.utilsPkg(), []string{}, "")
+	s.emitUtils(utilsoutfile, s.MaxFail, s.NumTestPackages)
+	utilsoutfile.Close()
+
+	mainfile := s.OutDir + "/" + mainpkg + ".go"
+	mainoutfile := s.openOutputFile(mainfile, "main", mainimports, ipref)
+
+	allfiles := []string{mainfile, utilsfile}
+	for k := 0; k < s.NumTestPackages; k++ {
+		callerImports := []string{s.checkerPkg(k), s.utilsPkg()}
+		checkerImports := []string{s.utilsPkg()}
+		if tunables.doReflectCall {
+			callerImports = append(callerImports, "reflect")
+		}
+		if s.ForceStackGrowth {
+			callerImports = append(callerImports, "unsafe")
+			checkerImports = append(checkerImports, "unsafe")
+		}
+		var calleroutfile, checkeroutfile *os.File
+		if shouldEmitFP(-1, k, nil, s.PkgMask) {
+			calleroutfile = s.openOutputFile(s.callerFile(k), s.callerPkg(k),
+				callerImports, ipref)
+			checkeroutfile = s.openOutputFile(s.checkerFile(k), s.checkerPkg(k),
+				checkerImports, ipref)
+			allfiles = append(allfiles, s.callerFile(k), s.checkerFile(k))
+		}
+
+		s.beginPackage(k)
+
+		var b bytes.Buffer
+		for i := 0; i < s.NumTestFunctions; i++ {
+			doemit := shouldEmitFP(i, k, s.FcnMask, s.PkgMask)
+			s.Seed = s.GenPair(calleroutfile, checkeroutfile, i, k,
+				&b, s.Seed, doemit)
+		}
+
+		// When minimization is in effect, we sometimes wind
+		// up eliminating all refs to the utils package. Add a
+		// dummy to help with this.
+		fmt.Fprintf(calleroutfile, "\n// dummy\nvar Dummy UtilsType\n")
+		fmt.Fprintf(checkeroutfile, "\n// dummy\nvar Dummy UtilsType\n")
+		calleroutfile.Close()
+		checkeroutfile.Close()
+	}
+	s.emitMain(mainoutfile, s.NumTestFunctions, s.FcnMask, s.PkgMask)
+
+	// emit go.mod
+	verb(1, "opening go.mod")
+	fn := s.OutDir + "/go.mod"
+	outf, err := os.OpenFile(fn, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
+	if err != nil {
+		log.Fatal(err)
+	}
+	outf.WriteString(fmt.Sprintf("module %s\n\ngo 1.17\n", s.PkgPath))
+	outf.Close()
+
+	verb(1, "closing files")
+	mainoutfile.Close()
+
+	if s.errs == 0 && s.RunGoImports {
+		runImports(allfiles)
+	}
+
+	return s.errs
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/mapparm.go b/cmd/signature-fuzzer/internal/fuzz-generator/mapparm.go
new file mode 100644
index 0000000..9626475
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/mapparm.go
@@ -0,0 +1,91 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+	"fmt"
+)
+
+// mapparm describes a parameter of map type; it implements the
+// "parm" interface.
+type mapparm struct {
+	aname   string
+	qname   string
+	keytype parm
+	valtype parm
+	keytmp  string
+	isBlank
+	addrTakenHow
+	isGenValFunc
+	skipCompare
+}
+
+func (p mapparm) IsControl() bool {
+	return false
+}
+
+func (p mapparm) TypeName() string {
+	return p.aname
+}
+
+func (p mapparm) QualName() string {
+	return p.qname
+}
+
+func (p mapparm) Declare(b *bytes.Buffer, prefix string, suffix string, caller bool) {
+	n := p.aname
+	if caller {
+		n = p.qname
+	}
+	b.WriteString(fmt.Sprintf("%s %s%s", prefix, n, suffix))
+}
+
+func (p mapparm) String() string {
+	return fmt.Sprintf("%s map[%s]%s", p.aname,
+		p.keytype.String(), p.valtype.String())
+}
+
+func (p mapparm) GenValue(s *genstate, f *funcdef, value int, caller bool) (string, int) {
+	var buf bytes.Buffer
+
+	verb(5, "mapparm.GenValue(%d)", value)
+
+	n := p.aname
+	if caller {
+		n = p.qname
+	}
+	buf.WriteString(fmt.Sprintf("%s{", n))
+	buf.WriteString(p.keytmp + ": ")
+
+	var valstr string
+	valstr, value = s.GenValue(f, p.valtype, value, caller)
+	buf.WriteString(valstr + "}")
+	return buf.String(), value
+}
+
+func (p mapparm) GenElemRef(elidx int, path string) (string, parm) {
+	vne := p.valtype.NumElements()
+	verb(4, "begin GenElemRef(%d,%s) on %s %d", elidx, path, p.String(), vne)
+
+	ppath := fmt.Sprintf("%s[mkt.%s]", path, p.keytmp)
+
+	// Otherwise the element we want is somewhere inside the value;
+	// recurse into the value type.
+	verb(4, "recur GenElemRef(%d,...)", elidx)
+
+	if p.IsBlank() {
+		ppath = "_"
+	}
+	return p.valtype.GenElemRef(elidx, ppath)
+}
+
+func (p mapparm) NumElements() int {
+	return p.valtype.NumElements()
+}
+
+func (p mapparm) HasPointer() bool {
+	return true
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/numparm.go b/cmd/signature-fuzzer/internal/fuzz-generator/numparm.go
new file mode 100644
index 0000000..6be0d91
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/numparm.go
@@ -0,0 +1,144 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+	"fmt"
+	"math"
+)
+
+// numparm describes a numeric parameter type; it implements the
+// "parm" interface.
+type numparm struct {
+	tag         string
+	widthInBits uint32
+	ctl         bool
+	isBlank
+	addrTakenHow
+	isGenValFunc
+	skipCompare
+}
+
+var f32parm *numparm = &numparm{
+	tag:         "float",
+	widthInBits: uint32(32),
+	ctl:         false,
+}
+var f64parm *numparm = &numparm{
+	tag:         "float",
+	widthInBits: uint32(64),
+	ctl:         false,
+}
+
+func (p numparm) TypeName() string {
+	if p.tag == "byte" {
+		return "byte"
+	}
+	return fmt.Sprintf("%s%d", p.tag, p.widthInBits)
+}
+
+func (p numparm) QualName() string {
+	return p.TypeName()
+}
+
+func (p numparm) String() string {
+	if p.tag == "byte" {
+		return "byte"
+	}
+	ctl := ""
+	if p.ctl {
+		ctl = " [ctl=yes]"
+	}
+	return fmt.Sprintf("%s%s", p.TypeName(), ctl)
+}
+
+func (p numparm) NumElements() int {
+	return 1
+}
+
+func (p numparm) IsControl() bool {
+	return p.ctl
+}
+
+func (p numparm) GenElemRef(elidx int, path string) (string, parm) {
+	return path, &p
+}
+
+func (p numparm) Declare(b *bytes.Buffer, prefix string, suffix string, caller bool) {
+	t := fmt.Sprintf("%s%d%s", p.tag, p.widthInBits, suffix)
+	if p.tag == "byte" {
+		t = fmt.Sprintf("%s%s", p.tag, suffix)
+	}
+	b.WriteString(prefix + " " + t)
+}
+
+func (p numparm) genRandNum(s *genstate, value int) (string, int) {
+	which := uint8(s.wr.Intn(int64(100)))
+	if p.tag == "int" {
+		var v int64
+		if which < 3 {
+			// max
+			v = (1 << (p.widthInBits - 1)) - 1
+
+		} else if which < 5 {
+			// min
+			v = (-1 << (p.widthInBits - 1))
+		} else {
+			nrange := int64(1 << (p.widthInBits - 2))
+			v = s.wr.Intn(nrange)
+			if value%2 != 0 {
+				v = -v
+			}
+		}
+		return fmt.Sprintf("%s%d(%d)", p.tag, p.widthInBits, v), value + 1
+	}
+	if p.tag == "uint" || p.tag == "byte" {
+		nrange := int64(1 << (p.widthInBits - 2))
+		v := s.wr.Intn(nrange)
+		if p.tag == "byte" {
+			return fmt.Sprintf("%s(%d)", p.tag, v), value + 1
+		}
+		return fmt.Sprintf("%s%d(0x%x)", p.tag, p.widthInBits, v), value + 1
+	}
+	if p.tag == "float" {
+		if p.widthInBits == 32 {
+			rf := s.wr.Float32() * (math.MaxFloat32 / 4)
+			if value%2 != 0 {
+				rf = -rf
+			}
+			return fmt.Sprintf("%s%d(%v)", p.tag, p.widthInBits, rf), value + 1
+		}
+		if p.widthInBits == 64 {
+			return fmt.Sprintf("%s%d(%v)", p.tag, p.widthInBits,
+				s.wr.NormFloat64()), value + 1
+		}
+		panic("unknown float type")
+	}
+	if p.tag == "complex" {
+		if p.widthInBits == 64 {
+			f1, v2 := f32parm.genRandNum(s, value)
+			f2, v3 := f32parm.genRandNum(s, v2)
+			return fmt.Sprintf("complex(%s,%s)", f1, f2), v3
+		}
+		if p.widthInBits == 128 {
+			f1, v2 := f64parm.genRandNum(s, value)
+			f2, v3 := f64parm.genRandNum(s, v2)
+			return fmt.Sprintf("complex(%v,%v)", f1, f2), v3
+		}
+		panic("unknown complex type")
+	}
+	panic("unknown numeric type")
+}
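+
+// For illustration, the literals produced above have shapes like the
+// following (the specific values here are invented):
+//
+//    int32(-170238)                        // "int", occasionally the min/max value
+//    uint16(0x3c2)                         // "uint", hex form
+//    byte(17)
+//    float32(8.33e+37)
+//    complex(float64(0.52),float64(-1.3))  // complex128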
+
+func (p numparm) GenValue(s *genstate, f *funcdef, value int, caller bool) (string, int) {
+	r, nv := p.genRandNum(s, value)
+	verb(5, "numparm.GenValue(%d) = %s", value, r)
+	return r, nv
+}
+
+func (p numparm) HasPointer() bool {
+	return false
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/parm.go b/cmd/signature-fuzzer/internal/fuzz-generator/parm.go
new file mode 100644
index 0000000..7ee2224
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/parm.go
@@ -0,0 +1,216 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+	"fmt"
+	"os"
+	"sort"
+)
+
+// parm is an interface describing an abstract parameter var or return
+// var; there will be concrete types of various sorts that implement
+// this interface.
+type parm interface {
+
+	// Declare emits text containing a declaration of this param
+	// or return var into the specified buffer. Prefix is a tag to
+	// prepend before the declaration (for example a variable
+	// name) followed by a space; suffix is an arbitrary string to
+	// tack onto the end of the param's type text. Here 'caller'
+	// is set to true if we're emitting the caller part of a test
+	// pair as opposed to the checker.
+	Declare(b *bytes.Buffer, prefix string, suffix string, caller bool)
+
+	// GenElemRef returns a pair [X,Y] corresponding to a
+	// component piece of some composite parm, where X is a string
+	// forming the reference (ex: ".field" if we're picking out a
+	// struct field) and Y is a parm object corresponding to the
+	// type of the element.
+	GenElemRef(elidx int, path string) (string, parm)
+
+	// GenValue constructs a new concrete random value appropriate
+	// for the type in question and returns it, along with a
+	// sequence number indicating how many random decisions we had
+	// to make. Here "s" is the current generator state, "f" is
+	// the current function we're emitting, value is a sequence
+	// number indicating how many random decisions have been made
+	// up until this point, and 'caller' is set to true if we're
+	// emitting the caller part of a test pair as opposed to the
+	// checker.  Return value is a pair [V,I] where V is the text
+	// if the value, and I is a new sequence number reflecting any
+	// additional random choices we had to make.  For example, if
+	// the parm is something like "type Foo struct { f1 int32; f2
+	// float64 }" then we might expect GenValue to emit something
+	// like "Foo{int32(-9), float64(123.123)}".
+	GenValue(s *genstate, f *funcdef, value int, caller bool) (string, int)
+
+	// IsControl returns true if this specific param has been marked
+	// as the single param that controls recursion for a recursive
+	// checker function. The test code doesn't check this param for a specific
+	// value, but instead returns early if it has value 0 or decrements it
+	// on a recursive call.
+	IsControl() bool
+
+	// NumElements returns the total number of discrete elements contained
+	// in this parm. For non-composite types, this will always be 1.
+	NumElements() int
+
+	// String returns a descriptive string for this parm.
+	String() string
+
+	// TypeName returns the non-qualified type name for this parm.
+	TypeName() string
+
+	// QualName returns a package-qualified type name for this parm.
+	QualName() string
+
+	// HasPointer returns true if this parm is of pointer type, or
+	// if it is a composite that has a pointer element somewhere inside.
+	// Strings and slices return true for this hook.
+	HasPointer() bool
+
+	// IsBlank() returns true if the name of this parm is "_" (that is,
+	// if we randomly chose to make it a blank). SetBlank() is used
+	// to set the 'blank' property for this parm.
+	IsBlank() bool
+	SetBlank(v bool)
+
+	// AddrTaken() returns a token indicating whether this parm should
+	// be address-taken or not, and the nature of the address-taken-ness
+	// (see the definition of addrTakenHow below). SetAddrTaken is used
+	// to set the address-taken property of the parm.
+	AddrTaken() addrTakenHow
+	SetAddrTaken(val addrTakenHow)
+
+	// IsGenVal() returns true if the values of this type should
+	// be obtained by calling a helper func, as opposed to
+	// emitting code inline (as one would for things like numeric
+	// types). SetIsGenVal is used to set the gen-val property of
+	// the parm.
+	IsGenVal() bool
+	SetIsGenVal(val bool)
+
+	// SkipCompare() returns a token indicating whether we've randomly
+	// decided to skip comparing the value of this param or return
+	// (entirely or just its payload). SetSkipCompare is used to set
+	// the skip-compare property of the parm.
+	SkipCompare() skipCompare
+	SetSkipCompare(val skipCompare)
+}
+
+type addrTakenHow uint8
+
+const (
+	// Param not address taken.
+	notAddrTaken addrTakenHow = 0
+
+	// Param address is taken and used for simple reads/writes.
+	addrTakenSimple addrTakenHow = 1
+
+	// Param address is taken and passed to a well-behaved function.
+	addrTakenPassed addrTakenHow = 2
+
+	// Param address is taken and stored to a global var.
+	addrTakenHeap addrTakenHow = 3
+)
+
+func (a *addrTakenHow) AddrTaken() addrTakenHow {
+	return *a
+}
+
+func (a *addrTakenHow) SetAddrTaken(val addrTakenHow) {
+	*a = val
+}
+
+type isBlank bool
+
+func (b *isBlank) IsBlank() bool {
+	return bool(*b)
+}
+
+func (b *isBlank) SetBlank(val bool) {
+	*b = isBlank(val)
+}
+
+type isGenValFunc bool
+
+func (g *isGenValFunc) IsGenVal() bool {
+	return bool(*g)
+}
+
+func (g *isGenValFunc) SetIsGenVal(val bool) {
+	*g = isGenValFunc(val)
+}
+
+type skipCompare int
+
+const (
+	// Possible values for the skip-compare property of a parm.
+	SkipAll     = -1
+	SkipNone    = 0
+	SkipPayload = 1
+)
+
+func (s *skipCompare) SkipCompare() skipCompare {
+	return skipCompare(*s)
+}
+
+func (s *skipCompare) SetSkipCompare(val skipCompare) {
+	*s = skipCompare(val)
+}
+
+// containedParms takes an arbitrary param 'p' and returns a slice
+// with 'p' itself plus any component parms contained within 'p'.
+func containedParms(p parm) []parm {
+	visited := make(map[string]parm)
+	worklist := []parm{p}
+
+	addToWork := func(p parm) {
+		if p == nil {
+			panic("not expected")
+		}
+		if _, ok := visited[p.TypeName()]; !ok {
+			worklist = append(worklist, p)
+		}
+	}
+
+	for len(worklist) != 0 {
+		cp := worklist[0]
+		worklist = worklist[1:]
+		if _, ok := visited[cp.TypeName()]; ok {
+			continue
+		}
+		visited[cp.TypeName()] = cp
+		switch x := cp.(type) {
+		case *mapparm:
+			addToWork(x.keytype)
+			addToWork(x.valtype)
+		case *structparm:
+			for _, fld := range x.fields {
+				addToWork(fld)
+			}
+		case *arrayparm:
+			addToWork(x.eltype)
+		case *pointerparm:
+			addToWork(x.totype)
+		case *typedefparm:
+			addToWork(x.target)
+		}
+	}
+	rv := []parm{}
+	for _, v := range visited {
+		rv = append(rv, v)
+	}
+	sort.Slice(rv, func(i, j int) bool {
+		if rv[i].TypeName() == rv[j].TypeName() {
+			fmt.Fprintf(os.Stderr, "%d %d %+v %+v %s %s\n", i, j, rv[i], rv[i].String(), rv[j], rv[j].String())
+			panic("unexpected")
+		}
+		return rv[i].TypeName() < rv[j].TypeName()
+	})
+	return rv
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/pointerparm.go b/cmd/signature-fuzzer/internal/fuzz-generator/pointerparm.go
new file mode 100644
index 0000000..1ec61e5
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/pointerparm.go
@@ -0,0 +1,75 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+	"fmt"
+)
+
+// pointerparm describes a parameter of pointer type; it implements the
+// "parm" interface.
+type pointerparm struct {
+	tag    string
+	totype parm
+	isBlank
+	addrTakenHow
+	isGenValFunc
+	skipCompare
+}
+
+func (p pointerparm) Declare(b *bytes.Buffer, prefix string, suffix string, caller bool) {
+	n := p.totype.TypeName()
+	if caller {
+		n = p.totype.QualName()
+	}
+	b.WriteString(fmt.Sprintf("%s *%s%s", prefix, n, suffix))
+}
+
+func (p pointerparm) GenElemRef(elidx int, path string) (string, parm) {
+	return path, &p
+}
+
+func (p pointerparm) GenValue(s *genstate, f *funcdef, value int, caller bool) (string, int) {
+	pref := ""
+	if caller {
+		pref = s.checkerPkg(s.pkidx) + "."
+	}
+	var valstr string
+	valstr, value = s.GenValue(f, p.totype, value, caller)
+	fname := s.genAllocFunc(p.totype)
+	return fmt.Sprintf("%s%s(%s)", pref, fname, valstr), value
+}
+
+func (p pointerparm) IsControl() bool {
+	return false
+}
+
+func (p pointerparm) NumElements() int {
+	return 1
+}
+
+func (p pointerparm) String() string {
+	return fmt.Sprintf("*%s", p.totype)
+}
+
+func (p pointerparm) TypeName() string {
+	return fmt.Sprintf("*%s", p.totype.TypeName())
+}
+
+func (p pointerparm) QualName() string {
+	return fmt.Sprintf("*%s", p.totype.QualName())
+}
+
+func mkPointerParm(to parm) pointerparm {
+	var pp pointerparm
+	pp.tag = "pointer"
+	pp.totype = to
+	return pp
+}
+
+func (p pointerparm) HasPointer() bool {
+	return true
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/stringparm.go b/cmd/signature-fuzzer/internal/fuzz-generator/stringparm.go
new file mode 100644
index 0000000..2da541d
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/stringparm.go
@@ -0,0 +1,64 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+)
+
+// stringparm describes a parameter of string type; it implements the
+// "parm" interface
+type stringparm struct {
+	tag string
+	isBlank
+	addrTakenHow
+	isGenValFunc
+	skipCompare
+}
+
+func (p stringparm) Declare(b *bytes.Buffer, prefix string, suffix string, caller bool) {
+	b.WriteString(prefix + " string" + suffix)
+}
+
+func (p stringparm) GenElemRef(elidx int, path string) (string, parm) {
+	return path, &p
+}
+
+var letters = []rune("�꿦3ς‚¨ƒο’¦οžšf6κ‚…8Λ‹<τ‚Š‡ρŠΆΏοŒ–(zΜ½|ο€†Ο£α‡Šρ—‡ςŸ„Όqρ§²₯筁{Π‚ΖœΔ½")
+
+func (p stringparm) GenValue(s *genstate, f *funcdef, value int, caller bool) (string, int) {
+	ns := len(letters) - 9
+	nel := int(s.wr.Intn(8))
+	st := int(s.wr.Intn(int64(ns)))
+	en := st + nel
+	if en > ns {
+		en = ns
+	}
+	return "\"" + string(letters[st:en]) + "\"", value + 1
+}
+
+func (p stringparm) IsControl() bool {
+	return false
+}
+
+func (p stringparm) NumElements() int {
+	return 1
+}
+
+func (p stringparm) String() string {
+	return "string"
+}
+
+func (p stringparm) TypeName() string {
+	return "string"
+}
+
+func (p stringparm) QualName() string {
+	return "string"
+}
+
+func (p stringparm) HasPointer() bool {
+	return false
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/structparm.go b/cmd/signature-fuzzer/internal/fuzz-generator/structparm.go
new file mode 100644
index 0000000..df90107
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/structparm.go
@@ -0,0 +1,163 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+	"fmt"
+	"strings"
+)
+
+// structparm describes a parameter of struct type; it implements the
+// "parm" interface.
+type structparm struct {
+	sname  string
+	qname  string
+	fields []parm
+	isBlank
+	addrTakenHow
+	isGenValFunc
+	skipCompare
+}
+
+func (p structparm) TypeName() string {
+	return p.sname
+}
+
+func (p structparm) QualName() string {
+	return p.qname
+}
+
+func (p structparm) Declare(b *bytes.Buffer, prefix string, suffix string, caller bool) {
+	n := p.sname
+	if caller {
+		n = p.qname
+	}
+	b.WriteString(fmt.Sprintf("%s %s%s", prefix, n, suffix))
+}
+
+func (p structparm) FieldName(i int) string {
+	if p.fields[i].IsBlank() {
+		return "_"
+	}
+	return fmt.Sprintf("F%d", i)
+}
+
+func (p structparm) String() string {
+	var buf bytes.Buffer
+
+	buf.WriteString(fmt.Sprintf("struct %s {\n", p.sname))
+	for fi, f := range p.fields {
+		buf.WriteString(fmt.Sprintf("%s %s\n", p.FieldName(fi), f.String()))
+	}
+	buf.WriteString("}")
+	return buf.String()
+}
+
+func (p structparm) GenValue(s *genstate, f *funcdef, value int, caller bool) (string, int) {
+	var buf bytes.Buffer
+
+	verb(5, "structparm.GenValue(%d)", value)
+
+	n := p.sname
+	if caller {
+		n = p.qname
+	}
+	buf.WriteString(fmt.Sprintf("%s{", n))
+	nbfi := 0
+	for fi, fld := range p.fields {
+		var valstr string
+		valstr, value = s.GenValue(f, fld, value, caller)
+		if p.fields[fi].IsBlank() {
+			buf.WriteString("/* ")
+			valstr = strings.ReplaceAll(valstr, "/*", "[[")
+			valstr = strings.ReplaceAll(valstr, "*/", "]]")
+		} else {
+			writeCom(&buf, nbfi)
+		}
+		buf.WriteString(p.FieldName(fi) + ": ")
+		buf.WriteString(valstr)
+		if p.fields[fi].IsBlank() {
+			buf.WriteString(" */")
+		} else {
+			nbfi++
+		}
+	}
+	buf.WriteString("}")
+	return buf.String(), value
+}
+
+func (p structparm) IsControl() bool {
+	return false
+}
+
+func (p structparm) NumElements() int {
+	ne := 0
+	for _, f := range p.fields {
+		ne += f.NumElements()
+	}
+	return ne
+}
+
+func (p structparm) GenElemRef(elidx int, path string) (string, parm) {
+	ct := 0
+	verb(4, "begin GenElemRef(%d,%s) on %s", elidx, path, p.String())
+
+	for fi, f := range p.fields {
+		fne := f.NumElements()
+
+		//verb(4, "+ examining field %d fne %d ct %d", fi, fne, ct)
+
+		// Empty field. Continue on.
+		if elidx == ct && fne == 0 {
+			continue
+		}
+
+		// Is this field the element we're interested in?
+		if fne == 1 && elidx == ct {
+
+			// The field in question may itself be a composite with
+			// multiple fields but only a single non-zero-sized element.
+			// If this is the case, keep digging.
+			if sp, ok := f.(*structparm); ok {
+				if len(sp.fields) > 1 {
+					ppath := fmt.Sprintf("%s.F%d", path, fi)
+					if p.fields[fi].IsBlank() || path == "_" {
+						ppath = "_"
+					}
+					return f.GenElemRef(elidx-ct, ppath)
+				}
+			}
+
+			verb(4, "found field %d type %s in GenElemRef(%d,%s)", fi, f.TypeName(), elidx, path)
+			ppath := fmt.Sprintf("%s.F%d", path, fi)
+			if p.fields[fi].IsBlank() || path == "_" {
+				ppath = "_"
+			}
+			return ppath, f
+		}
+
+		// Is the element we want somewhere inside this field?
+		if fne > 1 && elidx >= ct && elidx < ct+fne {
+			ppath := fmt.Sprintf("%s.F%d", path, fi)
+			if p.fields[fi].IsBlank() || path == "_" {
+				ppath = "_"
+			}
+			return f.GenElemRef(elidx-ct, ppath)
+		}
+
+		ct += fne
+	}
+	panic(fmt.Sprintf("GenElemRef failed for struct %s elidx %d", p.TypeName(), elidx))
+}
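+
+// For example (hypothetical type), given a struct of the form
+//
+//    struct {
+//        F0 int32
+//        F1 struct{ F0 float32; F1 float32 }
+//    }
+//
+// element 0 resolves to the path "<path>.F0", while elements 1 and 2
+// recurse into F1 and resolve to "<path>.F1.F0" and "<path>.F1.F1".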
+
+func (p structparm) HasPointer() bool {
+	for _, f := range p.fields {
+		if f.HasPointer() {
+			return true
+		}
+	}
+	return false
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/typedefparm.go b/cmd/signature-fuzzer/internal/fuzz-generator/typedefparm.go
new file mode 100644
index 0000000..27cea64
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/typedefparm.go
@@ -0,0 +1,90 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"bytes"
+	"fmt"
+)
+
+// typedefparm describes a parameter that is a typedef of some other
+// type; it implements the "parm" interface
+type typedefparm struct {
+	aname  string
+	qname  string
+	target parm
+	isBlank
+	addrTakenHow
+	isGenValFunc
+	skipCompare
+}
+
+func (p typedefparm) Declare(b *bytes.Buffer, prefix string, suffix string, caller bool) {
+	n := p.aname
+	if caller {
+		n = p.qname
+	}
+	b.WriteString(fmt.Sprintf("%s %s%s", prefix, n, suffix))
+}
+
+func (p typedefparm) GenElemRef(elidx int, path string) (string, parm) {
+	_, isarr := p.target.(*arrayparm)
+	_, isstruct := p.target.(*structparm)
+	_, ismap := p.target.(*mapparm)
+	rv, rp := p.target.GenElemRef(elidx, path)
+	// this is hacky, but I don't see a nicer way to do this
+	if isarr || isstruct || ismap {
+		return rv, rp
+	}
+	rp = &p
+	return rv, rp
+}
+
+func (p typedefparm) GenValue(s *genstate, f *funcdef, value int, caller bool) (string, int) {
+	n := p.aname
+	if caller {
+		n = p.qname
+	}
+	rv, v := s.GenValue(f, p.target, value, caller)
+	rv = n + "(" + rv + ")"
+	return rv, v
+}
+
+func (p typedefparm) IsControl() bool {
+	return false
+}
+
+func (p typedefparm) NumElements() int {
+	return p.target.NumElements()
+}
+
+func (p typedefparm) String() string {
+	return fmt.Sprintf("%s typedef of %s", p.aname, p.target.String())
+
+}
+
+func (p typedefparm) TypeName() string {
+	return p.aname
+
+}
+
+func (p typedefparm) QualName() string {
+	return p.qname
+}
+
+func (p typedefparm) HasPointer() bool {
+	return p.target.HasPointer()
+}
+
+func (s *genstate) makeTypedefParm(f *funcdef, target parm, pidx int) parm {
+	var tdp typedefparm
+	ns := len(f.typedefs)
+	tdp.aname = fmt.Sprintf("MyTypeF%dS%d", f.idx, ns)
+	tdp.qname = fmt.Sprintf("%s.MyTypeF%dS%d", s.checkerPkg(pidx), f.idx, ns)
+	tdp.target = target
+	tdp.SetBlank(uint8(s.wr.Intn(100)) < tunables.blankPerc)
+	f.typedefs = append(f.typedefs, tdp)
+	return &tdp
+}
diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/wraprand.go b/cmd/signature-fuzzer/internal/fuzz-generator/wraprand.go
new file mode 100644
index 0000000..bba178d
--- /dev/null
+++ b/cmd/signature-fuzzer/internal/fuzz-generator/wraprand.go
@@ -0,0 +1,136 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package generator
+
+import (
+	"fmt"
+	"math/rand"
+	"os"
+	"runtime"
+	"strings"
+)
+
+const (
+	RandCtlNochecks = 0
+	RandCtlChecks   = 1 << iota
+	RandCtlCapture
+	RandCtlPanic
+)
+
+func NewWrapRand(seed int64, ctl int) *wraprand {
+	rand.Seed(seed)
+	return &wraprand{seed: seed, ctl: ctl}
+}
+
+type wraprand struct {
+	f32calls  int
+	f64calls  int
+	intncalls int
+	seed      int64
+	tag       string
+	calls     []string
+	ctl       int
+}
+
+func (w *wraprand) captureCall(tag string, val string) {
+	call := tag + ": " + val + "\n"
+	pc := make([]uintptr, 10)
+	n := runtime.Callers(1, pc)
+	if n == 0 {
+		panic("why?")
+	}
+	pc = pc[:n] // pass only valid pcs to runtime.CallersFrames
+	frames := runtime.CallersFrames(pc)
+	for {
+		frame, more := frames.Next()
+		if strings.Contains(frame.File, "testing.") {
+			break
+		}
+		call += fmt.Sprintf("%s %s:%d\n", frame.Function, frame.File, frame.Line)
+		if !more {
+			break
+		}
+
+	}
+	w.calls = append(w.calls, call)
+}
+
+func (w *wraprand) Intn(n int64) int64 {
+	w.intncalls++
+	rv := rand.Int63n(n)
+	if w.ctl&RandCtlCapture != 0 {
+		w.captureCall("Intn", fmt.Sprintf("%d", rv))
+	}
+	return rv
+}
+
+func (w *wraprand) Float32() float32 {
+	w.f32calls++
+	rv := rand.Float32()
+	if w.ctl&RandCtlCapture != 0 {
+		w.captureCall("Float32", fmt.Sprintf("%f", rv))
+	}
+	return rv
+}
+
+func (w *wraprand) NormFloat64() float64 {
+	w.f64calls++
+	rv := rand.NormFloat64()
+	if w.ctl&RandCtlCapture != 0 {
+		w.captureCall("NormFloat64", fmt.Sprintf("%f", rv))
+	}
+	return rv
+}
+
+func (w *wraprand) emitCalls(fn string) {
+	outf, err := os.OpenFile(fn, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
+	if err != nil {
+		panic(err)
+	}
+	for _, c := range w.calls {
+		fmt.Fprint(outf, c)
+	}
+	outf.Close()
+}
+
+func (w *wraprand) Equal(w2 *wraprand) bool {
+	return w.f32calls == w2.f32calls &&
+		w.f64calls == w2.f64calls &&
+		w.intncalls == w2.intncalls
+}
+
+func (w *wraprand) Check(w2 *wraprand) {
+	if w.ctl != 0 && !w.Equal(w2) {
+		fmt.Fprintf(os.Stderr, "wraprand consistency check failed:\n")
+		t := "w"
+		if w.tag != "" {
+			t = w.tag
+		}
+		t2 := "w2"
+		if w2.tag != "" {
+			t2 = w2.tag
+		}
+		fmt.Fprintf(os.Stderr, " %s: {f32:%d f64:%d i:%d}\n", t,
+			w.f32calls, w.f64calls, w.intncalls)
+		fmt.Fprintf(os.Stderr, " %s: {f32:%d f64:%d i:%d}\n", t2,
+			w2.f32calls, w2.f64calls, w2.intncalls)
+		if w.ctl&RandCtlCapture != 0 {
+			f := fmt.Sprintf("/tmp/%s.txt", t)
+			f2 := fmt.Sprintf("/tmp/%s.txt", t2)
+			w.emitCalls(f)
+			w2.emitCalls(f2)
+			fmt.Fprintf(os.Stderr, "=-= emitted calls to %s, %s\n", f, f2)
+		}
+		if w.ctl&RandCtlPanic != 0 {
+			panic("bad")
+		}
+	}
+}
+
+func (w *wraprand) Checkpoint(tag string) {
+	if w.ctl&RandCtlCapture != 0 {
+		w.calls = append(w.calls, "=-=\n"+tag+"\n=-=\n")
+	}
+}
diff --git a/cmd/stringer/endtoend_test.go b/cmd/stringer/endtoend_test.go
index 3b0b39d..5b969a5 100644
--- a/cmd/stringer/endtoend_test.go
+++ b/cmd/stringer/endtoend_test.go
@@ -17,11 +17,13 @@
 	"io/ioutil"
 	"os"
 	"os/exec"
+	"path"
 	"path/filepath"
 	"strings"
 	"testing"
 
 	"golang.org/x/tools/internal/testenv"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // This file contains a test that compiles and runs each program in testdata
@@ -42,8 +44,15 @@
 	if err != nil {
 		t.Fatalf("Readdirnames: %s", err)
 	}
+	if typeparams.Enabled {
+		names = append(names, moreTests(t, "testdata/typeparams", "typeparams")...)
+	}
 	// Generate, compile, and run the test programs.
 	for _, name := range names {
+		if name == "typeparams" {
+			// ignore the directory containing the tests with type params
+			continue
+		}
 		if !strings.HasSuffix(name, ".go") {
 			t.Errorf("%s is not a Go file", name)
 			continue
@@ -56,12 +65,31 @@
 			t.Logf("cgo is not enabled for %s", name)
 			continue
 		}
-		// Names are known to be ASCII and long enough.
-		typeName := fmt.Sprintf("%c%s", name[0]+'A'-'a', name[1:len(name)-len(".go")])
-		stringerCompileAndRun(t, dir, stringer, typeName, name)
+		stringerCompileAndRun(t, dir, stringer, typeName(name), name)
 	}
 }
 
+// typeName returns a type name for stringer: the last component of the
+// file name, capitalized, without the .go suffix.
+func typeName(fname string) string {
+	// File names are known to be ASCII and to end in .go.
+	base := path.Base(fname)
+	return fmt.Sprintf("%c%s", base[0]+'A'-'a', base[1:len(base)-len(".go")])
+}
+
+func moreTests(t *testing.T, dirname, prefix string) []string {
+	x, err := os.ReadDir(dirname)
+	if err != nil {
+		// error, but try the rest of the tests
+		t.Errorf("can't read type param tess from %s: %v", dirname, err)
+		return nil
+	}
+	names := make([]string, len(x))
+	for i, f := range x {
+		names[i] = prefix + "/" + f.Name()
+	}
+	return names
+}
+
 // TestTags verifies that the -tags flag works as advertised.
 func TestTags(t *testing.T) {
 	dir, stringer := buildStringer(t)
@@ -173,7 +201,7 @@
 func stringerCompileAndRun(t *testing.T, dir, stringer, typeName, fileName string) {
 	t.Helper()
 	t.Logf("run: %s %s\n", fileName, typeName)
-	source := filepath.Join(dir, fileName)
+	source := filepath.Join(dir, path.Base(fileName))
 	err := copy(source, filepath.Join("testdata", fileName))
 	if err != nil {
 		t.Fatalf("copying file to temporary directory: %s", err)
diff --git a/cmd/stringer/testdata/typeparams/conv2.go b/cmd/stringer/testdata/typeparams/conv2.go
new file mode 100644
index 0000000..62e1cb7
--- /dev/null
+++ b/cmd/stringer/testdata/typeparams/conv2.go
@@ -0,0 +1,47 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This is a version of ../conv.go with type params
+
+// Check that constants defined as a conversion are accepted.
+
+package main
+
+import "fmt"
+
+// For now, a lone type parameter is not permitted as RHS in a type declaration (issue #45639).
+// type Other[T interface{ ~int | ~uint }] T // Imagine this is in another package.
+type Other int
+
+const (
+	// alpha Other[int] = iota
+	alpha Other = iota
+	beta
+	gamma
+	delta
+)
+
+// type Conv2 Other[int]
+type Conv2 Other
+
+const (
+	Alpha = Conv2(alpha)
+	Beta  = Conv2(beta)
+	Gamma = Conv2(gamma)
+	Delta = Conv2(delta)
+)
+
+func main() {
+	ck(Alpha, "Alpha")
+	ck(Beta, "Beta")
+	ck(Gamma, "Gamma")
+	ck(Delta, "Delta")
+	ck(42, "Conv2(42)")
+}
+
+func ck(c Conv2, str string) {
+	if fmt.Sprint(c) != str {
+		panic("conv2.go: " + str)
+	}
+}
diff --git a/cmd/stringer/testdata/typeparams/prime2.go b/cmd/stringer/testdata/typeparams/prime2.go
new file mode 100644
index 0000000..556db37
--- /dev/null
+++ b/cmd/stringer/testdata/typeparams/prime2.go
@@ -0,0 +1,63 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This is a version of ../prime.go with type params
+
+// Enough gaps to trigger a map implementation of the method.
+// Also includes a duplicate to test that it doesn't cause problems
+
+package main
+
+import "fmt"
+
+// For now, a lone type parameter is not permitted as RHS in a type declaration (issue #45639).
+// type Likeint[T interface{ ~int | ~uint8 }] T
+type Likeint int
+
+// type Prime2 Likeint[int]
+type Prime2 Likeint
+
+const (
+	p2  Prime2 = 2
+	p3  Prime2 = 3
+	p5  Prime2 = 5
+	p7  Prime2 = 7
+	p77 Prime2 = 7 // Duplicate; note that p77 doesn't appear below.
+	p11 Prime2 = 11
+	p13 Prime2 = 13
+	p17 Prime2 = 17
+	p19 Prime2 = 19
+	p23 Prime2 = 23
+	p29 Prime2 = 29
+	p37 Prime2 = 31
+	p41 Prime2 = 41
+	p43 Prime2 = 43
+)
+
+func main() {
+	ck(0, "Prime2(0)")
+	ck(1, "Prime2(1)")
+	ck(p2, "p2")
+	ck(p3, "p3")
+	ck(4, "Prime2(4)")
+	ck(p5, "p5")
+	ck(p7, "p7")
+	ck(p77, "p7")
+	ck(p11, "p11")
+	ck(p13, "p13")
+	ck(p17, "p17")
+	ck(p19, "p19")
+	ck(p23, "p23")
+	ck(p29, "p29")
+	ck(p37, "p37")
+	ck(p41, "p41")
+	ck(p43, "p43")
+	ck(44, "Prime2(44)")
+}
+
+func ck(prime Prime2, str string) {
+	if fmt.Sprint(prime) != str {
+		panic("prime2.go: " + str)
+	}
+}
diff --git a/container/intsets/export_test.go b/container/intsets/export_test.go
new file mode 100644
index 0000000..41faf31
--- /dev/null
+++ b/container/intsets/export_test.go
@@ -0,0 +1,8 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package intsets
+
+// Backdoor for testing.
+func (s *Sparse) Check() error { return s.check() }
diff --git a/container/intsets/popcnt_amd64.go b/container/intsets/popcnt_amd64.go
deleted file mode 100644
index 25c02f4..0000000
--- a/container/intsets/popcnt_amd64.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build amd64 && !appengine && !gccgo
-// +build amd64,!appengine,!gccgo
-
-package intsets
-
-func popcnt(x word) int
-func havePOPCNT() bool
-
-var hasPOPCNT = havePOPCNT()
-
-// popcount returns the population count (number of set bits) of x.
-func popcount(x word) int {
-	if hasPOPCNT {
-		return popcnt(x)
-	}
-	return popcountTable(x) // faster than Hacker's Delight
-}
diff --git a/container/intsets/popcnt_amd64.s b/container/intsets/popcnt_amd64.s
deleted file mode 100644
index 05c3d6f..0000000
--- a/container/intsets/popcnt_amd64.s
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build amd64,!appengine,!gccgo
-
-#include "textflag.h"
-
-// func havePOPCNT() bool
-TEXT ·havePOPCNT(SB),4,$0
-	MOVQ	$1, AX
-	CPUID
-	SHRQ	$23, CX
-	ANDQ	$1, CX
-	MOVB	CX, ret+0(FP)
-	RET
-
-// func popcnt(word) int
-TEXT ·popcnt(SB),NOSPLIT,$0-8
-	XORQ	AX, AX
-	MOVQ	x+0(FP), SI
-	// POPCNT (SI), AX is not recognized by Go assembler,
-	// so we assemble it ourselves.
-	BYTE	$0xf3
-	BYTE	$0x48
-	BYTE	$0x0f
-	BYTE	$0xb8
-	BYTE	$0xc6
-	MOVQ	AX, ret+8(FP)
-	RET
diff --git a/container/intsets/popcnt_gccgo.go b/container/intsets/popcnt_gccgo.go
deleted file mode 100644
index 5e1efcf..0000000
--- a/container/intsets/popcnt_gccgo.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build gccgo
-// +build gccgo
-
-package intsets
-
-func popcount(x word) int
diff --git a/container/intsets/popcnt_gccgo_c.c b/container/intsets/popcnt_gccgo_c.c
deleted file mode 100644
index 08abb32..0000000
--- a/container/intsets/popcnt_gccgo_c.c
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2015 The Go Authors.  All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build gccgo
-
-#include <errno.h>
-#include <stdint.h>
-#include <unistd.h>
-
-#define _STRINGIFY2_(x) #x
-#define _STRINGIFY_(x) _STRINGIFY2_(x)
-#define GOSYM_PREFIX _STRINGIFY_(__USER_LABEL_PREFIX__)
-
-extern intptr_t popcount(uintptr_t x) __asm__(GOSYM_PREFIX GOPKGPATH ".popcount");
-
-intptr_t popcount(uintptr_t x) {
-	return __builtin_popcountl((unsigned long)(x));
-}
diff --git a/container/intsets/popcnt_generic.go b/container/intsets/popcnt_generic.go
deleted file mode 100644
index caffedc..0000000
--- a/container/intsets/popcnt_generic.go
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build (!amd64 || appengine) && !gccgo
-// +build !amd64 appengine
-// +build !gccgo
-
-package intsets
-
-import "runtime"
-
-// We compared three algorithms---Hacker's Delight, table lookup,
-// and AMD64's SSE4.1 hardware POPCNT---on a 2.67GHz Xeon X5550.
-//
-// % GOARCH=amd64 go test -run=NONE -bench=Popcount
-// POPCNT               5.12 ns/op
-// Table                8.53 ns/op
-// HackersDelight       9.96 ns/op
-//
-// % GOARCH=386 go test -run=NONE -bench=Popcount
-// Table               10.4  ns/op
-// HackersDelight       5.23 ns/op
-//
-// (AMD64's ABM1 hardware supports ntz and nlz too,
-// but they aren't critical.)
-
-// popcount returns the population count (number of set bits) of x.
-func popcount(x word) int {
-	if runtime.GOARCH == "386" {
-		return popcountHD(uint32(x))
-	}
-	return popcountTable(x)
-}
diff --git a/container/intsets/sparse.go b/container/intsets/sparse.go
index 5db01c1..2f1a0ea 100644
--- a/container/intsets/sparse.go
+++ b/container/intsets/sparse.go
@@ -27,6 +27,7 @@
 import (
 	"bytes"
 	"fmt"
+	"math/bits"
 )
 
 // A Sparse is a set of int values.
@@ -63,6 +64,36 @@
 	MinInt = -MaxInt - 1
 )
 
+// popcount returns the number of set bits in x.
+func popcount(x word) int {
+	// Avoid OnesCount(uint): don't assume uint = uintptr.
+	if bitsPerWord == 32 {
+		return bits.OnesCount32(uint32(x))
+	} else {
+		return bits.OnesCount64(uint64(x))
+	}
+}
+
+// nlz returns the number of leading zeros of x.
+func nlz(x word) int {
+	// Avoid LeadingZeros(uint): don't assume uint = uintptr.
+	if bitsPerWord == 32 {
+		return bits.LeadingZeros32(uint32(x))
+	} else {
+		return bits.LeadingZeros64(uint64(x))
+	}
+}
+
+// ntz returns the number of trailing zeros of x.
+func ntz(x word) int {
+	// Avoid TrailingZeros(uint): don't assume uint = uintptr.
+	if bitsPerWord == 32 {
+		return bits.TrailingZeros32(uint32(x))
+	} else {
+		return bits.TrailingZeros64(uint64(x))
+	}
+}
+
 // -- block ------------------------------------------------------------
 
 // A set is represented as a circular doubly-linked list of blocks,
@@ -635,8 +666,9 @@
 	for xb != &none {
 		if sb != &none && sb.offset == xb.offset {
 			for i := range xb.bits {
-				if sb.bits[i] != xb.bits[i] {
-					sb.bits[i] |= xb.bits[i]
+				union := sb.bits[i] | xb.bits[i]
+				if sb.bits[i] != union {
+					sb.bits[i] = union
 					changed = true
 				}
 			}
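
Illustrative sketch, not part of the patch: the UnionWith change above fixes the 'changed' flag by comparing each word of the receiver against the computed union rather than against the operand's word. With plain uint64 words standing in for the package's word type:

package main

import "fmt"

func main() {
	// Two words at the same block offset; y's bits are a subset of x's.
	x, y := uint64(0b0111), uint64(0b0011)
	union := x | y          // equals x, so the receiving set is unchanged
	fmt.Println(x != y)     // true: the old per-word test reported a spurious change
	fmt.Println(x != union) // false: the fixed test against the union
}
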
diff --git a/container/intsets/sparse_test.go b/container/intsets/sparse_test.go
index 7481a06..cd8ec6e 100644
--- a/container/intsets/sparse_test.go
+++ b/container/intsets/sparse_test.go
@@ -460,6 +460,41 @@
 	}
 }
 
+// TestUnionWithChanged checks the 'changed' result of UnionWith.
+func TestUnionWithChanged(t *testing.T) {
+	setOf := func(elems ...int) *intsets.Sparse {
+		s := new(intsets.Sparse)
+		for _, elem := range elems {
+			s.Insert(elem)
+		}
+		return s
+	}
+
+	checkUnionWith := func(x, y *intsets.Sparse) {
+		xstr := x.String()
+		prelen := x.Len()
+		changed := x.UnionWith(y)
+		if (x.Len() > prelen) != changed {
+			t.Errorf("%s.UnionWith(%s) => %s, changed=%t", xstr, y, x, changed)
+		}
+	}
+
+	// The case marked "!" is a regression test for Issue 50352,
+	// in which UnionWith spuriously returned true when y ⊂ x.
+
+	// same block
+	checkUnionWith(setOf(1, 2), setOf(1, 2))
+	checkUnionWith(setOf(1, 2, 3), setOf(1, 2)) // !
+	checkUnionWith(setOf(1, 2), setOf(1, 2, 3))
+	checkUnionWith(setOf(1, 2), setOf())
+
+	// different blocks
+	checkUnionWith(setOf(1, 1000000), setOf(1, 1000000))
+	checkUnionWith(setOf(1, 2, 1000000), setOf(1, 2))
+	checkUnionWith(setOf(1, 2), setOf(1, 2, 1000000))
+	checkUnionWith(setOf(1, 1000000), setOf())
+}
+
 func TestIntersectionWith(t *testing.T) {
 	// Edge cases: the pairs (1,1), (1000,2000), (8000,4000)
 	// exercise the <, >, == cases in IntersectionWith that the
diff --git a/container/intsets/util.go b/container/intsets/util.go
deleted file mode 100644
index dd1db86..0000000
--- a/container/intsets/util.go
+++ /dev/null
@@ -1,84 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package intsets
-
-// From Hacker's Delight, fig 5.2.
-func popcountHD(x uint32) int {
-	x -= (x >> 1) & 0x55555555
-	x = (x & 0x33333333) + ((x >> 2) & 0x33333333)
-	x = (x + (x >> 4)) & 0x0f0f0f0f
-	x = x + (x >> 8)
-	x = x + (x >> 16)
-	return int(x & 0x0000003f)
-}
-
-var a [1 << 8]byte
-
-func init() {
-	for i := range a {
-		var n byte
-		for x := i; x != 0; x >>= 1 {
-			if x&1 != 0 {
-				n++
-			}
-		}
-		a[i] = n
-	}
-}
-
-func popcountTable(x word) int {
-	return int(a[byte(x>>(0*8))] +
-		a[byte(x>>(1*8))] +
-		a[byte(x>>(2*8))] +
-		a[byte(x>>(3*8))] +
-		a[byte(x>>(4*8))] +
-		a[byte(x>>(5*8))] +
-		a[byte(x>>(6*8))] +
-		a[byte(x>>(7*8))])
-}
-
-// nlz returns the number of leading zeros of x.
-// From Hacker's Delight, fig 5.11.
-func nlz(x word) int {
-	x |= (x >> 1)
-	x |= (x >> 2)
-	x |= (x >> 4)
-	x |= (x >> 8)
-	x |= (x >> 16)
-	x |= (x >> 32)
-	return popcount(^x)
-}
-
-// ntz returns the number of trailing zeros of x.
-// From Hacker's Delight, fig 5.13.
-func ntz(x word) int {
-	if x == 0 {
-		return bitsPerWord
-	}
-	n := 1
-	if bitsPerWord == 64 {
-		if (x & 0xffffffff) == 0 {
-			n = n + 32
-			x = x >> 32
-		}
-	}
-	if (x & 0x0000ffff) == 0 {
-		n = n + 16
-		x = x >> 16
-	}
-	if (x & 0x000000ff) == 0 {
-		n = n + 8
-		x = x >> 8
-	}
-	if (x & 0x0000000f) == 0 {
-		n = n + 4
-		x = x >> 4
-	}
-	if (x & 0x00000003) == 0 {
-		n = n + 2
-		x = x >> 2
-	}
-	return n - int(x&1)
-}
diff --git a/container/intsets/util_test.go b/container/intsets/util_test.go
deleted file mode 100644
index e4cc659..0000000
--- a/container/intsets/util_test.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package intsets
-
-import (
-	"math/rand"
-	"testing"
-)
-
-func TestNLZ(t *testing.T) {
-	// Test the platform-specific edge case.
-	// NB: v must be a var (not const) so that the word() conversion is dynamic.
-	// Otherwise the compiler will report an error.
-	v := uint64(0x0000801000000000)
-	n := nlz(word(v))
-	want := 32 // (on 32-bit)
-	if bitsPerWord == 64 {
-		want = 16
-	}
-	if n != want {
-		t.Errorf("%d-bit nlz(%d) = %d, want %d", bitsPerWord, v, n, want)
-	}
-}
-
-// Backdoor for testing.
-func (s *Sparse) Check() error { return s.check() }
-
-func dumbPopcount(x word) int {
-	var popcnt int
-	for i := uint(0); i < bitsPerWord; i++ {
-		if x&(1<<i) != 0 {
-			popcnt++
-		}
-	}
-	return popcnt
-}
-
-func TestPopcount(t *testing.T) {
-	for i := 0; i < 1e5; i++ {
-		x := word(rand.Uint32())
-		if bitsPerWord == 64 {
-			x = x | (word(rand.Uint32()) << 32)
-		}
-		want := dumbPopcount(x)
-		got := popcount(x)
-		if got != want {
-			t.Errorf("popcount(%d) = %d, want %d", x, got, want)
-		}
-	}
-}
-
-func BenchmarkPopcount(b *testing.B) {
-	for i := 0; i < b.N; i++ {
-		popcount(word(i))
-	}
-}
diff --git a/copyright/copyright.go b/copyright/copyright.go
index a20d623..4a04d13 100644
--- a/copyright/copyright.go
+++ b/copyright/copyright.go
@@ -2,6 +2,9 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build go1.18
+// +build go1.18
+
 // Package copyright checks that files have the correct copyright notices.
 package copyright
 
@@ -9,8 +12,8 @@
 	"go/ast"
 	"go/parser"
 	"go/token"
+	"io/fs"
 	"io/ioutil"
-	"os"
 	"path/filepath"
 	"regexp"
 	"strings"
@@ -18,13 +21,18 @@
 
 func checkCopyright(dir string) ([]string, error) {
 	var files []string
-	err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
+	err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
 		if err != nil {
 			return err
 		}
-		if info.IsDir() {
+		if d.IsDir() {
 			// Skip directories like ".git".
-			if strings.HasPrefix(info.Name(), ".") {
+			if strings.HasPrefix(d.Name(), ".") {
+				return filepath.SkipDir
+			}
+			// Skip any directory that starts with an underscore, as the go
+			// command would.
+			if strings.HasPrefix(d.Name(), "_") {
 				return filepath.SkipDir
 			}
 			return nil
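
A minimal, standalone sketch (not part of the patch) of the filepath.WalkDir pattern the updated checker uses: fs.DirEntry avoids a stat per file, and directories starting with "." or "_" are pruned the way the go command would. The root directory "." is an assumption of the sketch.

package main

import (
	"fmt"
	"io/fs"
	"log"
	"path/filepath"
	"strings"
)

func main() {
	root := "."
	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		// Prune hidden and underscore-prefixed directories (but never the root itself).
		if path != root && d.IsDir() &&
			(strings.HasPrefix(d.Name(), ".") || strings.HasPrefix(d.Name(), "_")) {
			return filepath.SkipDir
		}
		if !d.IsDir() && strings.HasSuffix(path, ".go") {
			fmt.Println(path)
		}
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
}
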
diff --git a/copyright/copyright_test.go b/copyright/copyright_test.go
index bfab43c..1d63147 100644
--- a/copyright/copyright_test.go
+++ b/copyright/copyright_test.go
@@ -2,6 +2,9 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build go1.18
+// +build go1.18
+
 package copyright
 
 import (
diff --git a/cover/profile.go b/cover/profile.go
index 5719577..47a9a54 100644
--- a/cover/profile.go
+++ b/cover/profile.go
@@ -10,6 +10,7 @@
 	"bufio"
 	"errors"
 	"fmt"
+	"io"
 	"math"
 	"os"
 	"sort"
@@ -45,14 +46,18 @@
 		return nil, err
 	}
 	defer pf.Close()
+	return ParseProfilesFromReader(pf)
+}
 
-	files := make(map[string]*Profile)
-	buf := bufio.NewReader(pf)
+// ParseProfilesFromReader parses profile data from the Reader and
+// returns a Profile for each source file described therein.
+func ParseProfilesFromReader(rd io.Reader) ([]*Profile, error) {
 	// First line is "mode: foo", where foo is "set", "count", or "atomic".
 	// Rest of file is in the format
 	//	encoding/base64/base64.go:34.44,37.40 3 1
 	// where the fields are: name.go:line.column,line.column numberOfStatements count
-	s := bufio.NewScanner(buf)
+	files := make(map[string]*Profile)
+	s := bufio.NewScanner(rd)
 	mode := ""
 	for s.Scan() {
 		line := s.Text()
diff --git a/go.mod b/go.mod
index 1c1dad4..34bc8ab 100644
--- a/go.mod
+++ b/go.mod
@@ -3,10 +3,11 @@
 go 1.17
 
 require (
-	github.com/yuin/goldmark v1.4.0
-	golang.org/x/mod v0.4.2
-	golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d
+	github.com/yuin/goldmark v1.4.1
+	golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3
+	golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f
 	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
-	golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e
+	golang.org/x/sys v0.0.0-20211019181941-9d821ace8654
+	golang.org/x/text v0.3.7
 	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
 )
diff --git a/go.sum b/go.sum
index a56a130..c34a2ae 100644
--- a/go.sum
+++ b/go.sum
@@ -1,25 +1,30 @@
-github.com/yuin/goldmark v1.4.0 h1:OtISOGfH6sOWa1/qXqqAiOIAO6Z5J3AEAE18WAq6BiQ=
-github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.1 h1:/vn0k+RBvwlxEmP5E7SZMqNxPhfMVFEJiykr15/0XKM=
+github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo=
-golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o=
+golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI=
-golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f h1:OfiFi4JbukWwe3lzw+xunroH1mnC1e2Gy5cxNJApiSY=
+golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA=
-golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 h1:id054HUawV2/6IGm2IV8KZQjqtwAOo2CYlOToYqa0d0=
+golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go
index 8447244..df79a44 100644
--- a/go/analysis/analysistest/analysistest.go
+++ b/go/analysis/analysistest/analysistest.go
@@ -196,12 +196,13 @@
 							want := string(bytes.TrimRight(vf.Data, "\n")) + "\n"
 							formatted, err := format.Source([]byte(out))
 							if err != nil {
+								t.Errorf("%s: error formatting edited source: %v\n%s", file.Name(), err, out)
 								continue
 							}
 							if want != string(formatted) {
 								d, err := myers.ComputeEdits("", want, string(formatted))
 								if err != nil {
-									t.Errorf("failed to compute suggested fixes: %v", err)
+									t.Errorf("failed to compute suggested fix diff: %v", err)
 								}
 								t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), diff.ToUnified(fmt.Sprintf("%s.golden [%s]", file.Name(), sf), "actual", want, d))
 							}
@@ -225,12 +226,13 @@
 
 				formatted, err := format.Source([]byte(out))
 				if err != nil {
+					t.Errorf("%s: error formatting resulting source: %v\n%s", file.Name(), err, out)
 					continue
 				}
 				if want != string(formatted) {
 					d, err := myers.ComputeEdits("", want, string(formatted))
 					if err != nil {
-						t.Errorf("failed to compute edits: %s", err)
+						t.Errorf("%s: failed to compute suggested fix diff: %s", file.Name(), err)
 					}
 					t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), diff.ToUnified(file.Name()+".golden", "actual", want, d))
 				}
@@ -282,7 +284,7 @@
 		testenv.NeedsGoPackages(t)
 	}
 
-	pkgs, err := loadPackages(dir, patterns...)
+	pkgs, err := loadPackages(a, dir, patterns...)
 	if err != nil {
 		t.Errorf("loading %s: %v", patterns, err)
 		return nil
@@ -306,7 +308,7 @@
 // dependencies) from dir, which is the root of a GOPATH-style project
 // tree. It returns an error if any package had an error, or the pattern
 // matched no packages.
-func loadPackages(dir string, patterns ...string) ([]*packages.Package, error) {
+func loadPackages(a *analysis.Analyzer, dir string, patterns ...string) ([]*packages.Package, error) {
 	// packages.Load loads the real standard library, not a minimal
 	// fake version, which would be more efficient, especially if we
 	// have many small tests that import, say, net/http.
@@ -325,9 +327,13 @@
 		return nil, err
 	}
 
-	// Print errors but do not stop:
-	// some Analyzers may be disposed to RunDespiteErrors.
-	packages.PrintErrors(pkgs)
+	// Do NOT print errors if the analyzer will continue running.
+	// It is incredibly confusing for tests to be printing to stderr
+	// willy-nilly instead of their test logs, especially when the
+	// errors are expected and are going to be fixed.
+	if !a.RunDespiteErrors {
+		packages.PrintErrors(pkgs)
+	}
 
 	if len(pkgs) == 0 {
 		return nil, fmt.Errorf("no packages matched %s", patterns)
@@ -439,7 +445,7 @@
 					want[k] = expects
 					return
 				}
-				unmatched = append(unmatched, fmt.Sprintf("%q", exp.rx))
+				unmatched = append(unmatched, fmt.Sprintf("%#q", exp.rx))
 			}
 		}
 		if unmatched == nil {
@@ -503,7 +509,7 @@
 	var surplus []string
 	for key, expects := range want {
 		for _, exp := range expects {
-			err := fmt.Sprintf("%s:%d: no %s was reported matching %q", key.file, key.line, exp.kind, exp.rx)
+			err := fmt.Sprintf("%s:%d: no %s was reported matching %#q", key.file, key.line, exp.kind, exp.rx)
 			surplus = append(surplus, err)
 		}
 	}
diff --git a/go/analysis/analysistest/analysistest_test.go b/go/analysis/analysistest/analysistest_test.go
index cb9bdd2..8c7ff73 100644
--- a/go/analysis/analysistest/analysistest_test.go
+++ b/go/analysis/analysistest/analysistest_test.go
@@ -134,19 +134,19 @@
 		`a/b.go:6: in 'want' comment: got String after foo, want ':'`,
 		`a/b.go:7: in 'want' comment: got EOF, want regular expression`,
 		`a/b.go:8: in 'want' comment: invalid char escape`,
-		`a/b.go:11:9: diagnostic "call of println(...)" does not match pattern "wrong expectation text"`,
+		"a/b.go:11:9: diagnostic \"call of println(...)\" does not match pattern `wrong expectation text`",
 		`a/b.go:14:9: unexpected diagnostic: call of println(...)`,
-		`a/b.go:11: no diagnostic was reported matching "wrong expectation text"`,
-		`a/b.go:17: no diagnostic was reported matching "unsatisfied expectation"`,
+		"a/b.go:11: no diagnostic was reported matching `wrong expectation text`",
+		"a/b.go:17: no diagnostic was reported matching `unsatisfied expectation`",
 		// duplicate copies of each message from the test package (see issue #40574)
 		`a/b.go:5: in 'want' comment: unexpected ":"`,
 		`a/b.go:6: in 'want' comment: got String after foo, want ':'`,
 		`a/b.go:7: in 'want' comment: got EOF, want regular expression`,
 		`a/b.go:8: in 'want' comment: invalid char escape`,
-		`a/b.go:11:9: diagnostic "call of println(...)" does not match pattern "wrong expectation text"`,
+		"a/b.go:11:9: diagnostic \"call of println(...)\" does not match pattern `wrong expectation text`",
 		`a/b.go:14:9: unexpected diagnostic: call of println(...)`,
-		`a/b.go:11: no diagnostic was reported matching "wrong expectation text"`,
-		`a/b.go:17: no diagnostic was reported matching "unsatisfied expectation"`,
+		"a/b.go:11: no diagnostic was reported matching `wrong expectation text`",
+		"a/b.go:17: no diagnostic was reported matching `unsatisfied expectation`",
 	}
 	if !reflect.DeepEqual(got, want) {
 		t.Errorf("got:\n%s\nwant:\n%s",
diff --git a/go/analysis/doc/suggested_fixes.md b/go/analysis/doc/suggested_fixes.md
index f46871a..74888f8 100644
--- a/go/analysis/doc/suggested_fixes.md
+++ b/go/analysis/doc/suggested_fixes.md
@@ -93,7 +93,7 @@
 apply all fixes suggested by their analysis or analyses. This is intended to
 be used primarily by refactoring tools, because in general, like diagnostics,
 suggested fixes will need to be examined by a human who can decide whether
-they are relevent.
+they are relevant.
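
For context, a hedged sketch of the kind of diagnostic such tools consume: an analyzer attaches a machine-applicable SuggestedFix, a driver may apply it automatically, and a human can still judge whether it is relevant. The function name and the matched *ast.CallExpr are assumptions of the sketch, not taken from this document.

package example

import (
	"go/ast"

	"golang.org/x/tools/go/analysis"
)

// reportUnusedCall shows the shape of a diagnostic carrying a suggested fix.
func reportUnusedCall(pass *analysis.Pass, call *ast.CallExpr) {
	pass.Report(analysis.Diagnostic{
		Pos:     call.Pos(),
		End:     call.End(),
		Message: "result of call is not used",
		SuggestedFixes: []analysis.SuggestedFix{{
			Message: "Remove the unused call",
			TextEdits: []analysis.TextEdit{{
				Pos:     call.Pos(),
				End:     call.End(),
				NewText: nil, // an empty replacement deletes the range
			}},
		}},
	})
}
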
 
 ### gopls
 
diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go
index 34f5b47..e405a2a 100644
--- a/go/analysis/internal/checker/checker.go
+++ b/go/analysis/internal/checker/checker.go
@@ -11,6 +11,7 @@
 import (
 	"bytes"
 	"encoding/gob"
+	"errors"
 	"flag"
 	"fmt"
 	"go/format"
@@ -129,8 +130,13 @@
 	allSyntax := needFacts(analyzers)
 	initial, err := load(args, allSyntax)
 	if err != nil {
-		log.Print(err)
-		return 1 // load errors
+		if _, ok := err.(typeParseError); !ok {
+			// Fail when some of the errors are not
+			// related to parsing or typing.
+			log.Print(err)
+			return 1
+		}
+		// TODO: filter analyzers based on RunDespiteErrors?
 	}
 
 	// Print the results.
@@ -139,11 +145,17 @@
 	if Fix {
 		applyFixes(roots)
 	}
-
 	return printDiagnostics(roots)
 }
 
-// load loads the initial packages.
+// typeParseError represents a package load error
+// that is related to typing or parsing.
+type typeParseError struct {
+	error
+}
+
+// load loads the initial packages. If all loading issues are related to
+// typing or parsing, the returned error is of type typeParseError.
 func load(patterns []string, allSyntax bool) ([]*packages.Package, error) {
 	mode := packages.LoadSyntax
 	if allSyntax {
@@ -155,18 +167,43 @@
 	}
 	initial, err := packages.Load(&conf, patterns...)
 	if err == nil {
-		if n := packages.PrintErrors(initial); n > 1 {
-			err = fmt.Errorf("%d errors during loading", n)
-		} else if n == 1 {
-			err = fmt.Errorf("error during loading")
-		} else if len(initial) == 0 {
+		if len(initial) == 0 {
 			err = fmt.Errorf("%s matched no packages", strings.Join(patterns, " "))
+		} else {
+			err = loadingError(initial)
 		}
 	}
-
 	return initial, err
 }
 
+// loadingError checks for issues during the loading of initial
+// packages. It returns nil if there are no issues. It returns an error
+// of type typeParseError if all errors, including those in
+// dependencies, are related to typing or parsing. Otherwise,
+// a plain error is returned with an appropriate message.
+func loadingError(initial []*packages.Package) error {
+	var err error
+	if n := packages.PrintErrors(initial); n > 1 {
+		err = fmt.Errorf("%d errors during loading", n)
+	} else if n == 1 {
+		err = errors.New("error during loading")
+	} else {
+		// no errors
+		return nil
+	}
+	all := true
+	packages.Visit(initial, nil, func(pkg *packages.Package) {
+		for _, err := range pkg.Errors {
+			typeOrParse := err.Kind == packages.TypeError || err.Kind == packages.ParseError
+			all = all && typeOrParse
+		}
+	})
+	if all {
+		return typeParseError{err}
+	}
+	return err
+}
+
 // TestAnalyzer applies an analysis to a set of packages (and their
 // dependencies if necessary) and returns the results.
 //
@@ -890,7 +927,7 @@
 func factType(fact analysis.Fact) reflect.Type {
 	t := reflect.TypeOf(fact)
 	if t.Kind() != reflect.Ptr {
-		log.Fatalf("invalid Fact type: got %T, want pointer", t)
+		log.Fatalf("invalid Fact type: got %T, want pointer", fact)
 	}
 	return t
 }
diff --git a/go/analysis/internal/checker/checker_test.go b/go/analysis/internal/checker/checker_test.go
index 50c51a1..eee211c 100644
--- a/go/analysis/internal/checker/checker_test.go
+++ b/go/analysis/internal/checker/checker_test.go
@@ -99,3 +99,62 @@
 
 	return nil, nil
 }
+
+func TestRunDespiteErrors(t *testing.T) {
+	testenv.NeedsGoPackages(t)
+
+	files := map[string]string{
+		"rderr/test.go": `package rderr
+
+// Foo deliberately has a type error
+func Foo(s string) int {
+	return s + 1
+}
+`}
+
+	testdata, cleanup, err := analysistest.WriteFiles(files)
+	if err != nil {
+		t.Fatal(err)
+	}
+	path := filepath.Join(testdata, "src/rderr/test.go")
+
+	// A no-op analyzer that should finish regardless of
+	// parse or type errors in the code.
+	noop := &analysis.Analyzer{
+		Name:     "noop",
+		Requires: []*analysis.Analyzer{inspect.Analyzer},
+		Run: func(pass *analysis.Pass) (interface{}, error) {
+			return nil, nil
+		},
+		RunDespiteErrors: true,
+	}
+
+	for _, test := range []struct {
+		name      string
+		pattern   []string
+		analyzers []*analysis.Analyzer
+		code      int
+	}{
+		// parse/type errors
+		{name: "skip-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{analyzer}, code: 1},
+		{name: "despite-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{noop}, code: 0},
+		// combination of parse/type errors and no errors
+		{name: "despite-error-and-no-error", pattern: []string{"file=" + path, "sort"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 1},
+		// non-existing package error
+		{name: "no-package", pattern: []string{"xyz"}, analyzers: []*analysis.Analyzer{analyzer}, code: 1},
+		{name: "no-package-despite-error", pattern: []string{"abc"}, analyzers: []*analysis.Analyzer{noop}, code: 1},
+		{name: "no-multi-package-despite-error", pattern: []string{"xyz", "abc"}, analyzers: []*analysis.Analyzer{noop}, code: 1},
+		// combination of type/parsing and different errors
+		{name: "different-errors", pattern: []string{"file=" + path, "xyz"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 1},
+		// non-existing dir error
+		{name: "no-match-dir", pattern: []string{"file=non/existing/dir"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 1},
+		// no errors
+		{name: "no-errors", pattern: []string{"sort"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 0},
+	} {
+		if got := checker.Run(test.pattern, test.analyzers); got != test.code {
+			t.Errorf("got incorrect exit code %d for test %s; want %d", got, test.name, test.code)
+		}
+	}
+
+	defer cleanup()
+}
diff --git a/go/analysis/internal/facts/facts_test.go b/go/analysis/internal/facts/facts_test.go
index 971334e..13c3582 100644
--- a/go/analysis/internal/facts/facts_test.go
+++ b/go/analysis/internal/facts/facts_test.go
@@ -17,6 +17,7 @@
 	"golang.org/x/tools/go/analysis/internal/facts"
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/testenv"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 type myFact struct {
@@ -26,23 +27,209 @@
 func (f *myFact) String() string { return fmt.Sprintf("myFact(%s)", f.S) }
 func (f *myFact) AFact()         {}
 
-func TestEncodeDecode(t *testing.T) {
+func init() {
 	gob.Register(new(myFact))
+}
 
-	// c -> b -> a, a2
-	// c does not directly depend on a, but it indirectly uses a.T.
-	//
-	// Package a2 is never loaded directly so it is incomplete.
-	//
-	// We use only types in this example because we rely on
-	// types.Eval to resolve the lookup expressions, and it only
-	// works for types. This is a definite gap in the typechecker API.
-	files := map[string]string{
-		"a/a.go":  `package a; type A int; type T int`,
-		"a2/a.go": `package a2; type A2 int; type Unneeded int`,
-		"b/b.go":  `package b; import ("a"; "a2"); type B chan a2.A2; type F func() a.T`,
-		"c/c.go":  `package c; import "b"; type C []b.B`,
+func TestEncodeDecode(t *testing.T) {
+	tests := []struct {
+		name       string
+		typeparams bool // requires typeparams to be enabled
+		files      map[string]string
+		plookups   []pkgLookups // see testEncodeDecode for details
+	}{
+		{
+			name: "loading-order",
+			// c -> b -> a, a2
+			// c does not directly depend on a, but it indirectly uses a.T.
+			//
+			// Package a2 is never loaded directly so it is incomplete.
+			//
+			// We use only types in this example because we rely on
+			// types.Eval to resolve the lookup expressions, and it only
+			// works for types. This is a definite gap in the typechecker API.
+			files: map[string]string{
+				"a/a.go":  `package a; type A int; type T int`,
+				"a2/a.go": `package a2; type A2 int; type Unneeded int`,
+				"b/b.go":  `package b; import ("a"; "a2"); type B chan a2.A2; type F func() a.T`,
+				"c/c.go":  `package c; import "b"; type C []b.B`,
+			},
+			// In the following table, we analyze packages (a, b, c) in order,
+			// look up various objects accessible within each package,
+			// and see if they have a fact.  The "analysis" exports a fact
+			// for every object at package level.
+			//
+			// Note: Loop iterations are not independent test cases;
+			// order matters, as we populate factmap.
+			plookups: []pkgLookups{
+				{"a", []lookup{
+					{"A", "myFact(a.A)"},
+				}},
+				{"b", []lookup{
+					{"a.A", "myFact(a.A)"},
+					{"a.T", "myFact(a.T)"},
+					{"B", "myFact(b.B)"},
+					{"F", "myFact(b.F)"},
+					{"F(nil)()", "myFact(a.T)"}, // (result type of b.F)
+				}},
+				{"c", []lookup{
+					{"b.B", "myFact(b.B)"},
+					{"b.F", "myFact(b.F)"},
+					//{"b.F(nil)()", "myFact(a.T)"}, // no fact; TODO(adonovan): investigate
+					{"C", "myFact(c.C)"},
+					{"C{}[0]", "myFact(b.B)"},
+					{"<-(C{}[0])", "no fact"}, // object but no fact (we never "analyze" a2)
+				}},
+			},
+		},
+		{
+			name: "globals",
+			files: map[string]string{
+				"a/a.go": `package a;
+				type T1 int
+				type T2 int
+				type T3 int
+				type T4 int
+				type T5 int
+				type K int; type V string
+				`,
+				"b/b.go": `package b
+				import "a"
+				var (
+					G1 []a.T1
+					G2 [7]a.T2
+					G3 chan a.T3
+					G4 *a.T4
+					G5 struct{ F a.T5 }
+					G6 map[a.K]a.V
+				)
+				`,
+				"c/c.go": `package c; import "b";
+				var (
+					v1 = b.G1
+					v2 = b.G2
+					v3 = b.G3
+					v4 = b.G4
+					v5 = b.G5
+					v6 = b.G6
+				)
+				`,
+			},
+			plookups: []pkgLookups{
+				{"a", []lookup{}},
+				{"b", []lookup{}},
+				{"c", []lookup{
+					{"v1[0]", "myFact(a.T1)"},
+					{"v2[0]", "myFact(a.T2)"},
+					{"<-v3", "myFact(a.T3)"},
+					{"*v4", "myFact(a.T4)"},
+					{"v5.F", "myFact(a.T5)"},
+					{"v6[0]", "myFact(a.V)"},
+				}},
+			},
+		},
+		{
+			name:       "typeparams",
+			typeparams: true,
+			files: map[string]string{
+				"a/a.go": `package a
+				  type T1 int
+				  type T2 int
+				  type T3 interface{Foo()}
+				  type T4 int
+				  type T5 int
+				  type T6 interface{Foo()}
+				`,
+				"b/b.go": `package b
+				  import "a"
+				  type N1[T a.T1|int8] func() T
+				  type N2[T any] struct{ F T }
+				  type N3[T a.T3] func() T
+				  type N4[T a.T4|int8] func() T
+				  type N5[T interface{Bar() a.T5} ] func() T
+		
+				  type t5 struct{}; func (t5) Bar() a.T5
+		
+				  var G1 N1[a.T1]
+				  var G2 func() N2[a.T2]
+				  var G3 N3[a.T3]
+				  var G4 N4[a.T4]
+				  var G5 N5[t5]
+		
+				  func F6[T a.T6]() T { var x T; return x }
+				  `,
+				"c/c.go": `package c; import "b";
+				  var (
+					  v1 = b.G1
+					  v2 = b.G2
+					  v3 = b.G3
+					  v4 = b.G4
+					  v5 = b.G5
+					  v6 = b.F6[t6]
+				  )
+		
+				  type t6 struct{}; func (t6) Foo() {}
+				`,
+			},
+			plookups: []pkgLookups{
+				{"a", []lookup{}},
+				{"b", []lookup{}},
+				{"c", []lookup{
+					{"v1", "myFact(b.N1)"},
+					{"v1()", "myFact(a.T1)"},
+					{"v2()", "myFact(b.N2)"},
+					{"v2().F", "myFact(a.T2)"},
+					{"v3", "myFact(b.N3)"},
+					{"v4", "myFact(b.N4)"},
+					{"v4()", "myFact(a.T4)"},
+					{"v5", "myFact(b.N5)"},
+					{"v5()", "myFact(b.t5)"},
+					{"v6()", "myFact(c.t6)"},
+				}},
+			},
+		},
 	}
+
+	for i := range tests {
+		test := tests[i]
+		t.Run(test.name, func(t *testing.T) {
+			t.Parallel()
+			if test.typeparams && !typeparams.Enabled {
+				t.Skip("type parameters are not enabled")
+			}
+			testEncodeDecode(t, test.files, test.plookups)
+		})
+	}
+}
+
+type lookup struct {
+	objexpr string
+	want    string
+}
+
+type pkgLookups struct {
+	path    string
+	lookups []lookup
+}
+
+// testEncodeDecode tests fact encoding and decoding and simulates how package facts
+// are passed during analysis. It operates on a group of Go file contents. Then
+// for each <package, []lookup> in tests it does the following:
+//  1) loads and type checks the package,
+//  2) calls facts.Decode to load the facts exported by its imports,
+//  3) exports a myFact Fact for all package-level objects,
+//  4) for each lookup for the current package:
+//  4.a) looks up the types.Object for a Go source expression in the current package
+//       (or confirms one is not expected want=="no object"),
+//  4.b) finds a Fact for the object (or confirms one is not expected want=="no fact"),
+//  4.c) compares the content of the Fact to want.
+//  5) encodes the Facts of the package.
+//
+// Note: tests are not independent test cases; order matters (as does a package being
+// skipped). It changes what Facts can be imported.
+//
+// Failures are reported on t.
+func testEncodeDecode(t *testing.T, files map[string]string, tests []pkgLookups) {
 	dir, cleanup, err := analysistest.WriteFiles(files)
 	if err != nil {
 		t.Fatal(err)
@@ -54,40 +241,13 @@
 	factmap := make(map[string][]byte)
 	read := func(path string) ([]byte, error) { return factmap[path], nil }
 
-	// In the following table, we analyze packages (a, b, c) in order,
-	// look up various objects accessible within each package,
-	// and see if they have a fact.  The "analysis" exports a fact
-	// for every object at package level.
+	// Analyze packages in order, look up various objects accessible within
+	// each package, and see if they have a fact.  The "analysis" exports a
+	// fact for every object at package level.
 	//
 	// Note: Loop iterations are not independent test cases;
 	// order matters, as we populate factmap.
-	type lookups []struct {
-		objexpr string
-		want    string
-	}
-	for _, test := range []struct {
-		path    string
-		lookups lookups
-	}{
-		{"a", lookups{
-			{"A", "myFact(a.A)"},
-		}},
-		{"b", lookups{
-			{"a.A", "myFact(a.A)"},
-			{"a.T", "myFact(a.T)"},
-			{"B", "myFact(b.B)"},
-			{"F", "myFact(b.F)"},
-			{"F(nil)()", "myFact(a.T)"}, // (result type of b.F)
-		}},
-		{"c", lookups{
-			{"b.B", "myFact(b.B)"},
-			{"b.F", "myFact(b.F)"},
-			//{"b.F(nil)()", "myFact(a.T)"}, // no fact; TODO(adonovan): investigate
-			{"C", "myFact(c.C)"},
-			{"C{}[0]", "myFact(b.B)"},
-			{"<-(C{}[0])", "no fact"}, // object but no fact (we never "analyze" a2)
-		}},
-	} {
+	for _, test := range tests {
 		// load package
 		pkg, err := load(t, dir, test.path)
 		if err != nil {
@@ -99,18 +259,16 @@
 		if err != nil {
 			t.Fatalf("Decode failed: %v", err)
 		}
-		if true {
-			t.Logf("decode %s facts = %v", pkg.Path(), facts) // show all facts
-		}
+		t.Logf("decode %s facts = %v", pkg.Path(), facts) // show all facts
 
 		// export
 		// (one fact for each package-level object)
-		scope := pkg.Scope()
-		for _, name := range scope.Names() {
-			obj := scope.Lookup(name)
+		for _, name := range pkg.Scope().Names() {
+			obj := pkg.Scope().Lookup(name)
 			fact := &myFact{obj.Pkg().Name() + "." + obj.Name()}
 			facts.ExportObjectFact(obj, fact)
 		}
+		t.Logf("exported %s facts = %v", pkg.Path(), facts) // show all facts
 
 		// import
 		// (after export, because an analyzer may import its own facts)
diff --git a/go/analysis/internal/facts/imports.go b/go/analysis/internal/facts/imports.go
index 34740f4..ade0cc6 100644
--- a/go/analysis/internal/facts/imports.go
+++ b/go/analysis/internal/facts/imports.go
@@ -4,7 +4,11 @@
 
 package facts
 
-import "go/types"
+import (
+	"go/types"
+
+	"golang.org/x/tools/internal/typeparams"
+)
 
 // importMap computes the import map for a package by traversing the
 // entire exported API each of its imports.
@@ -42,9 +46,20 @@
 			// nop
 		case *types.Named:
 			if addObj(T.Obj()) {
+				// TODO(taking): Investigate why the Underlying type is not added here.
 				for i := 0; i < T.NumMethods(); i++ {
 					addObj(T.Method(i))
 				}
+				if tparams := typeparams.ForNamed(T); tparams != nil {
+					for i := 0; i < tparams.Len(); i++ {
+						addType(tparams.At(i))
+					}
+				}
+				if targs := typeparams.NamedTypeArgs(T); targs != nil {
+					for i := 0; i < targs.Len(); i++ {
+						addType(targs.At(i))
+					}
+				}
 			}
 		case *types.Pointer:
 			addType(T.Elem())
@@ -60,6 +75,11 @@
 		case *types.Signature:
 			addType(T.Params())
 			addType(T.Results())
+			if tparams := typeparams.ForSignature(T); tparams != nil {
+				for i := 0; i < tparams.Len(); i++ {
+					addType(tparams.At(i))
+				}
+			}
 		case *types.Struct:
 			for i := 0; i < T.NumFields(); i++ {
 				addObj(T.Field(i))
@@ -72,6 +92,17 @@
 			for i := 0; i < T.NumMethods(); i++ {
 				addObj(T.Method(i))
 			}
+			for i := 0; i < T.NumEmbeddeds(); i++ {
+				addType(T.EmbeddedType(i)) // walk Embedded for implicits
+			}
+		case *typeparams.Union:
+			for i := 0; i < T.Len(); i++ {
+				addType(T.Term(i).Type())
+			}
+		case *typeparams.TypeParam:
+			if addObj(T.Obj()) {
+				addType(T.Constraint())
+			}
 		}
 	}
 
diff --git a/go/analysis/passes/asmdecl/asmdecl.go b/go/analysis/passes/asmdecl/asmdecl.go
index 7b82d0b..b05ed5c 100644
--- a/go/analysis/passes/asmdecl/asmdecl.go
+++ b/go/analysis/passes/asmdecl/asmdecl.go
@@ -90,8 +90,8 @@
 	asmArchMipsLE   = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true}
 	asmArchMips64   = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true}
 	asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true}
-	asmArchPpc64    = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true}
-	asmArchPpc64LE  = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true}
+	asmArchPpc64    = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}}
+	asmArchPpc64LE  = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}}
 	asmArchRISCV64  = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true}
 	asmArchS390X    = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true}
 	asmArchWasm     = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false}
diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm7.s b/go/analysis/passes/asmdecl/testdata/src/a/asm7.s
index ef22ff8..51b5a84 100644
--- a/go/analysis/passes/asmdecl/testdata/src/a/asm7.s
+++ b/go/analysis/passes/asmdecl/testdata/src/a/asm7.s
@@ -190,3 +190,11 @@
 
 TEXT ·returnintmissing(SB),0,$0-8
 	RET // want `RET without writing to 8-byte ret\+0\(FP\)`
+
+// writing to result in ABIInternal function
+TEXT ·returnABIInternal<ABIInternal>(SB), NOSPLIT, $8
+	MOVD	$123, R3
+	RET
+TEXT ·returnmissingABIInternal<ABIInternal>(SB), NOSPLIT, $8
+	MOVD	$123, R10
+	RET // want `RET without writing to result register`
diff --git a/go/analysis/passes/assign/assign_test.go b/go/analysis/passes/assign/assign_test.go
index f793e08..146385f 100644
--- a/go/analysis/passes/assign/assign_test.go
+++ b/go/analysis/passes/assign/assign_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/assign"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.RunWithSuggestedFixes(t, testdata, assign.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.RunWithSuggestedFixes(t, testdata, assign.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go b/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..345db27
--- /dev/null
+++ b/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,33 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the useless-assignment checker.
+
+//go:build go1.18
+
+package testdata
+
+import "math/rand"
+
+type ST[T interface{ ~int }] struct {
+	x T
+	l []T
+}
+
+func (s *ST[T]) SetX(x T, ch chan T) {
+	// Accidental self-assignment; it should be "s.x = x"
+	x = x // want "self-assignment of x to x"
+	// Another mistake
+	s.x = s.x // want "self-assignment of s.x to s.x"
+
+	s.l[0] = s.l[0] // want "self-assignment of s.l.0. to s.l.0."
+
+	// Bail on any potential side effects to avoid false positives
+	s.l[num()] = s.l[num()]
+	rng := rand.New(rand.NewSource(0))
+	s.l[rng.Intn(len(s.l))] = s.l[rng.Intn(len(s.l))]
+	s.l[<-ch] = s.l[<-ch]
+}
+
+func num() int { return 2 }
diff --git a/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go.golden b/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go.golden
new file mode 100644
index 0000000..d9384ed
--- /dev/null
+++ b/go/analysis/passes/assign/testdata/src/typeparams/typeparams.go.golden
@@ -0,0 +1,33 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the useless-assignment checker.
+
+//go:build go1.18
+
+package testdata
+
+import "math/rand"
+
+type ST[T interface{ ~int }] struct {
+	x T
+	l []T
+}
+
+func (s *ST[T]) SetX(x T, ch chan T) {
+	// Accidental self-assignment; it should be "s.x = x"
+	// want "self-assignment of x to x"
+	// Another mistake
+	// want "self-assignment of s.x to s.x"
+
+	// want "self-assignment of s.l.0. to s.l.0."
+
+	// Bail on any potential side effects to avoid false positives
+	s.l[num()] = s.l[num()]
+	rng := rand.New(rand.NewSource(0))
+	s.l[rng.Intn(len(s.l))] = s.l[rng.Intn(len(s.l))]
+	s.l[<-ch] = s.l[<-ch]
+}
+
+func num() int { return 2 }
diff --git a/go/analysis/passes/atomic/atomic_test.go b/go/analysis/passes/atomic/atomic_test.go
index f5f60a3..c17064c 100644
--- a/go/analysis/passes/atomic/atomic_test.go
+++ b/go/analysis/passes/atomic/atomic_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/atomic"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, atomic.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, atomic.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/atomic/testdata/src/typeparams/typeparams.go b/go/analysis/passes/atomic/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..52cf468
--- /dev/null
+++ b/go/analysis/passes/atomic/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,37 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the atomic checker.
+
+package a
+
+import (
+	"sync/atomic"
+)
+
+type Subtractable interface {
+	~int64
+}
+
+func Sub[T Subtractable](addr *T, delta T) T {
+	// The following result in type errors, but that doesn't stop this vet check.
+	*addr = atomic.AddInt64(addr, -delta)  // want "direct assignment to atomic value"
+	*addr = atomic.AddUintptr(addr, delta) // want "direct assignment to atomic value"
+	atomic.AddInt64()  // vet ignores it
+	return *addr
+}
+
+type _S[T Subtractable] struct {
+	x *T
+}
+
+func (v _S) AddInt64(_ *int64, delta int64) int64 {
+	*v.x = atomic.AddInt64(v.x, delta)  // want "direct assignment to atomic value"
+	return *v.x
+}
+
+func NonAtomicInt64() {
+	var atomic _S[int64]
+	*atomic.x = atomic.AddInt64(atomic.x, 123)  // ok; AddInt64 is not sync/atomic.AddInt64.
+}
\ No newline at end of file
diff --git a/go/analysis/passes/bools/bools_test.go b/go/analysis/passes/bools/bools_test.go
index 5732470..ea51437 100644
--- a/go/analysis/passes/bools/bools_test.go
+++ b/go/analysis/passes/bools/bools_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/bools"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, bools.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, bools.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/bools/testdata/src/typeparams/typeparams.go b/go/analysis/passes/bools/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..7184625
--- /dev/null
+++ b/go/analysis/passes/bools/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,63 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the bool checker.
+
+//go:build go1.18
+
+package typeparams
+
+type T[P interface{ ~int }] struct {
+	a P
+}
+
+func (t T[P]) Foo() int { return int(t.a) }
+
+type FT[P any] func() P
+
+func Sink[Elem any]() chan Elem {
+	return make(chan Elem)
+}
+
+func RedundantConditions[P interface{ int }]() {
+	type _f[P1 any] func() P1
+
+	var f, g _f[P]
+	if f() == 0 || f() == 0 { // OK f might have side effects
+	}
+	var t T[P]
+	_ = t.Foo() == 2 || t.Foo() == 2        // OK Foo might have side effects
+	if v, w := f(), g(); v == w || v == w { // want `redundant or: v == w \|\| v == w`
+	}
+
+	// error messages present type params correctly.
+	_ = t == T[P]{2} || t == T[P]{2}                 // want `redundant or: t == T\[P\]\{2\} \|\| t == T\[P\]\{2\}`
+	_ = FT[P](f) == nil || FT[P](f) == nil           // want `redundant or: FT\[P\]\(f\) == nil \|\| FT\[P\]\(f\) == nil`
+	_ = (func() P)(f) == nil || (func() P)(f) == nil // want `redundant or: \(func\(\) P\)\(f\) == nil \|\| \(func\(\) P\)\(f\) == nil`
+
+	var tint T[int]
+	var fint _f[int]
+	_ = tint == T[int]{2} || tint == T[int]{2}                 // want `redundant or: tint == T\[int\]\{2\} \|\| tint\ == T\[int\]\{2\}`
+	_ = FT[int](fint) == nil || FT[int](fint) == nil           // want `redundant or: FT\[int\]\(fint\) == nil \|\| FT\[int\]\(fint\) == nil`
+	_ = (func() int)(fint) == nil || (func() int)(fint) == nil // want `redundant or: \(func\(\) int\)\(fint\) == nil \|\| \(func\(\) int\)\(fint\) == nil`
+
+	c := Sink[P]()
+	_ = 0 == <-c || 0 == <-c                                  // OK subsequent receives may yield different values
+	for i, j := <-c, <-c; i == j || i == j; i, j = <-c, <-c { // want `redundant or: i == j \|\| i == j`
+	}
+
+	var i, j P
+	_ = i == 1 || j+1 == i || i == 1 // want `redundant or: i == 1 \|\| i == 1`
+	_ = i == 1 || f() == 1 || i == 1 // OK f may alter i as a side effect
+	_ = f() == 1 || i == 1 || i == 1 // want `redundant or: i == 1 \|\| i == 1`
+}
+
+func SuspectConditions[P interface{ ~int }, S interface{ ~string }]() {
+	var i, j P
+	_ = i == 0 || i == 1                 // OK
+	_ = i+3 != 7 || j+5 == 0 || i+3 != 9 // want `suspect or: i\+3 != 7 \|\| i\+3 != 9`
+
+	var s S
+	_ = s != "one" || s != "the other" // want `suspect or: s != .one. \|\| s != .the other.`
+}
diff --git a/go/analysis/passes/cgocall/cgocall_test.go b/go/analysis/passes/cgocall/cgocall_test.go
index ba65426..45ca1da 100644
--- a/go/analysis/passes/cgocall/cgocall_test.go
+++ b/go/analysis/passes/cgocall/cgocall_test.go
@@ -9,9 +9,15 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/cgocall"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, cgocall.Analyzer, "a", "b", "c")
+	tests := []string{"a", "b", "c"}
+	if typeparams.Enabled {
+		// also check testdata/src/typeparams/typeparams.go when type parameters are enabled
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, cgocall.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/cgocall/testdata/src/typeparams/typeparams.go b/go/analysis/passes/cgocall/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..37e639a
--- /dev/null
+++ b/go/analysis/passes/cgocall/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,37 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the cgo checker.
+
+package a
+
+// void f(void *ptr) {}
+import "C"
+
+import "unsafe"
+
+func CgoTest[T any]() {
+	var c chan bool
+	C.f(*(*unsafe.Pointer)(unsafe.Pointer(&c))) // want "embedded pointer"
+	C.f(unsafe.Pointer(&c))                     // want "embedded pointer"
+
+	var schan S[chan bool]
+	C.f(*(*unsafe.Pointer)(unsafe.Pointer(&schan))) // want "embedded pointer"
+	C.f(unsafe.Pointer(&schan))                     // want "embedded pointer"
+
+	var x T
+	C.f(*(*unsafe.Pointer)(unsafe.Pointer(&x))) // no findings, as T is not known at compile time
+	C.f(unsafe.Pointer(&x))
+
+	// instantiating CgoTest should not yield any warnings
+	CgoTest[chan bool]()
+
+	var sint S[int]
+	C.f(*(*unsafe.Pointer)(unsafe.Pointer(&sint)))
+	C.f(unsafe.Pointer(&sint))
+}
+
+type S[X any] struct {
+	val X
+}
diff --git a/go/analysis/passes/composite/composite.go b/go/analysis/passes/composite/composite.go
index 4c3ac66..d3670ac 100644
--- a/go/analysis/passes/composite/composite.go
+++ b/go/analysis/passes/composite/composite.go
@@ -14,6 +14,7 @@
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = `check for unkeyed composite literals
@@ -67,41 +68,61 @@
 			// skip whitelisted types
 			return
 		}
-		under := typ.Underlying()
-		for {
-			ptr, ok := under.(*types.Pointer)
-			if !ok {
-				break
+		var structuralTypes []types.Type
+		switch typ := typ.(type) {
+		case *typeparams.TypeParam:
+			terms, err := typeparams.StructuralTerms(typ)
+			if err != nil {
+				return // invalid type
 			}
-			under = ptr.Elem().Underlying()
-		}
-		if _, ok := under.(*types.Struct); !ok {
-			// skip non-struct composite literals
-			return
-		}
-		if isLocalType(pass, typ) {
-			// allow unkeyed locally defined composite literal
-			return
-		}
-
-		// check if the CompositeLit contains an unkeyed field
-		allKeyValue := true
-		for _, e := range cl.Elts {
-			if _, ok := e.(*ast.KeyValueExpr); !ok {
-				allKeyValue = false
-				break
+			for _, term := range terms {
+				structuralTypes = append(structuralTypes, term.Type())
 			}
+		default:
+			structuralTypes = append(structuralTypes, typ)
 		}
-		if allKeyValue {
-			// all the composite literal fields are keyed
+		for _, typ := range structuralTypes {
+			under := deref(typ.Underlying())
+			if _, ok := under.(*types.Struct); !ok {
+				// skip non-struct composite literals
+				continue
+			}
+			if isLocalType(pass, typ) {
+				// allow unkeyed locally defined composite literal
+				continue
+			}
+
+			// check if the CompositeLit contains an unkeyed field
+			allKeyValue := true
+			for _, e := range cl.Elts {
+				if _, ok := e.(*ast.KeyValueExpr); !ok {
+					allKeyValue = false
+					break
+				}
+			}
+			if allKeyValue {
+				// all the composite literal fields are keyed
+				continue
+			}
+
+			pass.ReportRangef(cl, "%s composite literal uses unkeyed fields", typeName)
 			return
 		}
-
-		pass.ReportRangef(cl, "%s composite literal uses unkeyed fields", typeName)
 	})
 	return nil, nil
 }
 
+func deref(typ types.Type) types.Type {
+	for {
+		ptr, ok := typ.(*types.Pointer)
+		if !ok {
+			break
+		}
+		typ = ptr.Elem().Underlying()
+	}
+	return typ
+}
+
 func isLocalType(pass *analysis.Pass, typ types.Type) bool {
 	switch x := typ.(type) {
 	case *types.Struct:
@@ -112,6 +133,8 @@
 	case *types.Named:
 		// names in package foo are local to foo_test too
 		return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test")
+	case *typeparams.TypeParam:
+		return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test")
 	}
 	return false
 }
diff --git a/go/analysis/passes/composite/composite_test.go b/go/analysis/passes/composite/composite_test.go
index c55015c..952de8b 100644
--- a/go/analysis/passes/composite/composite_test.go
+++ b/go/analysis/passes/composite/composite_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/composite"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, composite.Analyzer, "a")
+	pkgs := []string{"a"}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.Run(t, testdata, composite.Analyzer, pkgs...)
 }
diff --git a/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go b/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go
new file mode 100644
index 0000000..20b652e
--- /dev/null
+++ b/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go
@@ -0,0 +1,16 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package a
+
+import "testing"
+
+var fuzzTargets = []testing.InternalFuzzTarget{
+	{"Fuzz", Fuzz},
+}
+
+func Fuzz(f *testing.F) {}
diff --git a/go/analysis/passes/composite/testdata/src/typeparams/lib/lib.go b/go/analysis/passes/composite/testdata/src/typeparams/lib/lib.go
new file mode 100644
index 0000000..9d7710d
--- /dev/null
+++ b/go/analysis/passes/composite/testdata/src/typeparams/lib/lib.go
@@ -0,0 +1,9 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package lib
+
+type Struct struct{ F int }
+type Slice []int
+type Map map[int]int
diff --git a/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go b/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..dd5d57e
--- /dev/null
+++ b/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,27 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import "typeparams/lib"
+
+type localStruct struct{ F int }
+
+func F[
+	T1 ~struct{ f int },
+	T2a localStruct,
+	T2b lib.Struct,
+	T3 ~[]int,
+	T4 lib.Slice,
+	T5 ~map[int]int,
+	T6 lib.Map,
+]() {
+	_ = T1{2}
+	_ = T2a{2}
+	_ = T2b{2} // want "unkeyed fields"
+	_ = T3{1, 2}
+	_ = T4{1, 2}
+	_ = T5{1: 2}
+	_ = T6{1: 2}
+}
diff --git a/go/analysis/passes/composite/whitelist.go b/go/analysis/passes/composite/whitelist.go
index 1e5f5fd..f84c187 100644
--- a/go/analysis/passes/composite/whitelist.go
+++ b/go/analysis/passes/composite/whitelist.go
@@ -26,9 +26,10 @@
 	"unicode.Range16": true,
 	"unicode.Range32": true,
 
-	// These three structs are used in generated test main files,
+	// These four structs are used in generated test main files,
 	// but the generator can be trusted.
-	"testing.InternalBenchmark": true,
-	"testing.InternalExample":   true,
-	"testing.InternalTest":      true,
+	"testing.InternalBenchmark":  true,
+	"testing.InternalExample":    true,
+	"testing.InternalTest":       true,
+	"testing.InternalFuzzTarget": true,
 }
diff --git a/go/analysis/passes/copylock/copylock.go b/go/analysis/passes/copylock/copylock.go
index c4ebf78..350dc4e 100644
--- a/go/analysis/passes/copylock/copylock.go
+++ b/go/analysis/passes/copylock/copylock.go
@@ -17,6 +17,7 @@
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = `check for locks erroneously passed by value
@@ -145,7 +146,7 @@
 func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, typ *ast.FuncType) {
 	if recv != nil && len(recv.List) > 0 {
 		expr := recv.List[0].Type
-		if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil {
+		if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type, nil); path != nil {
 			pass.ReportRangef(expr, "%s passes lock by value: %v", name, path)
 		}
 	}
@@ -153,7 +154,7 @@
 	if typ.Params != nil {
 		for _, field := range typ.Params.List {
 			expr := field.Type
-			if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil {
+			if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type, nil); path != nil {
 				pass.ReportRangef(expr, "%s passes lock by value: %v", name, path)
 			}
 		}
@@ -199,12 +200,12 @@
 	if typ == nil {
 		return
 	}
-	if path := lockPath(pass.Pkg, typ); path != nil {
+	if path := lockPath(pass.Pkg, typ, nil); path != nil {
 		pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisutil.Format(pass.Fset, e), path)
 	}
 }
 
-type typePath []types.Type
+type typePath []string
 
 // String pretty-prints a typePath.
 func (path typePath) String() string {
@@ -215,7 +216,7 @@
 			fmt.Fprint(&buf, " contains ")
 		}
 		// The human-readable path is in reverse order, outermost to innermost.
-		fmt.Fprint(&buf, path[n-i-1].String())
+		fmt.Fprint(&buf, path[n-i-1])
 	}
 	return buf.String()
 }
@@ -234,16 +235,57 @@
 			return nil
 		}
 	}
-	return lockPath(pass.Pkg, pass.TypesInfo.Types[x].Type)
+	return lockPath(pass.Pkg, pass.TypesInfo.Types[x].Type, nil)
 }
 
 // lockPath returns a typePath describing the location of a lock value
 // contained in typ. If there is no contained lock, it returns nil.
-func lockPath(tpkg *types.Package, typ types.Type) typePath {
+//
+// The seenTParams map is used to short-circuit infinite recursion via type
+// parameters.
+func lockPath(tpkg *types.Package, typ types.Type, seenTParams map[*typeparams.TypeParam]bool) typePath {
 	if typ == nil {
 		return nil
 	}
 
+	if tpar, ok := typ.(*typeparams.TypeParam); ok {
+		if seenTParams == nil {
+			// Lazily allocate seenTParams, since the common case will not involve
+			// any type parameters.
+			seenTParams = make(map[*typeparams.TypeParam]bool)
+		}
+		if seenTParams[tpar] {
+			return nil
+		}
+		seenTParams[tpar] = true
+		terms, err := typeparams.StructuralTerms(tpar)
+		if err != nil {
+			return nil // invalid type
+		}
+		for _, term := range terms {
+			subpath := lockPath(tpkg, term.Type(), seenTParams)
+			if len(subpath) > 0 {
+				if term.Tilde() {
+					// Prepend a tilde to our lock path entry to clarify the resulting
+					// diagnostic message. Consider the following example:
+					//
+					//  func _[Mutex interface{ ~sync.Mutex; M() }](m Mutex) {}
+					//
+					// Here the naive error message will be something like "passes lock
+					// by value: Mutex contains sync.Mutex". This is misleading because
+					// the local type parameter doesn't actually contain sync.Mutex,
+					// which lacks the M method.
+					//
+					// With tilde, it is clearer that the containment is via an
+					// approximation element.
+					subpath[len(subpath)-1] = "~" + subpath[len(subpath)-1]
+				}
+				return append(subpath, typ.String())
+			}
+		}
+		return nil
+	}
+
 	for {
 		atyp, ok := typ.Underlying().(*types.Array)
 		if !ok {
@@ -252,6 +294,17 @@
 		typ = atyp.Elem()
 	}
 
+	ttyp, ok := typ.Underlying().(*types.Tuple)
+	if ok {
+		for i := 0; i < ttyp.Len(); i++ {
+			subpath := lockPath(tpkg, ttyp.At(i).Type(), seenTParams)
+			if subpath != nil {
+				return append(subpath, typ.String())
+			}
+		}
+		return nil
+	}
+
 	// We're only interested in the case in which the underlying
 	// type is a struct. (Interfaces and pointers are safe to copy.)
 	styp, ok := typ.Underlying().(*types.Struct)
@@ -263,7 +316,7 @@
 	// is a sync.Locker, but a value is not. This differentiates
 	// embedded interfaces from embedded values.
 	if types.Implements(types.NewPointer(typ), lockerType) && !types.Implements(typ, lockerType) {
-		return []types.Type{typ}
+		return []string{typ.String()}
 	}
 
 	// In go1.10, sync.noCopy did not implement Locker.
@@ -272,15 +325,15 @@
 	if named, ok := typ.(*types.Named); ok &&
 		named.Obj().Name() == "noCopy" &&
 		named.Obj().Pkg().Path() == "sync" {
-		return []types.Type{typ}
+		return []string{typ.String()}
 	}
 
 	nfields := styp.NumFields()
 	for i := 0; i < nfields; i++ {
 		ftyp := styp.Field(i).Type()
-		subpath := lockPath(tpkg, ftyp)
+		subpath := lockPath(tpkg, ftyp, seenTParams)
 		if subpath != nil {
-			return append(subpath, typ)
+			return append(subpath, typ.String())
 		}
 	}
 
diff --git a/go/analysis/passes/copylock/copylock_test.go b/go/analysis/passes/copylock/copylock_test.go
index d33d0a2..869955b 100644
--- a/go/analysis/passes/copylock/copylock_test.go
+++ b/go/analysis/passes/copylock/copylock_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/copylock"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, copylock.Analyzer, "a")
+	pkgs := []string{"a"}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.Run(t, testdata, copylock.Analyzer, pkgs...)
 }
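
As a rough illustration (not part of the patch), the snippet below condenses the two new kinds of findings exercised by the testdata changes that follow; the identifiers are illustrative only.

```go
package example

import "sync"

// A type parameter whose type set contains a lock type: passing it by value is
// now reported, with a ~ prefix when the lock enters via an approximation term.
func bad[M ~struct{ mu sync.Mutex }](m M) {} // reported: passes lock by value: M contains ~struct{mu sync.Mutex}

// Comma-ok map access yields a tuple; copying a lock out of it is reported too.
func tuple() {
	type locked struct{ mu sync.Mutex }
	m := map[string]locked{}
	v, ok := m["k"] // reported: assignment copies lock value to v
	_, _ = v, ok
}
```
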
diff --git a/go/analysis/passes/copylock/testdata/src/a/copylock.go b/go/analysis/passes/copylock/testdata/src/a/copylock.go
index 57d4076..e528280 100644
--- a/go/analysis/passes/copylock/testdata/src/a/copylock.go
+++ b/go/analysis/passes/copylock/testdata/src/a/copylock.go
@@ -89,6 +89,14 @@
 	fmuB := fmuA        // OK
 	fmuA = fmuB         // OK
 	fmuSlice := fmuA[:] // OK
+
+	// map access by single and tuple copies prohibited
+	type mut struct{ mu sync.Mutex }
+	muM := map[string]mut{
+		"a": mut{},
+	}
+	mumA := muM["a"]    // want "assignment copies lock value to mumA: a.mut contains sync.Mutex"
+	mumB, _ := muM["a"] // want "assignment copies lock value to mumB: \\(a.mut, bool\\) contains a.mut contains sync.Mutex"
 }
 
 func LenAndCapOnLockArrays() {
diff --git a/go/analysis/passes/copylock/testdata/src/typeparams/typeparams.go b/go/analysis/passes/copylock/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..3b2191e
--- /dev/null
+++ b/go/analysis/passes/copylock/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,56 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import "sync"
+
+// The copylock analyzer runs despite errors. The following invalid type should
+// not cause an infinite recursion.
+type R struct{ r R }
+
+func TestNoRecursion(r R) {}
+
+// The following recursive type parameter definitions should not cause an
+// infinite recursion.
+func TestNoTypeParamRecursion[T1 ~[]T2, T2 ~[]T1 | string, T3 ~struct{ F T3 }](t1 T1, t2 T2, t3 T3) {
+}
+
+func OkFunc1[Struct ~*struct{ mu sync.Mutex }](s Struct) {
+}
+
+func BadFunc1[Struct ~struct{ mu sync.Mutex }](s Struct) { // want `passes lock by value: .*Struct contains ~struct{mu sync.Mutex}`
+}
+
+func OkFunc2[MutexPtr *sync.Mutex](m MutexPtr) {
+	var x *MutexPtr
+	p := x
+	var y MutexPtr
+	p = &y
+	*p = *x
+
+	var mus []MutexPtr
+
+	for _, _ = range mus {
+	}
+}
+
+func BadFunc2[Mutex sync.Mutex](m Mutex) { // want `passes lock by value: .*Mutex contains sync.Mutex`
+	var x *Mutex
+	p := x
+	var y Mutex
+	p = &y
+	*p = *x // want `assignment copies lock value to \*p: .*Mutex contains sync.Mutex`
+
+	var mus []Mutex
+
+	for _, _ = range mus {
+	}
+}
+
+func ApproximationError[Mutex interface {
+	~sync.Mutex
+	M()
+}](m Mutex) { // want `passes lock by value: .*Mutex contains ~sync.Mutex`
+}
diff --git a/go/analysis/passes/ctrlflow/ctrlflow.go b/go/analysis/passes/ctrlflow/ctrlflow.go
index 51600ff..73746d6 100644
--- a/go/analysis/passes/ctrlflow/ctrlflow.go
+++ b/go/analysis/passes/ctrlflow/ctrlflow.go
@@ -187,7 +187,11 @@
 		return false // panic never returns
 	}
 
-	// Is this a static call?
+	// Is this a static call? Also includes static functions
+	// parameterized by a type. Such functions may or may not
+	// return depending on the parameter type, but in some
+	// cases the answer is definite. We let ctrlflow figure
+	// that out.
 	fn := typeutil.StaticCallee(c.pass.TypesInfo, call)
 	if fn == nil {
 		return true // callee not statically known; be conservative
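
A small sketch (not part of the patch) of the behavior described in the comment above, in the spirit of the new ctrlflow typeparams testdata:

```go
package example

// block never returns, so ctrlflow records a noReturn fact for it even though
// it is generic: the callee of block[T]() is still statically known.
func block[T any]() {
	select {}
}

// caller is also noReturn, because its only path calls block[T].
func caller[T any]() {
	block[T]()
}
```
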
diff --git a/go/analysis/passes/ctrlflow/ctrlflow_test.go b/go/analysis/passes/ctrlflow/ctrlflow_test.go
index 0aae7cb..1503c33 100644
--- a/go/analysis/passes/ctrlflow/ctrlflow_test.go
+++ b/go/analysis/passes/ctrlflow/ctrlflow_test.go
@@ -10,13 +10,19 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/ctrlflow"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
 
 	// load testdata/src/a/a.go
-	results := analysistest.Run(t, testdata, ctrlflow.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		// and testdata/src/typeparams/typeparams.go when possible
+		tests = append(tests, "typeparams")
+	}
+	results := analysistest.Run(t, testdata, ctrlflow.Analyzer, tests...)
 
 	// Perform a minimal smoke test on
 	// the result (CFG) computed by ctrlflow.
diff --git a/go/analysis/passes/ctrlflow/testdata/src/a/a.go b/go/analysis/passes/ctrlflow/testdata/src/a/a.go
index a65bd74..d2a7aec 100644
--- a/go/analysis/passes/ctrlflow/testdata/src/a/a.go
+++ b/go/analysis/passes/ctrlflow/testdata/src/a/a.go
@@ -1,3 +1,7 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package a
 
 // This file tests facts produced by ctrlflow.
diff --git a/go/analysis/passes/ctrlflow/testdata/src/lib/lib.go b/go/analysis/passes/ctrlflow/testdata/src/lib/lib.go
index c0bf7df..41afcc1 100644
--- a/go/analysis/passes/ctrlflow/testdata/src/lib/lib.go
+++ b/go/analysis/passes/ctrlflow/testdata/src/lib/lib.go
@@ -1,3 +1,7 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package lib
 
 func CanReturn() {}
diff --git a/go/analysis/passes/ctrlflow/testdata/src/typeparams/typeparams.go b/go/analysis/passes/ctrlflow/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..1226891
--- /dev/null
+++ b/go/analysis/passes/ctrlflow/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,64 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package a
+
+// This file tests facts produced by ctrlflow.
+
+var cond bool
+
+var funcs = []func(){func() {}}
+
+func a[A any]() { // want a:"noReturn"
+	if cond {
+		funcs[0]()
+		b[A]()
+	} else {
+		for {
+		}
+	}
+}
+
+func b[B any]() { // want b:"noReturn"
+	select {}
+}
+
+func c[A, B any]() { // want c:"noReturn"
+	if cond {
+		a[A]()
+	} else {
+		d[A, B]()
+	}
+}
+
+func d[A, B any]() { // want d:"noReturn"
+	b[B]()
+}
+
+type I[T any] interface {
+	Id(T) T
+}
+
+func e[T any](i I[T], t T) T {
+	return i.Id(t)
+}
+
+func k[T any](i I[T], t T) T { // want k:"noReturn"
+	b[T]()
+	return i.Id(t)
+}
+
+type T[X any] int
+
+func (T[X]) method1() { // want method1:"noReturn"
+	a[X]()
+}
+
+func (T[X]) method2() { // (may return)
+	if cond {
+		a[X]()
+	} else {
+		funcs[0]()
+	}
+}
diff --git a/go/analysis/passes/deepequalerrors/deepequalerrors_test.go b/go/analysis/passes/deepequalerrors/deepequalerrors_test.go
index 2d4faa3..0094173 100644
--- a/go/analysis/passes/deepequalerrors/deepequalerrors_test.go
+++ b/go/analysis/passes/deepequalerrors/deepequalerrors_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/deepequalerrors"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, deepequalerrors.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, deepequalerrors.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/deepequalerrors/testdata/src/typeparams/typeparams.go b/go/analysis/passes/deepequalerrors/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..ac16aa3
--- /dev/null
+++ b/go/analysis/passes/deepequalerrors/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,58 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the deepequalerrors checker.
+
+package a
+
+import (
+	"io"
+	"os"
+	"reflect"
+)
+
+type myError int
+
+func (myError) Error() string { return "" }
+
+func bad[T any]() T {
+	var t T
+	return t
+}
+
+type s1 struct {
+	s2 *s2[myError2]
+	i  int
+}
+
+type myError2 error
+
+type s2[T any] struct {
+	s1   *s1
+	errs []*T
+}
+
+func hasError() {
+	var e error
+	var m myError2
+	reflect.DeepEqual(bad[error](), e)    // want `avoid using reflect.DeepEqual with errors`
+	reflect.DeepEqual(io.EOF, io.EOF)     // want `avoid using reflect.DeepEqual with errors`
+	reflect.DeepEqual(e, &e)              // want `avoid using reflect.DeepEqual with errors`
+	reflect.DeepEqual(e, m)               // want `avoid using reflect.DeepEqual with errors`
+	reflect.DeepEqual(e, s1{})            // want `avoid using reflect.DeepEqual with errors`
+	reflect.DeepEqual(e, [1]error{})      // want `avoid using reflect.DeepEqual with errors`
+	reflect.DeepEqual(e, map[error]int{}) // want `avoid using reflect.DeepEqual with errors`
+	reflect.DeepEqual(e, map[int]error{}) // want `avoid using reflect.DeepEqual with errors`
+	// We catch the next case not because *os.PathError implements error, but because it contains
+	// a field Err of type error.
+	reflect.DeepEqual(&os.PathError{}, io.EOF) // want `avoid using reflect.DeepEqual with errors`
+
+}
+
+func notHasError() {
+	reflect.ValueOf(4)                    // not reflect.DeepEqual
+	reflect.DeepEqual(3, 4)               // not errors
+	reflect.DeepEqual(5, io.EOF)          // only one error
+	reflect.DeepEqual(myError(1), io.EOF) // not types that implement error
+}
diff --git a/go/analysis/passes/errorsas/errorsas_test.go b/go/analysis/passes/errorsas/errorsas_test.go
index 5ef8668..7908e89 100644
--- a/go/analysis/passes/errorsas/errorsas_test.go
+++ b/go/analysis/passes/errorsas/errorsas_test.go
@@ -12,9 +12,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/errorsas"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, errorsas.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, errorsas.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/errorsas/testdata/src/typeparams/typeparams.go b/go/analysis/passes/errorsas/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..5b9ec45
--- /dev/null
+++ b/go/analysis/passes/errorsas/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,37 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the errorsas checker.
+
+package a
+
+import "errors"
+
+type myError[T any] struct{ t T }
+
+func (myError[T]) Error() string { return "" }
+
+type twice[T any] struct {
+	t T
+}
+
+func perr[T any]() *T { return nil }
+
+func two[T any]() (error, *T) { return nil, nil }
+
+func _[E error](e E) {
+	var (
+		m  myError[int]
+		tw twice[myError[int]]
+	)
+	errors.As(nil, &e)
+	errors.As(nil, &m)            // *T where T implements error
+	errors.As(nil, &tw.t)         // *T where T implements error
+	errors.As(nil, perr[error]()) // *error, via a call
+
+	errors.As(nil, e)    // want `second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type`
+	errors.As(nil, m)    // want `second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type`
+	errors.As(nil, tw.t) // want `second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type`
+	errors.As(two[error]())
+}
diff --git a/go/analysis/passes/httpresponse/httpresponse_test.go b/go/analysis/passes/httpresponse/httpresponse_test.go
index dac3ed6..14e1667 100644
--- a/go/analysis/passes/httpresponse/httpresponse_test.go
+++ b/go/analysis/passes/httpresponse/httpresponse_test.go
@@ -5,13 +5,17 @@
 package httpresponse_test
 
 import (
-	"testing"
-
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/httpresponse"
+	"golang.org/x/tools/internal/typeparams"
+	"testing"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, httpresponse.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, httpresponse.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/httpresponse/testdata/src/typeparams/typeparams.go b/go/analysis/passes/httpresponse/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..65dd58c
--- /dev/null
+++ b/go/analysis/passes/httpresponse/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,52 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the httpresponse checker.
+
+//go:build go1.18
+
+package typeparams
+
+import (
+	"log"
+	"net/http"
+)
+
+func badHTTPGet[T any](url string) {
+	res, err := http.Get(url)
+	defer res.Body.Close() // want "using res before checking for errors"
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+
+func mkClient[T any]() *T {
+	return nil
+}
+
+func badClientHTTPGet() {
+	client := mkClient[http.Client]()
+	res, _ := client.Get("")
+	defer res.Body.Close() // want "using res before checking for errors"
+}
+
+// User-defined type embedding "http.Client"
+type S[P any] struct {
+	http.Client
+}
+
+func unmatchedClientTypeName(client S[string]) {
+	res, _ := client.Get("")
+	defer res.Body.Close() // the name of client's type doesn't match "*http.Client"
+}
+
+// User-defined Client type
+type C[P any] interface {
+	Get(url string) (resp *P, err error)
+}
+
+func userDefinedClientType(client C[http.Response]) {
+	resp, _ := client.Get("http://foo.com")
+	defer resp.Body.Close() // "client" is not of type "*http.Client"
+}
diff --git a/go/analysis/passes/ifaceassert/ifaceassert.go b/go/analysis/passes/ifaceassert/ifaceassert.go
index fd22853..30130f6 100644
--- a/go/analysis/passes/ifaceassert/ifaceassert.go
+++ b/go/analysis/passes/ifaceassert/ifaceassert.go
@@ -51,6 +51,12 @@
 	if V == nil || T == nil {
 		return nil
 	}
+
+	// Mitigations for interface comparisons and generics.
+	// TODO(https://github.com/golang/go/issues/50658): Support more precise conclusion.
+	if isParameterized(V) || isParameterized(T) {
+		return nil
+	}
 	if f, wrongType := types.MissingMethod(V, T, false); wrongType {
 		return f
 	}
diff --git a/go/analysis/passes/ifaceassert/ifaceassert_test.go b/go/analysis/passes/ifaceassert/ifaceassert_test.go
index 4607338..b07c276 100644
--- a/go/analysis/passes/ifaceassert/ifaceassert_test.go
+++ b/go/analysis/passes/ifaceassert/ifaceassert_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/ifaceassert"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, ifaceassert.Analyzer, "a")
+	pkgs := []string{"a"}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.Run(t, testdata, ifaceassert.Analyzer, pkgs...)
 }
diff --git a/go/analysis/passes/ifaceassert/parameterized.go b/go/analysis/passes/ifaceassert/parameterized.go
new file mode 100644
index 0000000..1285ecf
--- /dev/null
+++ b/go/analysis/passes/ifaceassert/parameterized.go
@@ -0,0 +1,112 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+package ifaceassert
+
+import (
+	"go/types"
+
+	"golang.org/x/tools/internal/typeparams"
+)
+
+// isParameterized reports whether typ contains any type parameters.
+//
+// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy.
+func isParameterized(typ types.Type) bool {
+	w := tpWalker{
+		seen: make(map[types.Type]bool),
+	}
+	return w.isParameterized(typ)
+}
+
+type tpWalker struct {
+	seen map[types.Type]bool
+}
+
+func (w *tpWalker) isParameterized(typ types.Type) (res bool) {
+	// detect cycles
+	if x, ok := w.seen[typ]; ok {
+		return x
+	}
+	w.seen[typ] = false
+	defer func() {
+		w.seen[typ] = res
+	}()
+
+	switch t := typ.(type) {
+	case nil, *types.Basic: // TODO(gri) should nil be handled here?
+		break
+
+	case *types.Array:
+		return w.isParameterized(t.Elem())
+
+	case *types.Slice:
+		return w.isParameterized(t.Elem())
+
+	case *types.Struct:
+		for i, n := 0, t.NumFields(); i < n; i++ {
+			if w.isParameterized(t.Field(i).Type()) {
+				return true
+			}
+		}
+
+	case *types.Pointer:
+		return w.isParameterized(t.Elem())
+
+	case *types.Tuple:
+		n := t.Len()
+		for i := 0; i < n; i++ {
+			if w.isParameterized(t.At(i).Type()) {
+				return true
+			}
+		}
+
+	case *types.Signature:
+		// t.tparams may not be nil if we are looking at a signature
+		// of a generic function type (or an interface method) that is
+		// part of the type we're testing. We don't care about these type
+		// parameters.
+	// Similarly, the receiver of a method may declare (rather than
+	// use) type parameters; we don't care about those either.
+		// Thus, we only need to look at the input and result parameters.
+		return w.isParameterized(t.Params()) || w.isParameterized(t.Results())
+
+	case *types.Interface:
+		for i, n := 0, t.NumMethods(); i < n; i++ {
+			if w.isParameterized(t.Method(i).Type()) {
+				return true
+			}
+		}
+		terms, err := typeparams.InterfaceTermSet(t)
+		if err != nil {
+			panic(err)
+		}
+		for _, term := range terms {
+			if w.isParameterized(term.Type()) {
+				return true
+			}
+		}
+
+	case *types.Map:
+		return w.isParameterized(t.Key()) || w.isParameterized(t.Elem())
+
+	case *types.Chan:
+		return w.isParameterized(t.Elem())
+
+	case *types.Named:
+		list := typeparams.NamedTypeArgs(t)
+		for i, n := 0, list.Len(); i < n; i++ {
+			if w.isParameterized(list.At(i)) {
+				return true
+			}
+		}
+
+	case *typeparams.TypeParam:
+		return true
+
+	default:
+		panic(t) // unreachable
+	}
+
+	return false
+}
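
A sketch (not part of the patch) of the kind of assertion this mitigation now leaves alone, modeled on the Doer examples in the new testdata; the constraint is illustrative.

```go
package example

type Doer[F ~float32 | ~float64] interface{ Do() F }

func isFloat32[F ~float32 | ~float64](v Doer[F]) bool {
	// Not reported: Doer[F] mentions the type parameter F, so whether the
	// assertion can succeed depends on the chosen type argument.
	_, ok := v.(Doer[float32])
	return ok
}
```
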
diff --git a/go/analysis/passes/ifaceassert/testdata/src/typeparams/typeparams.go b/go/analysis/passes/ifaceassert/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..65709c0
--- /dev/null
+++ b/go/analysis/passes/ifaceassert/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,102 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import "io"
+
+type SourceReader[Source any] interface {
+	Read(p Source) (n int, err error)
+}
+
+func GenericInterfaceAssertionTest[T io.Reader]() {
+	var (
+		a SourceReader[[]byte]
+		b SourceReader[[]int]
+		r io.Reader
+	)
+	_ = a.(io.Reader)
+	_ = b.(io.Reader) // want `^impossible type assertion: no type can implement both typeparams.SourceReader\[\[\]int\] and io.Reader \(conflicting types for Read method\)$`
+
+	_ = r.(SourceReader[[]byte])
+	_ = r.(SourceReader[[]int]) // want `^impossible type assertion: no type can implement both io.Reader and typeparams.SourceReader\[\[\]int\] \(conflicting types for Read method\)$`
+	_ = r.(T)                   // not actually an iface assertion, so checked by the type checker.
+
+	switch a.(type) {
+	case io.Reader:
+	default:
+	}
+
+	switch b.(type) {
+	case io.Reader: // want `^impossible type assertion: no type can implement both typeparams.SourceReader\[\[\]int\] and io.Reader \(conflicting types for Read method\)$`
+
+	default:
+	}
+}
+
+// Issue 50658: Check for type parameters in type switches.
+type Float interface {
+	float32 | float64
+}
+
+type Doer[F Float] interface {
+	Do() F
+}
+
+func Underlying[F Float](v Doer[F]) string {
+	switch v.(type) {
+	case Doer[float32]:
+		return "float32!"
+	case Doer[float64]:
+		return "float64!"
+	default:
+		return "<unknown>"
+	}
+}
+
+func DoIf[F Float]() {
+	// This is a synthetic function to create a non-generic to generic assignment.
+	// This function does not make much sense.
+	var v Doer[float32]
+	if t, ok := v.(Doer[F]); ok {
+		t.Do()
+	}
+}
+
+func IsASwitch[F Float, U Float](v Doer[F]) bool {
+	switch v.(type) {
+	case Doer[U]:
+		return true
+	}
+	return false
+}
+
+func IsA[F Float, U Float](v Doer[F]) bool {
+	_, is := v.(Doer[U])
+	return is
+}
+
+func LayeredTypes[F Float]() {
+	// This is a synthetic function to cover more isParameterized cases.
+	type T interface {
+		foo() struct{ _ map[T][2]chan *F }
+	}
+	type V interface {
+		foo() struct{ _ map[T][2]chan *float32 }
+	}
+	var t T
+	var v V
+	t, _ = v.(T)
+	_ = t
+}
+
+type X[T any] struct{}
+
+func (x X[T]) m(T) {}
+
+func InstancesOfGenericMethods() {
+	var x interface{ m(string) }
+	// _ = x.(X[int])    // BAD. Not enabled as it does not type check.
+	_ = x.(X[string]) // OK
+}
diff --git a/go/analysis/passes/internal/analysisutil/util_test.go b/go/analysis/passes/internal/analysisutil/util_test.go
new file mode 100644
index 0000000..2d7e94e
--- /dev/null
+++ b/go/analysis/passes/internal/analysisutil/util_test.go
@@ -0,0 +1,57 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package analysisutil_test
+
+import (
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"testing"
+
+	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func TestHasSideEffects(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("type parameters are not enabled")
+	}
+	src := `package p
+
+type T int
+
+type G[P any] int
+
+func _() {
+	var x int
+	_ = T(x)
+	_ = G[int](x)
+}
+`
+	fset := token.NewFileSet()
+	file, err := parser.ParseFile(fset, "p.go", src, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+	var conf types.Config
+	info := &types.Info{
+		Types: make(map[ast.Expr]types.TypeAndValue),
+	}
+	_, err = conf.Check("", fset, []*ast.File{file}, info)
+	if err != nil {
+		t.Fatal(err)
+	}
+	ast.Inspect(file, func(node ast.Node) bool {
+		call, ok := node.(*ast.CallExpr)
+		if !ok {
+			return true
+		}
+		if got := analysisutil.HasSideEffects(info, call); got != false {
+			t.Errorf("HasSideEffects(%s) = true, want false", types.ExprString(call))
+		}
+		return true
+	})
+}
diff --git a/go/analysis/passes/loopclosure/loopclosure_test.go b/go/analysis/passes/loopclosure/loopclosure_test.go
index 0916f5e..1498838 100644
--- a/go/analysis/passes/loopclosure/loopclosure_test.go
+++ b/go/analysis/passes/loopclosure/loopclosure_test.go
@@ -5,6 +5,7 @@
 package loopclosure_test
 
 import (
+	"golang.org/x/tools/internal/typeparams"
 	"testing"
 
 	"golang.org/x/tools/go/analysis/analysistest"
@@ -13,5 +14,9 @@
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, loopclosure.Analyzer, "a")
+	tests := []string{"a", "golang.org/..."}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, loopclosure.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/loopclosure/testdata/src/typeparams/typeparams.go b/go/analysis/passes/loopclosure/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..55e129c
--- /dev/null
+++ b/go/analysis/passes/loopclosure/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,60 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the loopclosure checker.
+
+//go:build go1.18
+
+package typeparams
+
+import "golang.org/x/sync/errgroup"
+
+func f[T any](data T) {
+	print(data)
+}
+
+func _[T any]() {
+	var s []T
+	for i, v := range s {
+		go func() {
+			f(i) // want "loop variable i captured by func literal"
+			f(v) // want "loop variable v captured by func literal"
+		}()
+	}
+}
+
+func loop[P interface{ Go(func() error) }](grp P) {
+	var s []int
+	for i, v := range s {
+		// The checker only matches on methods "(*...errgroup.Group).Go".
+		grp.Go(func() error {
+			print(i)
+			print(v)
+			return nil
+		})
+	}
+}
+
+func _() {
+	g := new(errgroup.Group)
+	loop(g) // the analyzer is not "type inter-procedural" so no findings are reported
+}
+
+type T[P any] struct {
+	a P
+}
+
+func (t T[P]) Go(func() error) {}
+
+func _(g T[errgroup.Group]) {
+	var s []int
+	for i, v := range s {
+		// The field g.a has type errgroup.Group, so g.a.Go is the method "(*...errgroup.Group).Go".
+		g.a.Go(func() error {
+			print(i)  // want "loop variable i captured by func literal"
+			print(v)  // want "loop variable v captured by func literal"
+			return nil
+		})
+	}
+}
\ No newline at end of file
diff --git a/go/analysis/passes/lostcancel/lostcancel_test.go b/go/analysis/passes/lostcancel/lostcancel_test.go
index a1d8f85..bda0293 100644
--- a/go/analysis/passes/lostcancel/lostcancel_test.go
+++ b/go/analysis/passes/lostcancel/lostcancel_test.go
@@ -5,13 +5,17 @@
 package lostcancel_test
 
 import (
-	"testing"
-
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/lostcancel"
+	"golang.org/x/tools/internal/typeparams"
+	"testing"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, lostcancel.Analyzer, "a", "b")
+	tests := []string{"a", "b"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, lostcancel.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/lostcancel/testdata/src/typeparams/typeparams.go b/go/analysis/passes/lostcancel/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..d6d0b5f
--- /dev/null
+++ b/go/analysis/passes/lostcancel/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,59 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the lostcancel checker.
+
+//go:build go1.18
+
+package typeparams
+
+import (
+	"context"
+	"time"
+)
+
+var bg = context.Background()
+
+func _[T any]() {
+	var _, cancel = context.WithCancel(bg) // want `the cancel function is not used on all paths \(possible context leak\)`
+	if false {
+		_ = cancel
+	}
+} // want "this return statement may be reached without using the cancel var defined on line 19"
+
+func _[T any]() {
+	_, cancel := context.WithCancel(bg)
+	defer cancel() // ok
+}
+
+// User-defined Context that matches type "context.Context"
+type C1[P1 any, P2 any] interface {
+	Deadline() (deadline time.Time, ok P1)
+	Done() <-chan struct{}
+	Err() error
+	Value(key P2) P2
+}
+
+func _(bg C1[bool, interface{}]) {
+	ctx, _ := context.WithCancel(bg)    // want "the cancel function returned by context.WithCancel should be called, not discarded, to avoid a context leak"
+	ctx, _ = context.WithTimeout(bg, 0) // want "the cancel function returned by context.WithTimeout should be called, not discarded, to avoid a context leak"
+	_ = ctx
+}
+
+// User-defined Context that doesn't match type "context.Context"
+type C2[P any] interface {
+	WithCancel(parent C1[P, bool]) (ctx C1[P, bool], cancel func())
+}
+
+func _(c C2[interface{}]) {
+	ctx, _ := c.WithCancel(nil) // not "context.WithCancel()"
+	_ = ctx
+}
+
+// Further regression test for Go issue 16143.
+func _() {
+	type C[P any] struct{ f func() P }
+	var x C[int]
+	x.f()
+}
diff --git a/go/analysis/passes/nilfunc/nilfunc.go b/go/analysis/passes/nilfunc/nilfunc.go
index cd42c98..e4c66df 100644
--- a/go/analysis/passes/nilfunc/nilfunc.go
+++ b/go/analysis/passes/nilfunc/nilfunc.go
@@ -14,6 +14,7 @@
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = `check for useless comparisons between functions and nil
@@ -59,6 +60,12 @@
 			obj = pass.TypesInfo.Uses[v]
 		case *ast.SelectorExpr:
 			obj = pass.TypesInfo.Uses[v.Sel]
+		case *ast.IndexExpr, *typeparams.IndexListExpr:
+			// Check generic functions such as "f[T1,T2]".
+			x, _, _, _ := typeparams.UnpackIndexExpr(v)
+			if id, ok := x.(*ast.Ident); ok {
+				obj = pass.TypesInfo.Uses[id]
+			}
 		default:
 			return
 		}
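
A sketch (not part of the patch) of the comparisons that unpacking index expressions makes reportable, following the new nilfunc testdata:

```go
package example

func f[P any]() {}

func g[P1, P2 any](P1) {}

func compare[P any]() {
	if f[P] == nil { // reported: comparison of function f == nil is always false
		panic("can't happen")
	}
	if g[P, int] == nil { // reported: the f[T1, T2] form is unpacked via IndexListExpr
		panic("can't happen")
	}
}
```
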
diff --git a/go/analysis/passes/nilfunc/nilfunc_test.go b/go/analysis/passes/nilfunc/nilfunc_test.go
index 6eac063..e147a44 100644
--- a/go/analysis/passes/nilfunc/nilfunc_test.go
+++ b/go/analysis/passes/nilfunc/nilfunc_test.go
@@ -5,13 +5,17 @@
 package nilfunc_test
 
 import (
-	"testing"
-
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/nilfunc"
+	"golang.org/x/tools/internal/typeparams"
+	"testing"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, nilfunc.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, nilfunc.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/nilfunc/testdata/src/typeparams/typeparams.go b/go/analysis/passes/nilfunc/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..6346609
--- /dev/null
+++ b/go/analysis/passes/nilfunc/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,52 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the nilfunc checker.
+
+//go:build go1.18
+
+package typeparams
+
+func f[P any]() {}
+
+func g[P1 any, P2 any](x P1) {}
+
+var f1 = f[int]
+
+type T1[P any] struct {
+	f func() P
+}
+
+type T2[P1 any, P2 any] struct {
+	g func(P1) P2
+}
+
+func Comparison[P any](f2 func() T1[P]) {
+	var t1 T1[P]
+	var t2 T2[P, int]
+	var fn func()
+	if fn == nil || f1 == nil || f2 == nil || t1.f == nil || t2.g == nil {
+		// no error; these func vars or fields may be nil
+	}
+	if f[P] == nil { // want "comparison of function f == nil is always false"
+		panic("can't happen")
+	}
+	if f[int] == nil { // want "comparison of function f == nil is always false"
+		panic("can't happen")
+	}
+	if g[P, int] == nil { // want "comparison of function g == nil is always false"
+		panic("can't happen")
+	}
+}
+
+func Index[P any](a [](func() P)) {
+	if a[1] == nil {
+		// no error
+	}
+	var t1 []T1[P]
+	var t2 [][]T2[P, P]
+	if t1[1].f == nil || t2[0][1].g == nil {
+		// no error
+	}
+}
\ No newline at end of file
diff --git a/go/analysis/passes/nilness/nilness.go b/go/analysis/passes/nilness/nilness.go
index 2eb782b..8fd8cd0 100644
--- a/go/analysis/passes/nilness/nilness.go
+++ b/go/analysis/passes/nilness/nilness.go
@@ -342,7 +342,7 @@
 // ChangeInterface, have transitive nilness, such that if you know the
 // underlying value is nil, you also know the value itself is nil, and vice
 // versa. This operation allows callers to match on any of the related values
-// in analyses, rather than just the one form of the value that happend to
+// in analyses, rather than just the one form of the value that happened to
 // appear in a comparison.
 //
 // This work must be in addition to unwrapping values within nilnessOf because
diff --git a/go/analysis/passes/nilness/testdata/src/a/a.go b/go/analysis/passes/nilness/testdata/src/a/a.go
index b9426f4..f4d8f45 100644
--- a/go/analysis/passes/nilness/testdata/src/a/a.go
+++ b/go/analysis/passes/nilness/testdata/src/a/a.go
@@ -158,3 +158,26 @@
 func unknown() bool {
 	return false
 }
+
+func f10(a interface{}) {
+	switch a.(type) {
+	case nil:
+		return
+	}
+	switch a.(type) {
+	case nil: // want "impossible condition: non-nil == nil"
+		return
+	}
+}
+
+func f11(a interface{}) {
+	switch a {
+	case nil:
+		return
+	}
+	switch a {
+	case 5,
+		nil: // want "impossible condition: non-nil == nil"
+		return
+	}
+}
diff --git a/go/analysis/passes/printf/printf.go b/go/analysis/passes/printf/printf.go
index de0369a..dee37d7 100644
--- a/go/analysis/passes/printf/printf.go
+++ b/go/analysis/passes/printf/printf.go
@@ -25,6 +25,7 @@
 	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
 	"golang.org/x/tools/go/ast/inspector"
 	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func init() {
@@ -452,8 +453,15 @@
 	if idx >= len(call.Args) {
 		return "", false
 	}
-	arg := call.Args[idx]
-	lit := pass.TypesInfo.Types[arg].Value
+	return stringConstantExpr(pass, call.Args[idx])
+}
+
+// stringConstantExpr returns expression's string constant value.
+//
+// ("", false) is returned if expression isn't a string
+// constant.
+func stringConstantExpr(pass *analysis.Pass, expr ast.Expr) (string, bool) {
+	lit := pass.TypesInfo.Types[expr].Value
 	if lit != nil && lit.Kind() == constant.String {
 		return constant.StringVal(lit), true
 	}
@@ -513,7 +521,12 @@
 func isFormatter(typ types.Type) bool {
 	// If the type is an interface, the value it holds might satisfy fmt.Formatter.
 	if _, ok := typ.Underlying().(*types.Interface); ok {
-		return true
+		// Don't assume type parameters could be formatters. With the greater
+		// expressiveness of constraint interface syntax we expect more type safety
+		// when using type parameters.
+		if !typeparams.IsTypeParam(typ) {
+			return true
+		}
 	}
 	obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "Format")
 	fn, ok := obj.(*types.Func)
@@ -834,8 +847,9 @@
 	}
 
 	// Could current arg implement fmt.Formatter?
+	// Skip check for the %w verb, which requires an error.
 	formatter := false
-	if state.argNum < len(call.Args) {
+	if v.typ != argError && state.argNum < len(call.Args) {
 		if tv, ok := pass.TypesInfo.Types[call.Args[state.argNum]]; ok {
 			formatter = isFormatter(tv.Type)
 		}
@@ -871,8 +885,12 @@
 			return
 		}
 		arg := call.Args[argNum]
-		if !matchArgType(pass, argInt, nil, arg) {
-			pass.ReportRangef(call, "%s format %s uses non-int %s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg))
+		if reason, ok := matchArgType(pass, argInt, arg); !ok {
+			details := ""
+			if reason != "" {
+				details = " (" + reason + ")"
+			}
+			pass.ReportRangef(call, "%s format %s uses non-int %s%s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg), details)
 			return false
 		}
 	}
@@ -889,12 +907,16 @@
 		pass.ReportRangef(call, "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.Format(pass.Fset, arg))
 		return false
 	}
-	if !matchArgType(pass, v.typ, nil, arg) {
+	if reason, ok := matchArgType(pass, v.typ, arg); !ok {
 		typeString := ""
 		if typ := pass.TypesInfo.Types[arg].Type; typ != nil {
 			typeString = typ.String()
 		}
-		pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString)
+		details := ""
+		if reason != "" {
+			details = " (" + reason + ")"
+		}
+		pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s%s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString, details)
 		return false
 	}
 	if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) {
@@ -1052,10 +1074,10 @@
 	}
 
 	arg := args[0]
-	if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
-		// Ignore trailing % character in lit.Value.
+	if s, ok := stringConstantExpr(pass, arg); ok {
+		// Ignore trailing % character.
 		// The % in "abc 0.0%" couldn't be a formatting directive.
-		s := strings.TrimSuffix(lit.Value, `%"`)
+		s = strings.TrimSuffix(s, "%")
 		if strings.Contains(s, "%") {
 			m := printFormatRE.FindStringSubmatch(s)
 			if m != nil {
@@ -1066,9 +1088,8 @@
 	if strings.HasSuffix(fn.Name(), "ln") {
 		// The last item, if a string, should not have a newline.
 		arg = args[len(args)-1]
-		if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
-			str, _ := strconv.Unquote(lit.Value)
-			if strings.HasSuffix(str, "\n") {
+		if s, ok := stringConstantExpr(pass, arg); ok {
+			if strings.HasSuffix(s, "\n") {
 				pass.ReportRangef(call, "%s arg list ends with redundant newline", fn.FullName())
 			}
 		}
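
A sketch (not part of the patch) of what evaluating constant string expressions, rather than only *ast.BasicLit literals, now catches; both lines come from the testdata updates below.

```go
package example

import "fmt"

func notes() {
	// Constant string concatenations are now folded before inspection.
	fmt.Println("%s"+" there", "hi") // reported: possible formatting directive %s
	fmt.Println("foo" + "\n")        // reported: arg list ends with redundant newline
}
```
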
diff --git a/go/analysis/passes/printf/printf_test.go b/go/analysis/passes/printf/printf_test.go
index fd22cf6..142afa1 100644
--- a/go/analysis/passes/printf/printf_test.go
+++ b/go/analysis/passes/printf/printf_test.go
@@ -9,10 +9,16 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/printf"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
 	printf.Analyzer.Flags.Set("funcs", "Warn,Warnf")
-	analysistest.Run(t, testdata, printf.Analyzer, "a", "b", "nofmt")
+
+	tests := []string{"a", "b", "nofmt"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, printf.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/printf/testdata/src/a/a.go b/go/analysis/passes/printf/testdata/src/a/a.go
index 89ef9ba..5eca317 100644
--- a/go/analysis/passes/printf/testdata/src/a/a.go
+++ b/go/analysis/passes/printf/testdata/src/a/a.go
@@ -51,6 +51,10 @@
 func (errorTest5) error() { // niladic; don't complain if no args (was bug)
 }
 
+type errorTestOK int
+
+func (errorTestOK) Error() string { return "" }
+
 // This function never executes, but it serves as a simple test for the program.
 // Test with make test.
 func PrintfTests() {
@@ -149,6 +153,7 @@
 	fmt.Println("%s", "hi")                     // want "fmt.Println call has possible formatting directive %s"
 	fmt.Println("%v", "hi")                     // want "fmt.Println call has possible formatting directive %v"
 	fmt.Println("%T", "hi")                     // want "fmt.Println call has possible formatting directive %T"
+	fmt.Println("%s"+" there", "hi")            // want "fmt.Println call has possible formatting directive %s"
 	fmt.Println("0.0%")                         // correct (trailing % couldn't be a formatting directive)
 	fmt.Printf("%s", "hi", 3)                   // want "fmt.Printf call needs 1 arg but has 2 args"
 	_ = fmt.Sprintf("%"+("s"), "hi", 3)         // want "fmt.Sprintf call needs 1 arg but has 2 args"
@@ -327,12 +332,19 @@
 	dbg("", 1) // no error "call has arguments but no formatting directive"
 
 	// %w
+	var errSubset interface {
+		Error() string
+		A()
+	}
 	_ = fmt.Errorf("%w", err)               // OK
 	_ = fmt.Errorf("%#w", err)              // OK
 	_ = fmt.Errorf("%[2]w %[1]s", "x", err) // OK
 	_ = fmt.Errorf("%[2]w %[1]s", e, "x")   // want `fmt.Errorf format %\[2\]w has arg "x" of wrong type string`
 	_ = fmt.Errorf("%w", "x")               // want `fmt.Errorf format %w has arg "x" of wrong type string`
 	_ = fmt.Errorf("%w %w", err, err)       // want `fmt.Errorf call has more than one error-wrapping directive %w`
+	_ = fmt.Errorf("%w", interface{}(nil))  // want `fmt.Errorf format %w has arg interface{}\(nil\) of wrong type interface{}`
+	_ = fmt.Errorf("%w", errorTestOK(0))    // concrete value implements error
+	_ = fmt.Errorf("%w", errSubset)         // interface value implements error
 	fmt.Printf("%w", err)                   // want `fmt.Printf does not support error-wrapping directive %w`
 	var wt *testing.T
 	wt.Errorf("%w", err)          // want `\(\*testing.common\).Errorf does not support error-wrapping directive %w`
@@ -686,6 +698,7 @@
 type unexportedStringer struct {
 	t ptrStringer
 }
+
 type unexportedStringerOtherFields struct {
 	s string
 	t ptrStringer
@@ -696,6 +709,7 @@
 type unexportedError struct {
 	e error
 }
+
 type unexportedErrorOtherFields struct {
 	s string
 	e error
@@ -757,9 +771,10 @@
 	fmt.Printf("%s", uei)       // want "Printf format %s has arg uei of wrong type a.unexportedErrorInterface"
 	fmt.Println("foo\n", "bar") // not an error
 
-	fmt.Println("foo\n")  // want "Println arg list ends with redundant newline"
-	fmt.Println("foo\\n") // not an error
-	fmt.Println(`foo\n`)  // not an error
+	fmt.Println("foo\n")      // want "Println arg list ends with redundant newline"
+	fmt.Println("foo" + "\n") // want "Println arg list ends with redundant newline"
+	fmt.Println("foo\\n")     // not an error
+	fmt.Println(`foo\n`)      // not an error
 
 	intSlice := []int{3, 4}
 	fmt.Printf("%s", intSlice) // want `fmt.Printf format %s has arg intSlice of wrong type \[\]int`
diff --git a/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go b/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go
new file mode 100644
index 0000000..76a9a20
--- /dev/null
+++ b/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go
@@ -0,0 +1,123 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typeparams
+
+import "fmt"
+
+func TestBasicTypeParams[T interface{ ~int }, E error, F fmt.Formatter, S fmt.Stringer, A any](t T, e E, f F, s S, a A) {
+	fmt.Printf("%d", t)
+	fmt.Printf("%s", t) // want "wrong type.*contains ~int"
+	fmt.Printf("%v", t)
+	fmt.Printf("%d", e) // want "wrong type"
+	fmt.Printf("%s", e)
+	fmt.Errorf("%w", e)
+	fmt.Printf("%a", f)
+	fmt.Printf("%d", f)
+	fmt.Printf("%T", f.Format)
+	fmt.Printf("%p", f.Format)
+	fmt.Printf("%s", s)
+	fmt.Errorf("%w", s) // want "wrong type"
+	fmt.Printf("%d", a) // want "wrong type"
+	fmt.Printf("%s", a) // want "wrong type"
+	fmt.Printf("%v", a)
+	fmt.Printf("%T", a)
+}
+
+type Constraint interface {
+	~int
+}
+
+func TestNamedConstraints_Issue49597[T Constraint](t T) {
+	fmt.Printf("%d", t)
+	fmt.Printf("%s", t) // want "wrong type.*contains ~int"
+}
+
+func TestNestedTypeParams[T interface{ ~int }, S interface{ ~string }]() {
+	var x struct {
+		f int
+		t T
+	}
+	fmt.Printf("%d", x)
+	fmt.Printf("%s", x) // want "wrong type"
+	var y struct {
+		f string
+		t S
+	}
+	fmt.Printf("%d", y) // want "wrong type"
+	fmt.Printf("%s", y)
+	var m1 map[T]T
+	fmt.Printf("%d", m1)
+	fmt.Printf("%s", m1) // want "wrong type"
+	var m2 map[S]S
+	fmt.Printf("%d", m2) // want "wrong type"
+	fmt.Printf("%s", m2)
+}
+
+type R struct {
+	F []R
+}
+
+func TestRecursiveTypeDefinition() {
+	var r []R
+	fmt.Printf("%d", r) // No error: avoids infinite recursion.
+}
+
+func TestRecursiveTypeParams[T1 ~[]T2, T2 ~[]T1 | string, T3 ~struct{ F T3 }](t1 T1, t2 T2, t3 T3) {
+	// No error is reported on the following lines to avoid infinite recursion.
+	fmt.Printf("%s", t1)
+	fmt.Printf("%s", t2)
+	fmt.Printf("%s", t3)
+}
+
+func TestRecursivePointers[T1 ~*T2, T2 ~*T1](t1 T1, t2 T2) {
+	// No error: we can't determine if pointer rules apply.
+	fmt.Printf("%s", t1)
+	fmt.Printf("%s", t2)
+}
+
+func TestEmptyTypeSet[T interface {
+	int | string
+	float64
+}](t T) {
+	fmt.Printf("%s", t) // No error: empty type set.
+}
+
+func TestPointerRules[T ~*[]int | *[2]int](t T) {
+	var slicePtr *[]int
+	var arrayPtr *[2]int
+	fmt.Printf("%d", slicePtr)
+	fmt.Printf("%d", arrayPtr)
+	fmt.Printf("%d", t)
+}
+
+func TestInterfacePromotion[E interface {
+	~int
+	Error() string
+}, S interface {
+	float64
+	String() string
+}](e E, s S) {
+	fmt.Printf("%d", e)
+	fmt.Printf("%s", e)
+	fmt.Errorf("%w", e)
+	fmt.Printf("%d", s) // want "wrong type.*contains float64"
+	fmt.Printf("%s", s)
+	fmt.Errorf("%w", s) // want "wrong type"
+}
+
+type myInt int
+
+func TestTermReduction[T1 interface{ ~int | string }, T2 interface {
+	~int | string
+	myInt
+}](t1 T1, t2 T2) {
+	fmt.Printf("%d", t1) // want "wrong type.*contains string"
+	fmt.Printf("%s", t1) // want "wrong type.*contains ~int"
+	fmt.Printf("%d", t2)
+	fmt.Printf("%s", t2) // want "wrong type.*contains typeparams.myInt"
+}
diff --git a/go/analysis/passes/printf/testdata/src/typeparams/wrappers.go b/go/analysis/passes/printf/testdata/src/typeparams/wrappers.go
new file mode 100644
index 0000000..df8a6fa
--- /dev/null
+++ b/go/analysis/passes/printf/testdata/src/typeparams/wrappers.go
@@ -0,0 +1,24 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typeparams
+
+import "fmt"
+
+type N[T any] int
+
+func (N[P]) Wrapf(p P, format string, args ...interface{}) { // want Wrapf:"printfWrapper"
+	fmt.Printf(format, args...)
+}
+
+func (*N[P]) PtrWrapf(p P, format string, args ...interface{}) { // want PtrWrapf:"printfWrapper"
+	fmt.Printf(format, args...)
+}
+
+func Printf[P any](p P, format string, args ...interface{}) { // want Printf:"printfWrapper"
+	fmt.Printf(format, args...)
+}
diff --git a/go/analysis/passes/printf/types.go b/go/analysis/passes/printf/types.go
index 6a5fae4..270e917 100644
--- a/go/analysis/passes/printf/types.go
+++ b/go/analysis/passes/printf/types.go
@@ -5,45 +5,60 @@
 package printf
 
 import (
+	"fmt"
 	"go/ast"
 	"go/types"
 
 	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
 
-// matchArgType reports an error if printf verb t is not appropriate
-// for operand arg.
+// matchArgType reports an error if printf verb t is not appropriate for
+// operand arg.
 //
-// typ is used only for recursive calls; external callers must supply nil.
-//
-// (Recursion arises from the compound types {map,chan,slice} which
-// may be printed with %d etc. if that is appropriate for their element
-// types.)
-func matchArgType(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr) bool {
-	return matchArgTypeInternal(pass, t, typ, arg, make(map[types.Type]bool))
-}
-
-// matchArgTypeInternal is the internal version of matchArgType. It carries a map
-// remembering what types are in progress so we don't recur when faced with recursive
-// types or mutually recursive types.
-func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
+// If arg is a type parameter, the verb t must be appropriate for every type in
+// the type parameter type set.
+func matchArgType(pass *analysis.Pass, t printfArgType, arg ast.Expr) (reason string, ok bool) {
 	// %v, %T accept any argument type.
 	if t == anyType {
-		return true
-	}
-	if typ == nil {
-		// external call
-		typ = pass.TypesInfo.Types[arg].Type
-		if typ == nil {
-			return true // probably a type check problem
-		}
+		return "", true
 	}
 
+	typ := pass.TypesInfo.Types[arg].Type
+	if typ == nil {
+		return "", true // probably a type check problem
+	}
+
+	m := &argMatcher{t: t, seen: make(map[types.Type]bool)}
+	ok = m.match(typ, true)
+	return m.reason, ok
+}
+
+// argMatcher recursively matches types against the printfArgType t.
+//
+// To short-circuit recursion, it keeps track of types that have already been
+// matched (or are in the process of being matched) via the seen map. Recursion
+// arises from the compound types {map,chan,slice} which may be printed with %d
+// etc. if that is appropriate for their element types, as well as from type
+// parameters, which are expanded to the constituents of their type set.
+//
+// The reason field may be set to report the cause of the mismatch.
+type argMatcher struct {
+	t      printfArgType
+	seen   map[types.Type]bool
+	reason string
+}
+
+// match checks if typ matches m's printf arg type. If topLevel is true, typ is
+// the actual type of the printf arg, for which special rules apply. As a
+// special case, top level type parameters pass topLevel=true when checking for
+// matches among the constituents of their type set, as type arguments will
+// replace the type parameter at compile time.
+func (m *argMatcher) match(typ types.Type, topLevel bool) bool {
 	// %w accepts only errors.
-	if t == argError {
+	if m.t == argError {
 		return types.ConvertibleTo(typ, errorType)
 	}
 
@@ -51,65 +66,122 @@
 	if isFormatter(typ) {
 		return true
 	}
+
 	// If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
-	if t&argString != 0 && isConvertibleToString(pass, typ) {
+	if m.t&argString != 0 && isConvertibleToString(typ) {
+		return true
+	}
+
+	if typ, _ := typ.(*typeparams.TypeParam); typ != nil {
+		// Avoid infinite recursion through type parameters.
+		if m.seen[typ] {
+			return true
+		}
+		m.seen[typ] = true
+		terms, err := typeparams.StructuralTerms(typ)
+		if err != nil {
+			return true // invalid type (possibly an empty type set)
+		}
+
+		if len(terms) == 0 {
+			// No restrictions on the underlying type of typ. Type parameters
+			// implementing error, fmt.Formatter, or fmt.Stringer were handled
+			// above, and %v and %T were handled in matchArgType. We're about to
+			// check restrictions on the underlying type; if it is unrestricted,
+			// there must be an element of the type set that violates one of the
+			// arg type checks below, so we can safely return false here.
+
+			if m.t == anyType { // anyType must have already been handled.
+				panic("unexpected printfArgType")
+			}
+			return false
+		}
+
+		// Only report a reason if typ is the argument type, otherwise it won't
+		// make sense. Note that it is not sufficient to check topLevel here,
+		// as type parameters can have a type set consisting of other type
+		// parameters.
+		reportReason := len(m.seen) == 1
+
+		for _, term := range terms {
+			if !m.match(term.Type(), topLevel) {
+				if reportReason {
+					if term.Tilde() {
+						m.reason = fmt.Sprintf("contains ~%s", term.Type())
+					} else {
+						m.reason = fmt.Sprintf("contains %s", term.Type())
+					}
+				}
+				return false
+			}
+		}
 		return true
 	}
 
 	typ = typ.Underlying()
-	if inProgress[typ] {
-		// We're already looking at this type. The call that started it will take care of it.
+	if m.seen[typ] {
+		// We've already considered typ, or are in the process of considering it.
+		// In case we've already considered typ, it must have been valid (else we
+		// would have stopped matching). In case we're in the process of
+		// considering it, we must avoid infinite recursion.
+		//
+		// There are some pathological cases where returning true here is
+		// incorrect, for example `type R struct { F []R }`, but these are
+		// acceptable false negatives.
 		return true
 	}
-	inProgress[typ] = true
+	m.seen[typ] = true
 
 	switch typ := typ.(type) {
 	case *types.Signature:
-		return t == argPointer
+		return m.t == argPointer
 
 	case *types.Map:
-		return t == argPointer ||
-			// Recur: map[int]int matches %d.
-			(matchArgTypeInternal(pass, t, typ.Key(), arg, inProgress) && matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress))
+		if m.t == argPointer {
+			return true
+		}
+		// Recur: map[int]int matches %d.
+		return m.match(typ.Key(), false) && m.match(typ.Elem(), false)
 
 	case *types.Chan:
-		return t&argPointer != 0
+		return m.t&argPointer != 0
 
 	case *types.Array:
 		// Same as slice.
-		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
+		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && m.t&argString != 0 {
 			return true // %s matches []byte
 		}
 		// Recur: []int matches %d.
-		return matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress)
+		return m.match(typ.Elem(), false)
 
 	case *types.Slice:
 		// Same as array.
-		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
+		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && m.t&argString != 0 {
 			return true // %s matches []byte
 		}
-		if t == argPointer {
+		if m.t == argPointer {
 			return true // %p prints a slice's 0th element
 		}
 		// Recur: []int matches %d. But watch out for
 		//	type T []T
 		// If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below.
-		return matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress)
+		return m.match(typ.Elem(), false)
 
 	case *types.Pointer:
 		// Ugly, but dealing with an edge case: a known pointer to an invalid type,
 		// probably something from a failed import.
-		if typ.Elem().String() == "invalid type" {
-			if false {
-				pass.Reportf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", analysisutil.Format(pass.Fset, arg))
-			}
+		if typ.Elem() == types.Typ[types.Invalid] {
 			return true // special case
 		}
 		// If it's actually a pointer with %p, it prints as one.
-		if t == argPointer {
+		if m.t == argPointer {
 			return true
 		}
 
+		if typeparams.IsTypeParam(typ.Elem()) {
+			return true // We don't know whether the logic below applies. Give up.
+		}
+
 		under := typ.Elem().Underlying()
 		switch under.(type) {
 		case *types.Struct: // see below
@@ -118,19 +190,31 @@
 		case *types.Map: // see below
 		default:
 			// Check whether the rest can print pointers.
-			return t&argPointer != 0
+			return m.t&argPointer != 0
 		}
-		// If it's a top-level pointer to a struct, array, slice, or
+		// If it's a top-level pointer to a struct, array, slice, type param, or
 		// map, that's equivalent in our analysis to whether we can
 		// print the type being pointed to. Pointers in nested levels
 		// are not supported to minimize fmt running into loops.
-		if len(inProgress) > 1 {
+		if !topLevel {
 			return false
 		}
-		return matchArgTypeInternal(pass, t, under, arg, inProgress)
+		return m.match(under, false)
 
 	case *types.Struct:
-		return matchStructArgType(pass, t, typ, arg, inProgress)
+		// report whether all the elements of the struct match the expected type. For
+		// instance, with "%d" all the elements must be printable with the "%d" format.
+		for i := 0; i < typ.NumFields(); i++ {
+			typf := typ.Field(i)
+			if !m.match(typf.Type(), false) {
+				return false
+			}
+			if m.t&argString != 0 && !typf.Exported() && isConvertibleToString(typf.Type()) {
+				// Issue #17798: unexported Stringer or error cannot be properly formatted.
+				return false
+			}
+		}
+		return true
 
 	case *types.Interface:
 		// There's little we can do.
@@ -142,7 +226,7 @@
 		switch typ.Kind() {
 		case types.UntypedBool,
 			types.Bool:
-			return t&argBool != 0
+			return m.t&argBool != 0
 
 		case types.UntypedInt,
 			types.Int,
@@ -156,35 +240,32 @@
 			types.Uint32,
 			types.Uint64,
 			types.Uintptr:
-			return t&argInt != 0
+			return m.t&argInt != 0
 
 		case types.UntypedFloat,
 			types.Float32,
 			types.Float64:
-			return t&argFloat != 0
+			return m.t&argFloat != 0
 
 		case types.UntypedComplex,
 			types.Complex64,
 			types.Complex128:
-			return t&argComplex != 0
+			return m.t&argComplex != 0
 
 		case types.UntypedString,
 			types.String:
-			return t&argString != 0
+			return m.t&argString != 0
 
 		case types.UnsafePointer:
-			return t&(argPointer|argInt) != 0
+			return m.t&(argPointer|argInt) != 0
 
 		case types.UntypedRune:
-			return t&(argInt|argRune) != 0
+			return m.t&(argInt|argRune) != 0
 
 		case types.UntypedNil:
 			return false
 
 		case types.Invalid:
-			if false {
-				pass.Reportf(arg.Pos(), "printf argument %v has invalid or unknown type", analysisutil.Format(pass.Fset, arg))
-			}
 			return true // Probably a type check problem.
 		}
 		panic("unreachable")
@@ -193,7 +274,7 @@
 	return false
 }
 
-func isConvertibleToString(pass *analysis.Pass, typ types.Type) bool {
+func isConvertibleToString(typ types.Type) bool {
 	if bt, ok := typ.(*types.Basic); ok && bt.Kind() == types.UntypedNil {
 		// We explicitly don't want untyped nil, which is
 		// convertible to both of the interfaces below, as it
@@ -228,19 +309,3 @@
 	b, ok := t.(*types.Basic)
 	return ok && b.Kind() == kind
 }
-
-// matchStructArgType reports whether all the elements of the struct match the expected
-// type. For instance, with "%d" all the elements must be printable with the "%d" format.
-func matchStructArgType(pass *analysis.Pass, t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool {
-	for i := 0; i < typ.NumFields(); i++ {
-		typf := typ.Field(i)
-		if !matchArgTypeInternal(pass, t, typf.Type(), arg, inProgress) {
-			return false
-		}
-		if t&argString != 0 && !typf.Exported() && isConvertibleToString(pass, typf.Type()) {
-			// Issue #17798: unexported Stringer or error cannot be properly formatted.
-			return false
-		}
-	}
-	return true
-}
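
For reference, a minimal standalone sketch (not the analyzer's code; the node type below is made up) of why the seen map above terminates on self-referential types such as `type T []T`: each type is recorded before its elements are matched, so revisiting it returns immediately instead of recursing forever.

package main

import "fmt"

// node stands in for a types.Type; a node may contain itself, mirroring
// recursive types like `type T []T`.
type node struct {
	name     string
	children []*node
}

// match mimics the argMatcher guard above: record the node as seen before
// recursing, and treat an already-seen node as a match.
func match(n *node, seen map[*node]bool) bool {
	if seen[n] {
		return true // already considered, or in progress: stop recursing
	}
	seen[n] = true
	for _, c := range n.children {
		if !match(c, seen) {
			return false
		}
	}
	return true
}

func main() {
	t := &node{name: "T"}
	t.children = append(t.children, t)      // T contains T
	fmt.Println(match(t, map[*node]bool{})) // true, and the call terminates
}
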
diff --git a/go/analysis/passes/shift/shift.go b/go/analysis/passes/shift/shift.go
index 1f3df07..e968f27 100644
--- a/go/analysis/passes/shift/shift.go
+++ b/go/analysis/passes/shift/shift.go
@@ -14,11 +14,14 @@
 	"go/ast"
 	"go/constant"
 	"go/token"
+	"go/types"
+	"math"
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = "check for shifts that equal or exceed the width of the integer"
@@ -93,9 +96,36 @@
 	if t == nil {
 		return
 	}
-	size := 8 * pass.TypesSizes.Sizeof(t)
-	if amt >= size {
+	var structuralTypes []types.Type
+	switch t := t.(type) {
+	case *typeparams.TypeParam:
+		terms, err := typeparams.StructuralTerms(t)
+		if err != nil {
+			return // invalid type
+		}
+		for _, term := range terms {
+			structuralTypes = append(structuralTypes, term.Type())
+		}
+	default:
+		structuralTypes = append(structuralTypes, t)
+	}
+	sizes := make(map[int64]struct{})
+	for _, t := range structuralTypes {
+		size := 8 * pass.TypesSizes.Sizeof(t)
+		sizes[size] = struct{}{}
+	}
+	minSize := int64(math.MaxInt64)
+	for size := range sizes {
+		if size < minSize {
+			minSize = size
+		}
+	}
+	if amt >= minSize {
 		ident := analysisutil.Format(pass.Fset, x)
-		pass.ReportRangef(node, "%s (%d bits) too small for shift of %d", ident, size, amt)
+		qualifier := ""
+		if len(sizes) > 1 {
+			qualifier = "may be "
+		}
+		pass.ReportRangef(node, "%s (%s%d bits) too small for shift of %d", ident, qualifier, minSize, amt)
 	}
 }
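
As a rough illustration of the logic above (a standalone sketch, not the analyzer itself; the bit widths are passed in by hand rather than computed from go/types): the diagnostic fires once the shift amount reaches the smallest width among the constraint's structural terms, and the "may be" qualifier appears only when the terms have more than one width.

package main

import "fmt"

// report mirrors the diagnostic wording above for a hypothetical list of bit
// widths gathered from a constraint such as ~int8 | int16 | int64.
func report(ident string, widths []int64, amt int64) string {
	sizes := make(map[int64]struct{})
	minSize := widths[0]
	for _, w := range widths {
		sizes[w] = struct{}{}
		if w < minSize {
			minSize = w
		}
	}
	if amt < minSize {
		return "" // no diagnostic
	}
	qualifier := ""
	if len(sizes) > 1 {
		qualifier = "may be "
	}
	return fmt.Sprintf("%s (%s%d bits) too small for shift of %d", ident, qualifier, minSize, amt)
}

func main() {
	fmt.Println(report("d", []int64{8, 16, 64}, 8)) // d (may be 8 bits) too small for shift of 8
	fmt.Println(report("s", []int64{8, 8}, 8))      // s (8 bits) too small for shift of 8
}
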
diff --git a/go/analysis/passes/shift/shift_test.go b/go/analysis/passes/shift/shift_test.go
index 8b41b60..e60943e 100644
--- a/go/analysis/passes/shift/shift_test.go
+++ b/go/analysis/passes/shift/shift_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/shift"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, shift.Analyzer, "a")
+	pkgs := []string{"a"}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.Run(t, testdata, shift.Analyzer, pkgs...)
 }
diff --git a/go/analysis/passes/shift/testdata/src/typeparams/typeparams.go b/go/analysis/passes/shift/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..a76df88
--- /dev/null
+++ b/go/analysis/passes/shift/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,32 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import "unsafe"
+
+func GenericShiftTest[DifferentSize ~int8|int16|int64, SameSize int8|byte]() {
+	var d DifferentSize
+	_ = d << 7
+	_ = d << 8        // want "d .may be 8 bits. too small for shift of 8"
+	_ = d << 15       // want "d .may be 8 bits. too small for shift of 15"
+	_ = (d + 1) << 8  // want ".d . 1. .may be 8 bits. too small for shift of 8"
+	_ = (d + 1) << 16 // want ".d . 1. .may be 8 bits. too small for shift of 16"
+	_ = d << (7 + 1)  // want "d .may be 8 bits. too small for shift of 8"
+	_ = d >> 8        // want "d .may be 8 bits. too small for shift of 8"
+	d <<= 8           // want "d .may be 8 bits. too small for shift of 8"
+	d >>= 8           // want "d .may be 8 bits. too small for shift of 8"
+
+	// go/types does not compute constant sizes for type parameters, so we do not
+	// report a diagnostic here.
+	_ = d << (8 * DifferentSize(unsafe.Sizeof(d)))
+
+	var s SameSize
+	_ = s << 7
+	_ = s << 8        // want "s .8 bits. too small for shift of 8"
+	_ = s << (7 + 1)  // want "s .8 bits. too small for shift of 8"
+	_ = s >> 8        // want "s .8 bits. too small for shift of 8"
+	s <<= 8           // want "s .8 bits. too small for shift of 8"
+	s >>= 8           // want "s .8 bits. too small for shift of 8"
+}
diff --git a/go/analysis/passes/sortslice/analyzer.go b/go/analysis/passes/sortslice/analyzer.go
index 69a6793..5eb957a 100644
--- a/go/analysis/passes/sortslice/analyzer.go
+++ b/go/analysis/passes/sortslice/analyzer.go
@@ -45,7 +45,8 @@
 			return
 		}
 
-		if fn.FullName() != "sort.Slice" {
+		fnName := fn.FullName()
+		if fnName != "sort.Slice" && fnName != "sort.SliceStable" && fnName != "sort.SliceIsSorted" {
 			return
 		}
 
@@ -115,7 +116,7 @@
 		pass.Report(analysis.Diagnostic{
 			Pos:            call.Pos(),
 			End:            call.End(),
-			Message:        fmt.Sprintf("sort.Slice's argument must be a slice; is called with %s", typ.String()),
+			Message:        fmt.Sprintf("%s's argument must be a slice; is called with %s", fnName, typ.String()),
 			SuggestedFixes: fixes,
 		})
 	})
diff --git a/go/analysis/passes/sortslice/testdata/src/a/a.go b/go/analysis/passes/sortslice/testdata/src/a/a.go
index 3403660..bc6cc16 100644
--- a/go/analysis/passes/sortslice/testdata/src/a/a.go
+++ b/go/analysis/passes/sortslice/testdata/src/a/a.go
@@ -7,6 +7,8 @@
 	i := 5
 	sortFn := func(i, j int) bool { return false }
 	sort.Slice(i, sortFn) // want "sort.Slice's argument must be a slice; is called with int"
+	sort.SliceStable(i, sortFn) // want "sort.SliceStable's argument must be a slice; is called with int"
+	sort.SliceIsSorted(i, sortFn) // want "sort.SliceIsSorted's argument must be a slice; is called with int"
 }
 
 // CorrectSort sorts integers. It should not produce a diagnostic.
@@ -14,6 +16,8 @@
 	s := []int{2, 3, 5, 6}
 	sortFn := func(i, j int) bool { return s[i] < s[j] }
 	sort.Slice(s, sortFn)
+	sort.SliceStable(s, sortFn)
+	sort.SliceIsSorted(s, sortFn)
 }
 
 // CorrectInterface sorts an interface with a slice
@@ -23,6 +27,8 @@
 	s = interface{}([]int{2, 1, 0})
 	sortFn := func(i, j int) bool { return s.([]int)[i] < s.([]int)[j] }
 	sort.Slice(s, sortFn)
+	sort.SliceStable(s, sortFn)
+	sort.SliceIsSorted(s, sortFn)
 }
 
 type slicecompare interface {
@@ -41,6 +47,8 @@
 	var s slicecompare
 	s = intslice([]int{2, 1, 0})
 	sort.Slice(s, s.compare)
+	sort.SliceStable(s, s.compare)
+	sort.SliceIsSorted(s, s.compare)
 }
 
 type mySlice []int
@@ -51,4 +59,6 @@
 	s := mySlice{2, 3, 5, 6}
 	sortFn := func(i, j int) bool { return s[i] < s[j] }
 	sort.Slice(s, sortFn)
+	sort.SliceStable(s, sortFn)
+	sort.SliceIsSorted(s, sortFn)
 }
diff --git a/go/analysis/passes/stdmethods/stdmethods.go b/go/analysis/passes/stdmethods/stdmethods.go
index 64a28ac..cc94971 100644
--- a/go/analysis/passes/stdmethods/stdmethods.go
+++ b/go/analysis/passes/stdmethods/stdmethods.go
@@ -61,7 +61,7 @@
 // we let it go. But if it does have a fmt.ScanState, then the
 // rest has to match.
 var canonicalMethods = map[string]struct{ args, results []string }{
-	"As": {[]string{"interface{}"}, []string{"bool"}}, // errors.As
+	"As": {[]string{"any"}, []string{"bool"}}, // errors.As
 	// "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict
 	"Format":        {[]string{"=fmt.State", "rune"}, []string{}},                      // fmt.Formatter
 	"GobDecode":     {[]string{"[]byte"}, []string{"error"}},                           // gob.GobDecoder
@@ -194,7 +194,9 @@
 func matchParamType(expect string, actual types.Type) bool {
 	expect = strings.TrimPrefix(expect, "=")
 	// Overkill but easy.
-	return typeString(actual) == expect
+	t := typeString(actual)
+	return t == expect ||
+		(t == "any" || t == "interface{}") && (expect == "any" || expect == "interface{}")
 }
 
 var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
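
A small standalone sketch of the relaxed comparison above: since Go 1.18, `any` and `interface{}` are interchangeable spellings of the same type, so the canonical-signature check accepts either form on both sides. The helper below is hypothetical, not part of the analyzer.

package main

import "fmt"

// sameParamType treats "any" and "interface{}" as the same type name,
// mirroring the matchParamType comparison above.
func sameParamType(expect, actual string) bool {
	isAny := func(s string) bool { return s == "any" || s == "interface{}" }
	return actual == expect || (isAny(actual) && isAny(expect))
}

func main() {
	fmt.Println(sameParamType("interface{}", "any")) // true
	fmt.Println(sameParamType("error", "any"))       // false
}
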
diff --git a/go/analysis/passes/stdmethods/stdmethods_test.go b/go/analysis/passes/stdmethods/stdmethods_test.go
index 60b1a53..9bfa032 100644
--- a/go/analysis/passes/stdmethods/stdmethods_test.go
+++ b/go/analysis/passes/stdmethods/stdmethods_test.go
@@ -9,11 +9,16 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/stdmethods"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, stdmethods.Analyzer, "a")
+	pkgs := []string{"a"}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.Run(t, testdata, stdmethods.Analyzer, pkgs...)
 }
 
 func TestAnalyzeEncodingXML(t *testing.T) {
diff --git a/go/analysis/passes/stdmethods/testdata/src/a/a.go b/go/analysis/passes/stdmethods/testdata/src/a/a.go
index 7f9e9ae..c95cf5d 100644
--- a/go/analysis/passes/stdmethods/testdata/src/a/a.go
+++ b/go/analysis/passes/stdmethods/testdata/src/a/a.go
@@ -47,7 +47,7 @@
 
 func (E) Error() string { return "" } // E implements error.
 
-func (E) As()     {} // want `method As\(\) should have signature As\(interface{}\) bool`
+func (E) As()     {} // want `method As\(\) should have signature As\((any|interface\{\})\) bool`
 func (E) Is()     {} // want `method Is\(\) should have signature Is\(error\) bool`
 func (E) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error`
 
@@ -55,6 +55,10 @@
 
 func (F) Error() string { return "" } // Both F and *F implement error.
 
-func (*F) As()     {} // want `method As\(\) should have signature As\(interface{}\) bool`
+func (*F) As()     {} // want `method As\(\) should have signature As\((any|interface\{\})\) bool`
 func (*F) Is()     {} // want `method Is\(\) should have signature Is\(error\) bool`
 func (*F) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error`
+
+type G int
+
+func (G) As(interface{}) bool // ok
diff --git a/go/analysis/passes/stdmethods/testdata/src/a/b.go b/go/analysis/passes/stdmethods/testdata/src/a/b.go
new file mode 100644
index 0000000..c0a16fb
--- /dev/null
+++ b/go/analysis/passes/stdmethods/testdata/src/a/b.go
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package a
+
+type H int
+
+func (H) As(any) bool // ok
diff --git a/go/analysis/passes/stdmethods/testdata/src/typeparams/typeparams.go b/go/analysis/passes/stdmethods/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..72df30d
--- /dev/null
+++ b/go/analysis/passes/stdmethods/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,41 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import "fmt"
+
+type T[P any] int
+
+func (T[_]) Scan(x fmt.ScanState, c byte) {} // want `should have signature Scan\(fmt\.ScanState, rune\) error`
+
+func (T[_]) Format(fmt.State, byte) {} // want `should have signature Format\(fmt.State, rune\)`
+
+type U[P any] int
+
+func (U[_]) Format(byte) {} // no error: first parameter must be fmt.State to trigger check
+
+func (U[P]) GobDecode(P) {} // want `should have signature GobDecode\(\[\]byte\) error`
+
+type V[P any] int // V does not implement error.
+
+func (V[_]) As() T[int]  { return 0 }     // ok - V is not an error
+func (V[_]) Is() bool    { return false } // ok - V is not an error
+func (V[_]) Unwrap() int { return 0 }     // ok - V is not an error
+
+type E[P any] int
+
+func (E[_]) Error() string { return "" } // E implements error.
+
+func (E[P]) As()     {} // want `method As\(\) should have signature As\((any|interface\{\})\) bool`
+func (E[_]) Is()     {} // want `method Is\(\) should have signature Is\(error\) bool`
+func (E[_]) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error`
+
+type F[P any] int
+
+func (F[_]) Error() string { return "" } // Both F and *F implement error.
+
+func (*F[_]) As()     {} // want `method As\(\) should have signature As\((any|interface\{\})\) bool`
+func (*F[_]) Is()     {} // want `method Is\(\) should have signature Is\(error\) bool`
+func (*F[_]) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error`
diff --git a/go/analysis/passes/stringintconv/string.go b/go/analysis/passes/stringintconv/string.go
index 7a00590..e41de80 100644
--- a/go/analysis/passes/stringintconv/string.go
+++ b/go/analysis/passes/stringintconv/string.go
@@ -10,10 +10,12 @@
 	"fmt"
 	"go/ast"
 	"go/types"
+	"strings"
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = `check for string(int) conversions
@@ -36,6 +38,35 @@
 	Run:      run,
 }
 
+// describe returns a string describing the type typ contained within the type
+// set of inType. If non-empty, inName is used as the name of inType (this is
+// necessary so that we can use alias type names that may not be reachable from
+// inType itself).
+func describe(typ, inType types.Type, inName string) string {
+	name := inName
+	if typ != inType {
+		name = typeName(typ)
+	}
+	if name == "" {
+		return ""
+	}
+
+	var parentheticals []string
+	if underName := typeName(typ.Underlying()); underName != "" && underName != name {
+		parentheticals = append(parentheticals, underName)
+	}
+
+	if typ != inType && inName != "" && inName != name {
+		parentheticals = append(parentheticals, "in "+inName)
+	}
+
+	if len(parentheticals) > 0 {
+		name += " (" + strings.Join(parentheticals, ", ") + ")"
+	}
+
+	return name
+}
+
 func typeName(typ types.Type) string {
 	if v, _ := typ.(interface{ Name() string }); v != nil {
 		return v.Name()
@@ -54,6 +85,11 @@
 	inspect.Preorder(nodeFilter, func(n ast.Node) {
 		call := n.(*ast.CallExpr)
 
+		if len(call.Args) != 1 {
+			return
+		}
+		arg := call.Args[0]
+
 		// Retrieve target type name.
 		var tname *types.TypeName
 		switch fun := call.Fun.(type) {
@@ -65,62 +101,119 @@
 		if tname == nil {
 			return
 		}
-		target := tname.Name()
 
-		// Check that target type T in T(v) has an underlying type of string.
-		T, _ := tname.Type().Underlying().(*types.Basic)
-		if T == nil || T.Kind() != types.String {
-			return
-		}
-		if s := T.Name(); target != s {
-			target += " (" + s + ")"
+		// In the conversion T(v) of a value v of type V to a target type T, we
+		// look for types T0 in the type set of T and V0 in the type set of V, such
+		// that V0->T0 is a problematic conversion. If T and V are not type
+		// parameters, this amounts to just checking if V->T is a problematic
+		// conversion.
+
+		// First, find a type T0 in T that has an underlying type of string.
+		T := tname.Type()
+		ttypes, err := structuralTypes(T)
+		if err != nil {
+			return // invalid type
 		}
 
-		// Check that type V of v has an underlying integral type that is not byte or rune.
-		if len(call.Args) != 1 {
-			return
+		var T0 types.Type // string type in the type set of T
+
+		for _, tt := range ttypes {
+			u, _ := tt.Underlying().(*types.Basic)
+			if u != nil && u.Kind() == types.String {
+				T0 = tt
+				break
+			}
 		}
-		v := call.Args[0]
-		vtyp := pass.TypesInfo.TypeOf(v)
-		V, _ := vtyp.Underlying().(*types.Basic)
-		if V == nil || V.Info()&types.IsInteger == 0 {
-			return
-		}
-		switch V.Kind() {
-		case types.Byte, types.Rune, types.UntypedRune:
+
+		if T0 == nil {
+			// No target types have an underlying type of string.
 			return
 		}
 
-		// Retrieve source type name.
-		source := typeName(vtyp)
-		if source == "" {
+		// Next, find a type V0 in V that has an underlying integral type that is
+		// not byte or rune.
+		V := pass.TypesInfo.TypeOf(arg)
+		vtypes, err := structuralTypes(V)
+		if err != nil {
+			return // invalid type
+		}
+
+		var V0 types.Type // integral type in the type set of V
+
+		for _, vt := range vtypes {
+			u, _ := vt.Underlying().(*types.Basic)
+			if u != nil && u.Info()&types.IsInteger != 0 {
+				switch u.Kind() {
+				case types.Byte, types.Rune, types.UntypedRune:
+					continue
+				}
+				V0 = vt
+				break
+			}
+		}
+
+		if V0 == nil {
+			// No source type is an integer type other than byte or rune.
 			return
 		}
-		if s := V.Name(); source != s {
-			source += " (" + s + ")"
+
+		convertibleToRune := true // if true, we can suggest a fix
+		for _, t := range vtypes {
+			if !types.ConvertibleTo(t, types.Typ[types.Rune]) {
+				convertibleToRune = false
+				break
+			}
 		}
+
+		target := describe(T0, T, tname.Name())
+		source := describe(V0, V, typeName(V))
+
+		if target == "" || source == "" {
+			return // something went wrong
+		}
+
 		diag := analysis.Diagnostic{
 			Pos:     n.Pos(),
 			Message: fmt.Sprintf("conversion from %s to %s yields a string of one rune, not a string of digits (did you mean fmt.Sprint(x)?)", source, target),
-			SuggestedFixes: []analysis.SuggestedFix{
+		}
+
+		if convertibleToRune {
+			diag.SuggestedFixes = []analysis.SuggestedFix{
 				{
 					Message: "Did you mean to convert a rune to a string?",
 					TextEdits: []analysis.TextEdit{
 						{
-							Pos:     v.Pos(),
-							End:     v.Pos(),
+							Pos:     arg.Pos(),
+							End:     arg.Pos(),
 							NewText: []byte("rune("),
 						},
 						{
-							Pos:     v.End(),
-							End:     v.End(),
+							Pos:     arg.End(),
+							End:     arg.End(),
 							NewText: []byte(")"),
 						},
 					},
 				},
-			},
+			}
 		}
 		pass.Report(diag)
 	})
 	return nil, nil
 }
+
+func structuralTypes(t types.Type) ([]types.Type, error) {
+	var structuralTypes []types.Type
+	switch t := t.(type) {
+	case *typeparams.TypeParam:
+		terms, err := typeparams.StructuralTerms(t)
+		if err != nil {
+			return nil, err
+		}
+		for _, term := range terms {
+			structuralTypes = append(structuralTypes, term.Type())
+		}
+	default:
+		structuralTypes = append(structuralTypes, t)
+	}
+	return structuralTypes, nil
+}
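
Conceptually (a hypothetical sketch over plain type names, not the analyzer's types.Type machinery), the new check flags a conversion when the target's type set contains a term whose underlying type is string and the source's type set contains an integer term other than byte or rune:

package main

import "fmt"

// flag reports whether a hypothetical conversion should be diagnosed, given
// the underlying-type names of the target and source type sets.
func flag(targetTerms, sourceTerms []string) bool {
	hasString := false
	for _, t := range targetTerms {
		if t == "string" {
			hasString = true
			break
		}
	}
	if !hasString {
		return false
	}
	for _, s := range sourceTerms {
		switch s {
		case "byte", "rune":
			// allowed: converting byte or rune to string is intentional
		case "int", "int8", "int16", "int64", "uint", "uint16", "uint32", "uint64", "uintptr":
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(flag([]string{"string", "int"}, []string{"int"})) // true: like MaybeString(i) in the testdata below
	fmt.Println(flag([]string{"int", "byte"}, []string{"int"}))   // false: like NotString(i) in the testdata below
}
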
diff --git a/go/analysis/passes/stringintconv/string_test.go b/go/analysis/passes/stringintconv/string_test.go
index 8dc4cb9..13f2133 100644
--- a/go/analysis/passes/stringintconv/string_test.go
+++ b/go/analysis/passes/stringintconv/string_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/stringintconv"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.RunWithSuggestedFixes(t, testdata, stringintconv.Analyzer, "a")
+	pkgs := []string{"a"}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.RunWithSuggestedFixes(t, testdata, stringintconv.Analyzer, pkgs...)
 }
diff --git a/go/analysis/passes/stringintconv/testdata/src/typeparams/typeparams.go b/go/analysis/passes/stringintconv/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..b50aa3f
--- /dev/null
+++ b/go/analysis/passes/stringintconv/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,49 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+type (
+	Int     int
+	Uintptr = uintptr
+	String  string
+)
+
+func _[AllString ~string, MaybeString ~string | ~int, NotString ~int | byte, NamedString String | Int]() {
+	var (
+		i int
+		r rune
+		b byte
+		I Int
+		U uintptr
+		M MaybeString
+		N NotString
+	)
+	const p = 0
+
+	_ = MaybeString(i) // want `conversion from int to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	_ = MaybeString(r)
+	_ = MaybeString(b)
+	_ = MaybeString(I) // want `conversion from Int .int. to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	_ = MaybeString(U) // want `conversion from uintptr to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	// Type parameters are never constant types, so arguments are always
+	// converted to their default type (int versus untyped int, in this case)
+	_ = MaybeString(p) // want `conversion from int to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	// ...even if the type parameter is only strings.
+	_ = AllString(p) // want `conversion from int to string .in AllString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+
+	_ = NotString(i)
+	_ = NotString(r)
+	_ = NotString(b)
+	_ = NotString(I)
+	_ = NotString(U)
+	_ = NotString(p)
+
+	_ = NamedString(i) // want `conversion from int to String .string, in NamedString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	_ = string(M)      // want `conversion from int .in MaybeString. to string yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+
+	// Note that M is not convertible to rune.
+	_ = MaybeString(M) // want `conversion from int .in MaybeString. to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	_ = NotString(N)   // ok
+}
diff --git a/go/analysis/passes/stringintconv/testdata/src/typeparams/typeparams.go.golden b/go/analysis/passes/stringintconv/testdata/src/typeparams/typeparams.go.golden
new file mode 100644
index 0000000..8a78530
--- /dev/null
+++ b/go/analysis/passes/stringintconv/testdata/src/typeparams/typeparams.go.golden
@@ -0,0 +1,49 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+type (
+	Int     int
+	Uintptr = uintptr
+	String  string
+)
+
+func _[AllString ~string, MaybeString ~string | ~int, NotString ~int | byte, NamedString String | Int]() {
+	var (
+		i int
+		r rune
+		b byte
+		I Int
+		U uintptr
+		M MaybeString
+		N NotString
+	)
+	const p = 0
+
+	_ = MaybeString(rune(i)) // want `conversion from int to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	_ = MaybeString(r)
+	_ = MaybeString(b)
+	_ = MaybeString(rune(I)) // want `conversion from Int .int. to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	_ = MaybeString(rune(U)) // want `conversion from uintptr to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	// Type parameters are never constant types, so arguments are always
+	// converted to their default type (int versus untyped int, in this case)
+	_ = MaybeString(rune(p)) // want `conversion from int to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	// ...even if the type parameter is only strings.
+	_ = AllString(rune(p)) // want `conversion from int to string .in AllString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+
+	_ = NotString(i)
+	_ = NotString(r)
+	_ = NotString(b)
+	_ = NotString(I)
+	_ = NotString(U)
+	_ = NotString(p)
+
+	_ = NamedString(rune(i)) // want `conversion from int to String .string, in NamedString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	_ = string(M)            // want `conversion from int .in MaybeString. to string yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+
+	// Note that M is not convertible to rune.
+	_ = MaybeString(M) // want `conversion from int .in MaybeString. to string .in MaybeString. yields a string of one rune, not a string of digits .did you mean fmt\.Sprint.x.\?.`
+	_ = NotString(N)   // ok
+}
diff --git a/go/analysis/passes/structtag/testdata/src/a/a.go b/go/analysis/passes/structtag/testdata/src/a/a.go
index 8b1cea1..f9b035a 100644
--- a/go/analysis/passes/structtag/testdata/src/a/a.go
+++ b/go/analysis/passes/structtag/testdata/src/a/a.go
@@ -124,7 +124,7 @@
 	Q int `foo:" doesn't care "`
 }
 
-// Nested fiels can be shadowed by fields further up. For example,
+// Nested fields can be shadowed by fields further up. For example,
 // ShadowingAnonJSON replaces the json:"a" field in AnonymousJSONField.
 // However, if the two conflicting fields appear at the same level like in
 // DuplicateWithAnotherPackage, we should error.
diff --git a/go/analysis/passes/testinggoroutine/testdata/src/a/a.go b/go/analysis/passes/testinggoroutine/testdata/src/a/a.go
index c211ec3..c8fc91b 100644
--- a/go/analysis/passes/testinggoroutine/testdata/src/a/a.go
+++ b/go/analysis/passes/testinggoroutine/testdata/src/a/a.go
@@ -53,6 +53,8 @@
 	t.Fatal("TestFailed")
 }
 
+func g() {}
+
 func TestBadFatalIssue47470(t *testing.T) {
 	go f(t, "failed test 1") // want "call to .+T.+Fatal from a non-test goroutine"
 
@@ -272,3 +274,7 @@
 		}(i)
 	}
 }
+
+func TestIssue48124(t *testing.T) {
+	go h()
+}
diff --git a/go/analysis/passes/testinggoroutine/testdata/src/a/b.go b/go/analysis/passes/testinggoroutine/testdata/src/a/b.go
new file mode 100644
index 0000000..5e95177
--- /dev/null
+++ b/go/analysis/passes/testinggoroutine/testdata/src/a/b.go
@@ -0,0 +1,7 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package a
+
+func h() {}
diff --git a/go/analysis/passes/testinggoroutine/testdata/src/typeparams/typeparams.go b/go/analysis/passes/testinggoroutine/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..47e389f
--- /dev/null
+++ b/go/analysis/passes/testinggoroutine/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,17 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+	"testing"
+)
+
+func f[P any](t *testing.T) {
+	t.Fatal("failed")
+}
+
+func TestBadFatalf[P any](t *testing.T) {
+	go f[int](t) // want "call to .+T.+Fatal from a non-test goroutine"
+}
diff --git a/go/analysis/passes/testinggoroutine/testinggoroutine.go b/go/analysis/passes/testinggoroutine/testinggoroutine.go
index 800bef5..7ea8f77 100644
--- a/go/analysis/passes/testinggoroutine/testinggoroutine.go
+++ b/go/analysis/passes/testinggoroutine/testinggoroutine.go
@@ -11,6 +11,7 @@
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = `report calls to (*testing.T).Fatal from goroutines started by a test.
@@ -124,12 +125,27 @@
 // function literals declared in the same function, and
 // static calls within the same package are supported.
 func goStmtFun(goStmt *ast.GoStmt) ast.Node {
-	switch goStmt.Call.Fun.(type) {
-	case *ast.Ident:
-		id := goStmt.Call.Fun.(*ast.Ident)
+	switch fun := goStmt.Call.Fun.(type) {
+	case *ast.IndexExpr, *typeparams.IndexListExpr:
+		x, _, _, _ := typeparams.UnpackIndexExpr(fun)
+		id, _ := x.(*ast.Ident)
+		if id == nil {
+			break
+		}
+		if id.Obj == nil {
+			break
+		}
 		if funDecl, ok := id.Obj.Decl.(ast.Node); ok {
 			return funDecl
 		}
+	case *ast.Ident:
+		// TODO(cuonglm): improve this once golang/go#48141 is resolved.
+		if fun.Obj == nil {
+			break
+		}
+		if funDecl, ok := fun.Obj.Decl.(ast.Node); ok {
+			return funDecl
+		}
 	case *ast.FuncLit:
 		return goStmt.Call.Fun
 	}
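
For context, a standalone sketch using only go/ast (not the internal typeparams helpers used above) of how a generic go statement's callee can be resolved: in `go f[int](t)` the callee parses as an *ast.IndexExpr (or *ast.IndexListExpr for multiple type arguments), so the identifier is its X field rather than Call.Fun itself.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// callee returns the identifier a call invokes, unwrapping generic
// instantiation expressions (requires Go 1.18+ for ast.IndexListExpr).
func callee(call *ast.CallExpr) *ast.Ident {
	fun := call.Fun
	switch e := fun.(type) {
	case *ast.IndexExpr:
		fun = e.X
	case *ast.IndexListExpr:
		fun = e.X
	}
	id, _ := fun.(*ast.Ident)
	return id
}

func main() {
	src := `package p
func f[T any](x T) {}
func g() { go f[int](1) }`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	ast.Inspect(file, func(n ast.Node) bool {
		if g, ok := n.(*ast.GoStmt); ok {
			fmt.Println(callee(g.Call).Name) // prints "f"
		}
		return true
	})
}
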
diff --git a/go/analysis/passes/testinggoroutine/testinggoroutine_test.go b/go/analysis/passes/testinggoroutine/testinggoroutine_test.go
index 1a59026..56c4385 100644
--- a/go/analysis/passes/testinggoroutine/testinggoroutine_test.go
+++ b/go/analysis/passes/testinggoroutine/testinggoroutine_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/testinggoroutine"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, testinggoroutine.Analyzer, "a")
+	pkgs := []string{"a"}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.Run(t, testdata, testinggoroutine.Analyzer, pkgs...)
 }
diff --git a/go/analysis/passes/tests/testdata/src/a/a_test.go b/go/analysis/passes/tests/testdata/src/a/a_test.go
index 67bfda7..e38184a 100644
--- a/go/analysis/passes/tests/testdata/src/a/a_test.go
+++ b/go/analysis/passes/tests/testdata/src/a/a_test.go
@@ -53,6 +53,46 @@
 
 func ExampleBar() {} // want "ExampleBar refers to unknown identifier: Bar"
 
+func Example_withOutput() {
+	// Output:
+	// meow
+} // OK because output is the last comment block
+
+func Example_withBadOutput() {
+	// Output: // want "output comment block must be the last comment block"
+	// meow
+
+	// todo: change to bark
+}
+
+func Example_withBadUnorderedOutput() {
+	// Unordered Output: // want "output comment block must be the last comment block"
+	// meow
+
+	// todo: change to bark
+}
+
+func Example_withCommentAfterFunc() {
+	// Output: // OK because it is the last comment block
+	// meow
+} // todo: change to bark
+
+func Example_withOutputCommentAfterFunc() {
+	// Output:
+	// meow
+} // Output: bark // OK because output is not inside of an example
+
+func Example_withMultipleOutputs() {
+	// Output: // want "there can only be one output comment block per example"
+	// meow
+
+	// Output: // want "there can only be one output comment block per example"
+	// bark
+
+	// Output: // OK because it is the last output comment block
+	// ribbit
+}
+
 func nonTest() {} // OK because it doesn't start with "Test".
 
 func (Buf) TesthasReceiver() {} // OK because it has a receiver.
diff --git a/go/analysis/passes/tests/testdata/src/a/go118_test.go b/go/analysis/passes/tests/testdata/src/a/go118_test.go
new file mode 100644
index 0000000..dc898da
--- /dev/null
+++ b/go/analysis/passes/tests/testdata/src/a/go118_test.go
@@ -0,0 +1,96 @@
+//go:build go1.18
+// +build go1.18
+
+package a
+
+import (
+	"testing"
+)
+
+func Fuzzfoo(*testing.F) {} // want "first letter after 'Fuzz' must not be lowercase"
+
+func FuzzBoo(*testing.F) {} // OK because first letter after 'Fuzz' is Uppercase.
+
+func FuzzCallDifferentFunc(f *testing.F) {
+	f.Name() //OK
+}
+
+func FuzzFunc(f *testing.F) {
+	f.Fuzz(func(t *testing.T) {}) // OK "first argument is of type *testing.T"
+}
+
+func FuzzFuncWithArgs(f *testing.F) {
+	f.Add()                                      // want `wrong number of values in call to \(\*testing.F\)\.Add: 0, fuzz target expects 2`
+	f.Add(1, 2, 3, 4)                            // want `wrong number of values in call to \(\*testing.F\)\.Add: 4, fuzz target expects 2`
+	f.Add(5, 5)                                  // want `mismatched type in call to \(\*testing.F\)\.Add: int, fuzz target expects \[\]byte`
+	f.Add([]byte("hello"), 5)                    // want `mismatched types in call to \(\*testing.F\)\.Add: \[\[\]byte int\], fuzz target expects \[int \[\]byte\]`
+	f.Add(5, []byte("hello"))                    // OK
+	f.Fuzz(func(t *testing.T, i int, b []byte) { // OK "arguments in func are allowed"
+		f.Add(5, []byte("hello"))     // want `fuzz target must not call any \*F methods`
+		f.Name()                      // OK "calls to (*F).Failed and (*F).Name are allowed"
+		f.Failed()                    // OK "calls to (*F).Failed and (*F).Name are allowed"
+		f.Fuzz(func(t *testing.T) {}) // want `fuzz target must not call any \*F methods`
+	})
+}
+
+func FuzzArgFunc(f *testing.F) {
+	f.Fuzz(0) // want "argument to Fuzz must be a function"
+}
+
+func FuzzFuncWithReturn(f *testing.F) {
+	f.Fuzz(func(t *testing.T) bool { return true }) // want "fuzz target must not return any value"
+}
+
+func FuzzFuncNoArg(f *testing.F) {
+	f.Fuzz(func() {}) // want "fuzz target must have 1 or more argument"
+}
+
+func FuzzFuncFirstArgNotTesting(f *testing.F) {
+	f.Fuzz(func(i int64) {}) // want "the first parameter of a fuzz target must be \\*testing.T"
+}
+
+func FuzzFuncFirstArgTestingNotT(f *testing.F) {
+	f.Fuzz(func(t *testing.F) {}) // want "the first parameter of a fuzz target must be \\*testing.T"
+}
+
+func FuzzFuncSecondArgNotAllowed(f *testing.F) {
+	f.Fuzz(func(t *testing.T, i complex64) {}) // want "fuzzing arguments can only have the following types: string, bool, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, \\[\\]byte"
+}
+
+func FuzzFuncSecondArgArrNotAllowed(f *testing.F) {
+	f.Fuzz(func(t *testing.T, i []int) {}) // want "fuzzing arguments can only have the following types: string, bool, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, \\[\\]byte"
+}
+
+func FuzzFuncConsecutiveArgNotAllowed(f *testing.F) {
+	f.Fuzz(func(t *testing.T, i, j string, k complex64) {}) // want "fuzzing arguments can only have the following types: string, bool, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, \\[\\]byte"
+}
+
+func FuzzFuncInner(f *testing.F) {
+	innerFunc := func(t *testing.T, i float32) {}
+	f.Fuzz(innerFunc) // ok
+}
+
+func FuzzArrayOfFunc(f *testing.F) {
+	var funcs = []func(t *testing.T, i int){func(t *testing.T, i int) {}}
+	f.Fuzz(funcs[0]) // ok
+}
+
+type GenericSlice[T any] []T
+
+func FuzzGenericFunc(f *testing.F) {
+	g := GenericSlice[func(t *testing.T, i int)]{func(t *testing.T, i int) {}}
+	f.Fuzz(g[0]) // ok
+}
+
+type F func(t *testing.T, i int32)
+
+type myType struct {
+	myVar F
+}
+
+func FuzzObjectMethod(f *testing.F) {
+	obj := myType{
+		myVar: func(t *testing.T, i int32) {},
+	}
+	f.Fuzz(obj.myVar) // ok
+}
diff --git a/go/analysis/passes/tests/testdata/src/typeparams/typeparams.go b/go/analysis/passes/tests/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..344a8f8
--- /dev/null
+++ b/go/analysis/passes/tests/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,10 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+func Zero[T any]() T {
+	var zero T
+	return zero
+}
diff --git a/go/analysis/passes/tests/testdata/src/typeparams/typeparams_test.go b/go/analysis/passes/tests/testdata/src/typeparams/typeparams_test.go
new file mode 100644
index 0000000..01fad75
--- /dev/null
+++ b/go/analysis/passes/tests/testdata/src/typeparams/typeparams_test.go
@@ -0,0 +1,21 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import "testing"
+
+func Test(*testing.T) {
+	_ = Zero[int]() // It is fine to use generics within tests.
+}
+
+// Note: We spell {Test,Benchmark}typeParam with a lowercase 't' in "type" to avoid an error from
+// cmd/go/internal/load. That package can also give an error about Test and Benchmark
+// functions with type parameters. These tests may need to be updated if that logic changes.
+func TesttypeParam[T any](*testing.T)      {} // want "TesttypeParam has type parameters: it will not be run by go test as a TestXXX function" "TesttypeParam has malformed name"
+func BenchmarktypeParam[T any](*testing.B) {} // want "BenchmarktypeParam has type parameters: it will not be run by go test as a BenchmarkXXX function" "BenchmarktypeParam has malformed name"
+
+func ExampleZero[T any]() { // want "ExampleZero should not have type params"
+	print(Zero[T]())
+}
diff --git a/go/analysis/passes/tests/tests.go b/go/analysis/passes/tests/tests.go
index 8232276..ffa5205 100644
--- a/go/analysis/passes/tests/tests.go
+++ b/go/analysis/passes/tests/tests.go
@@ -7,13 +7,18 @@
 package tests
 
 import (
+	"fmt"
 	"go/ast"
+	"go/token"
 	"go/types"
+	"regexp"
 	"strings"
 	"unicode"
 	"unicode/utf8"
 
 	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/internal/analysisinternal"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = `check for common mistaken usages of tests and examples
@@ -31,6 +36,24 @@
 	Run:  run,
 }
 
+var acceptedFuzzTypes = []types.Type{
+	types.Typ[types.String],
+	types.Typ[types.Bool],
+	types.Typ[types.Float32],
+	types.Typ[types.Float64],
+	types.Typ[types.Int],
+	types.Typ[types.Int8],
+	types.Typ[types.Int16],
+	types.Typ[types.Int32],
+	types.Typ[types.Int64],
+	types.Typ[types.Uint],
+	types.Typ[types.Uint8],
+	types.Typ[types.Uint16],
+	types.Typ[types.Uint32],
+	types.Typ[types.Uint64],
+	types.NewSlice(types.Universe.Lookup("byte").Type()),
+}
+
 func run(pass *analysis.Pass) (interface{}, error) {
 	for _, f := range pass.Files {
 		if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") {
@@ -42,20 +65,230 @@
 				// Ignore non-functions or functions with receivers.
 				continue
 			}
-
 			switch {
 			case strings.HasPrefix(fn.Name.Name, "Example"):
-				checkExample(pass, fn)
+				checkExampleName(pass, fn)
+				checkExampleOutput(pass, fn, f.Comments)
 			case strings.HasPrefix(fn.Name.Name, "Test"):
 				checkTest(pass, fn, "Test")
 			case strings.HasPrefix(fn.Name.Name, "Benchmark"):
 				checkTest(pass, fn, "Benchmark")
 			}
+			// Run fuzz test diagnostics only for 1.18, i.e. when analysisinternal.DiagnoseFuzzTests is turned on.
+			if strings.HasPrefix(fn.Name.Name, "Fuzz") && analysisinternal.DiagnoseFuzzTests {
+				checkTest(pass, fn, "Fuzz")
+				checkFuzz(pass, fn)
+			}
 		}
 	}
 	return nil, nil
 }
 
+// Checks the contents of a fuzz function.
+func checkFuzz(pass *analysis.Pass, fn *ast.FuncDecl) {
+	params := checkFuzzCall(pass, fn)
+	if params != nil {
+		checkAddCalls(pass, fn, params)
+	}
+}
+
+// Check the arguments of f.Fuzz() calls:
+// 1. f.Fuzz() should be a call to (*testing.F).Fuzz, and its argument should be a function.
+// 2. The function passed to f.Fuzz(func(){}) should not return a result.
+// 3. First argument of func() should be of type *testing.T
+// 4. Second argument onwards should be of type []byte, string, bool, byte,
+//	  rune, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16,
+//	  uint32, uint64
+// 5. func() must not call any *F methods, e.g. (*F).Log, (*F).Error, (*F).Skip
+//    The only *F methods that are allowed in the (*F).Fuzz function are (*F).Failed and (*F).Name.
+// Returns the list of parameters to the fuzz function, if they are valid fuzz parameters.
+func checkFuzzCall(pass *analysis.Pass, fn *ast.FuncDecl) (params *types.Tuple) {
+	ast.Inspect(fn, func(n ast.Node) bool {
+		call, ok := n.(*ast.CallExpr)
+		if ok {
+			if !isFuzzTargetDotFuzz(pass, call) {
+				return true
+			}
+
+			// Only one argument (func) must be passed to (*testing.F).Fuzz.
+			if len(call.Args) != 1 {
+				return true
+			}
+			expr := call.Args[0]
+			if pass.TypesInfo.Types[expr].Type == nil {
+				return true
+			}
+			t := pass.TypesInfo.Types[expr].Type.Underlying()
+			tSign, argOk := t.(*types.Signature)
+			// Argument should be a function
+			if !argOk {
+				pass.ReportRangef(expr, "argument to Fuzz must be a function")
+				return false
+			}
+			// The fuzz target function should not return a value
+			if tSign.Results().Len() != 0 {
+				pass.ReportRangef(expr, "fuzz target must not return any value")
+			}
+			// The fuzz target function should have 1 or more arguments
+			if tSign.Params().Len() == 0 {
+				pass.ReportRangef(expr, "fuzz target must have 1 or more argument")
+				return false
+			}
+			ok := validateFuzzArgs(pass, tSign.Params(), expr)
+			if ok && params == nil {
+				params = tSign.Params()
+			}
+			// Inspect the function that was passed as an argument to make sure that
+			// there are no calls to *F methods, except for Name and Failed.
+			ast.Inspect(expr, func(n ast.Node) bool {
+				if call, ok := n.(*ast.CallExpr); ok {
+					if !isFuzzTargetDot(pass, call, "") {
+						return true
+					}
+					if !isFuzzTargetDot(pass, call, "Name") && !isFuzzTargetDot(pass, call, "Failed") {
+						pass.ReportRangef(call, "fuzz target must not call any *F methods")
+					}
+				}
+				return true
+			})
+			// We do not need to look at any calls to f.Fuzz inside of a Fuzz call,
+			// since they are not allowed.
+			return false
+		}
+		return true
+	})
+	return params
+}
+
+// Check that the arguments of f.Add() calls match, in number and type, the parameters
+// of the function passed to (*testing.F).Fuzz.
+func checkAddCalls(pass *analysis.Pass, fn *ast.FuncDecl, params *types.Tuple) {
+	ast.Inspect(fn, func(n ast.Node) bool {
+		call, ok := n.(*ast.CallExpr)
+		if ok {
+			if !isFuzzTargetDotAdd(pass, call) {
+				return true
+			}
+
+			// The first argument to function passed to (*testing.F).Fuzz is (*testing.T).
+			if len(call.Args) != params.Len()-1 {
+				pass.ReportRangef(call, "wrong number of values in call to (*testing.F).Add: %d, fuzz target expects %d", len(call.Args), params.Len()-1)
+				return true
+			}
+			var mismatched []int
+			for i, expr := range call.Args {
+				if pass.TypesInfo.Types[expr].Type == nil {
+					return true
+				}
+				t := pass.TypesInfo.Types[expr].Type
+				if !types.Identical(t, params.At(i+1).Type()) {
+					mismatched = append(mismatched, i)
+				}
+			}
+			// If just one of the types is mismatched report for that
+			// type only. Otherwise report for the whole call to (*testing.F).Add
+			if len(mismatched) == 1 {
+				i := mismatched[0]
+				expr := call.Args[i]
+				t := pass.TypesInfo.Types[expr].Type
+				pass.ReportRangef(expr, fmt.Sprintf("mismatched type in call to (*testing.F).Add: %v, fuzz target expects %v", t, params.At(i+1).Type()))
+			} else if len(mismatched) > 1 {
+				var gotArgs, wantArgs []types.Type
+				for i := 0; i < len(call.Args); i++ {
+					gotArgs, wantArgs = append(gotArgs, pass.TypesInfo.Types[call.Args[i]].Type), append(wantArgs, params.At(i+1).Type())
+				}
+				pass.ReportRangef(call, fmt.Sprintf("mismatched types in call to (*testing.F).Add: %v, fuzz target expects %v", gotArgs, wantArgs))
+			}
+		}
+		return true
+	})
+}
+
+// isFuzzTargetDotFuzz reports whether call is (*testing.F).Fuzz().
+func isFuzzTargetDotFuzz(pass *analysis.Pass, call *ast.CallExpr) bool {
+	return isFuzzTargetDot(pass, call, "Fuzz")
+}
+
+// isFuzzTargetDotAdd reports whether call is (*testing.F).Add().
+func isFuzzTargetDotAdd(pass *analysis.Pass, call *ast.CallExpr) bool {
+	return isFuzzTargetDot(pass, call, "Add")
+}
+
+// isFuzzTargetDot reports whether call is (*testing.F).<name>().
+func isFuzzTargetDot(pass *analysis.Pass, call *ast.CallExpr, name string) bool {
+	if selExpr, ok := call.Fun.(*ast.SelectorExpr); ok {
+		if !isTestingType(pass.TypesInfo.Types[selExpr.X].Type, "F") {
+			return false
+		}
+		if name == "" || selExpr.Sel.Name == name {
+			return true
+		}
+	}
+	return false
+}
+
+// Validate the arguments of the fuzz target.
+func validateFuzzArgs(pass *analysis.Pass, params *types.Tuple, expr ast.Expr) bool {
+	fLit, isFuncLit := expr.(*ast.FuncLit)
+	exprRange := expr
+	ok := true
+	if !isTestingType(params.At(0).Type(), "T") {
+		if isFuncLit {
+			exprRange = fLit.Type.Params.List[0].Type
+		}
+		pass.ReportRangef(exprRange, "the first parameter of a fuzz target must be *testing.T")
+		ok = false
+	}
+	for i := 1; i < params.Len(); i++ {
+		if !isAcceptedFuzzType(params.At(i).Type()) {
+			if isFuncLit {
+				curr := 0
+				for _, field := range fLit.Type.Params.List {
+					curr += len(field.Names)
+					if i < curr {
+						exprRange = field.Type
+						break
+					}
+				}
+			}
+			pass.ReportRangef(exprRange, "fuzzing arguments can only have the following types: "+formatAcceptedFuzzType())
+			ok = false
+		}
+	}
+	return ok
+}
+
+func isTestingType(typ types.Type, testingType string) bool {
+	ptr, ok := typ.(*types.Pointer)
+	if !ok {
+		return false
+	}
+	named, ok := ptr.Elem().(*types.Named)
+	if !ok {
+		return false
+	}
+	return named.Obj().Pkg().Path() == "testing" && named.Obj().Name() == testingType
+}
+
+// Report whether paramType is one of the accepted fuzz argument types.
+func isAcceptedFuzzType(paramType types.Type) bool {
+	for _, typ := range acceptedFuzzTypes {
+		if types.Identical(typ, paramType) {
+			return true
+		}
+	}
+	return false
+}
+
+func formatAcceptedFuzzType() string {
+	var acceptedFuzzTypesStrings []string
+	for _, typ := range acceptedFuzzTypes {
+		acceptedFuzzTypesStrings = append(acceptedFuzzTypesStrings, typ.String())
+	}
+	acceptedFuzzTypesMsg := strings.Join(acceptedFuzzTypesStrings, ", ")
+	return acceptedFuzzTypesMsg
+}
+
 func isExampleSuffix(s string) bool {
 	r, size := utf8.DecodeRuneInString(s)
 	return size > 0 && unicode.IsLower(r)
@@ -108,7 +341,59 @@
 	return ret
 }
 
-func checkExample(pass *analysis.Pass, fn *ast.FuncDecl) {
+// This pattern is taken from /go/src/go/doc/example.go
+var outputRe = regexp.MustCompile(`(?i)^[[:space:]]*(unordered )?output:`)
+
+type commentMetadata struct {
+	isOutput bool
+	pos      token.Pos
+}
+
+func checkExampleOutput(pass *analysis.Pass, fn *ast.FuncDecl, fileComments []*ast.CommentGroup) {
+	commentsInExample := []commentMetadata{}
+	numOutputs := 0
+
+	// Find the comment blocks that are in the example. These comments are
+	// guaranteed to be in order of appearance.
+	for _, cg := range fileComments {
+		if cg.Pos() < fn.Pos() {
+			continue
+		} else if cg.End() > fn.End() {
+			break
+		}
+
+		isOutput := outputRe.MatchString(cg.Text())
+		if isOutput {
+			numOutputs++
+		}
+
+		commentsInExample = append(commentsInExample, commentMetadata{
+			isOutput: isOutput,
+			pos:      cg.Pos(),
+		})
+	}
+
+	// Change message based on whether there are multiple output comment blocks.
+	msg := "output comment block must be the last comment block"
+	if numOutputs > 1 {
+		msg = "there can only be one output comment block per example"
+	}
+
+	for i, cg := range commentsInExample {
+		// Check for output comments that are not the last comment in the example.
+		isLast := (i == len(commentsInExample)-1)
+		if cg.isOutput && !isLast {
+			pass.Report(
+				analysis.Diagnostic{
+					Pos:     cg.pos,
+					Message: msg,
+				},
+			)
+		}
+	}
+}
+
+func checkExampleName(pass *analysis.Pass, fn *ast.FuncDecl) {
 	fnName := fn.Name.Name
 	if params := fn.Type.Params; len(params.List) != 0 {
 		pass.Reportf(fn.Pos(), "%s should be niladic", fnName)
@@ -116,6 +401,9 @@
 	if results := fn.Type.Results; results != nil && len(results.List) != 0 {
 		pass.Reportf(fn.Pos(), "%s should return nothing", fnName)
 	}
+	if tparams := typeparams.ForFuncType(fn.Type); tparams != nil && len(tparams.List) > 0 {
+		pass.Reportf(fn.Pos(), "%s should not have type params", fnName)
+	}
 
 	if fnName == "Example" {
 		// Nothing more to do.
@@ -182,6 +470,12 @@
 		return
 	}
 
+	if tparams := typeparams.ForFuncType(fn.Type); tparams != nil && len(tparams.List) > 0 {
+		// Note: cmd/go/internal/load also errors about TestXXX and BenchmarkXXX functions with type parameters.
+		// We have currently decided to also warn before compilation/package loading. This can help users in IDEs.
+		pass.Reportf(fn.Pos(), "%s has type parameters: it will not be run by go test as a %sXXX function", fn.Name.Name, prefix)
+	}
+
 	if !isTestSuffix(fn.Name.Name[len(prefix):]) {
 		pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
 	}
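
As a rough standalone sketch of the example-output rule added above (operating on plain strings rather than the analyzer's *ast.CommentGroup values): every comment block matching the output pattern must be the last block of the example, and only one output block is allowed.

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as above, taken from go/doc/example.go.
var outputRe = regexp.MustCompile(`(?i)^[[:space:]]*(unordered )?output:`)

// checkBlocks returns one message per output comment block that is not the
// last comment block of the example.
func checkBlocks(blocks []string) []string {
	numOutputs := 0
	for _, b := range blocks {
		if outputRe.MatchString(b) {
			numOutputs++
		}
	}
	msg := "output comment block must be the last comment block"
	if numOutputs > 1 {
		msg = "there can only be one output comment block per example"
	}
	var diags []string
	for i, b := range blocks {
		if outputRe.MatchString(b) && i != len(blocks)-1 {
			diags = append(diags, msg)
		}
	}
	return diags
}

func main() {
	fmt.Println(checkBlocks([]string{"Output:\nmeow", "todo: change to bark"}))
	fmt.Println(checkBlocks([]string{"Output:\nmeow", "Output:\nbark", "Output:\nribbit"}))
}
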
diff --git a/go/analysis/passes/tests/tests_test.go b/go/analysis/passes/tests/tests_test.go
index 34efbf6..b0b09dd 100644
--- a/go/analysis/passes/tests/tests_test.go
+++ b/go/analysis/passes/tests/tests_test.go
@@ -7,16 +7,29 @@
 import (
 	"testing"
 
+	"golang.org/x/tools/internal/analysisinternal"
+
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/tests"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
+	// In 1.18, the diagnostics for fuzz tests must not be used by cmd/vet, so the
+	// code for them is guarded behind the flag analysisinternal.DiagnoseFuzzTests.
+	// Turn the flag on for these analysis tests, and turn it off again afterwards.
+	analysisinternal.DiagnoseFuzzTests = true
+	defer func() {
+		analysisinternal.DiagnoseFuzzTests = false
+	}()
 	testdata := analysistest.TestData()
-
-	analysistest.Run(t, testdata, tests.Analyzer,
+	pkgs := []string{
 		"a",        // loads "a", "a [a.test]", and "a.test"
 		"b_x_test", // loads "b" and "b_x_test"
 		"divergent",
-	)
+	}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.Run(t, testdata, tests.Analyzer, pkgs...)
 }
diff --git a/go/analysis/passes/unmarshal/testdata/src/typeparams/typeparams.go b/go/analysis/passes/unmarshal/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..cbf7deb
--- /dev/null
+++ b/go/analysis/passes/unmarshal/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,22 @@
+package typeparams
+
+import (
+	"encoding/json"
+	"fmt"
+)
+
+func unmarshalT[T any](data []byte) T {
+	var x T
+	json.Unmarshal(data, x)
+	return x
+}
+
+func unmarshalT2[T any](data []byte, t T) {
+    json.Unmarshal(data, t)
+}
+
+func main() {
+	x := make(map[string]interface{})
+	unmarshalT2([]byte(`{"a":1}`), &x)
+	fmt.Println(x)
+}
\ No newline at end of file
diff --git a/go/analysis/passes/unmarshal/unmarshal.go b/go/analysis/passes/unmarshal/unmarshal.go
index 92b37ca..5129048 100644
--- a/go/analysis/passes/unmarshal/unmarshal.go
+++ b/go/analysis/passes/unmarshal/unmarshal.go
@@ -14,6 +14,7 @@
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/ast/inspector"
 	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = `report passing non-pointer or non-interface values to unmarshal
@@ -85,7 +86,7 @@
 
 		t := pass.TypesInfo.Types[call.Args[argidx]].Type
 		switch t.Underlying().(type) {
-		case *types.Pointer, *types.Interface:
+		case *types.Pointer, *types.Interface, *typeparams.TypeParam:
 			return
 		}
 
diff --git a/go/analysis/passes/unmarshal/unmarshal_test.go b/go/analysis/passes/unmarshal/unmarshal_test.go
index ae19e5d..e6171f2 100644
--- a/go/analysis/passes/unmarshal/unmarshal_test.go
+++ b/go/analysis/passes/unmarshal/unmarshal_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/unmarshal"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, unmarshal.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, unmarshal.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/unsafeptr/testdata/src/typeparams/typeparams.go b/go/analysis/passes/unsafeptr/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..c1e6c2d
--- /dev/null
+++ b/go/analysis/passes/unsafeptr/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,21 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import "unsafe"
+
+func _[IntPtr ~uintptr, RealPtr *T, AnyPtr uintptr | *T, T any]() {
+	var (
+		i IntPtr
+		r RealPtr
+		a AnyPtr
+	)
+	_ = unsafe.Pointer(i)          // incorrect, but not detected
+	_ = unsafe.Pointer(i + i)      // incorrect, but not detected
+	_ = unsafe.Pointer(1 + i)      // incorrect, but not detected
+	_ = unsafe.Pointer(uintptr(i)) // want "possible misuse of unsafe.Pointer"
+	_ = unsafe.Pointer(r)
+	_ = unsafe.Pointer(a) // possibly incorrect, but not detected
+}
diff --git a/go/analysis/passes/unsafeptr/unsafeptr_test.go b/go/analysis/passes/unsafeptr/unsafeptr_test.go
index 18e22c6..424de1f 100644
--- a/go/analysis/passes/unsafeptr/unsafeptr_test.go
+++ b/go/analysis/passes/unsafeptr/unsafeptr_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/unsafeptr"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, unsafeptr.Analyzer, "a")
+	pkgs := []string{"a"}
+	if typeparams.Enabled {
+		pkgs = append(pkgs, "typeparams")
+	}
+	analysistest.Run(t, testdata, unsafeptr.Analyzer, pkgs...)
 }
diff --git a/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go b/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..c770ccd
--- /dev/null
+++ b/go/analysis/passes/unusedresult/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,41 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+//go:build go1.18
+
+package typeparams
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"typeparams/userdefs"
+)
+
+func _[T any]() {
+	fmt.Errorf("") // want "result of fmt.Errorf call not used"
+	_ = fmt.Errorf("")
+
+	errors.New("") // want "result of errors.New call not used"
+
+	err := errors.New("")
+	err.Error() // want `result of \(error\).Error call not used`
+
+	var buf bytes.Buffer
+	buf.String() // want `result of \(bytes.Buffer\).String call not used`
+
+	fmt.Sprint("")  // want "result of fmt.Sprint call not used"
+	fmt.Sprintf("") // want "result of fmt.Sprintf call not used"
+
+	userdefs.MustUse[int](1) // want "result of typeparams/userdefs.MustUse call not used"
+	_ = userdefs.MustUse[int](2)
+
+	s := userdefs.SingleTypeParam[int]{X: 1}
+	s.String() // want `result of \(typeparams/userdefs.SingleTypeParam\[int\]\).String call not used`
+	_ = s.String()
+
+	m := userdefs.MultiTypeParam[int, string]{X: 1, Y: "one"}
+	m.String() // want `result of \(typeparams/userdefs.MultiTypeParam\[int, string\]\).String call not used`
+	_ = m.String()
+}
\ No newline at end of file
diff --git a/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/userdefs.go b/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/userdefs.go
new file mode 100644
index 0000000..218cc9a
--- /dev/null
+++ b/go/analysis/passes/unusedresult/testdata/src/typeparams/userdefs/userdefs.go
@@ -0,0 +1,28 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+//go:build go1.18
+
+package userdefs
+
+func MustUse[T interface{ ~int }](v T) T {
+	return v + 1
+}
+
+type SingleTypeParam[T any] struct {
+	X T
+}
+
+func (_ *SingleTypeParam[T]) String() string {
+	return "SingleTypeParam"
+}
+
+type MultiTypeParam[T any, U any] struct {
+	X T
+	Y U
+}
+
+func (_ *MultiTypeParam[T, U]) String() string {
+	return "MultiTypeParam"
+}
\ No newline at end of file
diff --git a/go/analysis/passes/unusedresult/unusedresult.go b/go/analysis/passes/unusedresult/unusedresult.go
index bececee..06747ba 100644
--- a/go/analysis/passes/unusedresult/unusedresult.go
+++ b/go/analysis/passes/unusedresult/unusedresult.go
@@ -17,6 +17,7 @@
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // TODO(adonovan): make this analysis modular: export a mustUseResult
@@ -70,6 +71,11 @@
 			return // a conversion, not a call
 		}
 
+		x, _, _, _ := typeparams.UnpackIndexExpr(fun)
+		if x != nil {
+			fun = x // If this is a generic function or method call, skip the instantiation arguments.
+		}
+
 		selector, ok := fun.(*ast.SelectorExpr)
 		if !ok {
 			return // neither a method call nor a qualified ident
diff --git a/go/analysis/passes/unusedresult/unusedresult_test.go b/go/analysis/passes/unusedresult/unusedresult_test.go
index 90bf7ba..a2b079a 100644
--- a/go/analysis/passes/unusedresult/unusedresult_test.go
+++ b/go/analysis/passes/unusedresult/unusedresult_test.go
@@ -9,9 +9,16 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/go/analysis/passes/unusedresult"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, unusedresult.Analyzer, "a")
+	funcs := "typeparams/userdefs.MustUse,errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint"
+	unusedresult.Analyzer.Flags.Set("funcs", funcs)
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, unusedresult.Analyzer, tests...)
 }
diff --git a/go/analysis/passes/usesgenerics/testdata/src/a/a.go b/go/analysis/passes/usesgenerics/testdata/src/a/a.go
new file mode 100644
index 0000000..a6dd888
--- /dev/null
+++ b/go/analysis/passes/usesgenerics/testdata/src/a/a.go
@@ -0,0 +1,9 @@
+// want package:`features{typeDecl,funcDecl,funcInstance}`
+
+package a
+
+type T[P any] int
+
+func F[P any]() {}
+
+var _ = F[int]
diff --git a/go/analysis/passes/usesgenerics/testdata/src/b/b.go b/go/analysis/passes/usesgenerics/testdata/src/b/b.go
new file mode 100644
index 0000000..81c2810
--- /dev/null
+++ b/go/analysis/passes/usesgenerics/testdata/src/b/b.go
@@ -0,0 +1,7 @@
+// want package:`features{typeSet}`
+
+package b
+
+type Constraint interface {
+	~int | string
+}
diff --git a/go/analysis/passes/usesgenerics/testdata/src/c/c.go b/go/analysis/passes/usesgenerics/testdata/src/c/c.go
new file mode 100644
index 0000000..f07499e
--- /dev/null
+++ b/go/analysis/passes/usesgenerics/testdata/src/c/c.go
@@ -0,0 +1,13 @@
+// want package:`features{typeDecl,funcDecl,typeSet,typeInstance,funcInstance}`
+
+// Features funcDecl, typeSet, and funcInstance come from imported packages "a"
+// and "b". These features are not directly present in "c".
+
+package c
+
+import (
+	"a"
+	"b"
+)
+
+type T[P b.Constraint] a.T[P]
diff --git a/go/analysis/passes/usesgenerics/testdata/src/d/d.go b/go/analysis/passes/usesgenerics/testdata/src/d/d.go
new file mode 100644
index 0000000..a06c776
--- /dev/null
+++ b/go/analysis/passes/usesgenerics/testdata/src/d/d.go
@@ -0,0 +1,13 @@
+// want package:`features{typeSet}`
+
+package d
+
+type myInt int
+
+func _() {
+	// Sanity check that we can detect both local types and interfaces with
+	// embedded defined types.
+	type constraint interface {
+		myInt
+	}
+}
diff --git a/go/analysis/passes/usesgenerics/usesgenerics.go b/go/analysis/passes/usesgenerics/usesgenerics.go
new file mode 100644
index 0000000..4956e0e
--- /dev/null
+++ b/go/analysis/passes/usesgenerics/usesgenerics.go
@@ -0,0 +1,84 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package usesgenerics defines an Analyzer that checks for usage of generic
+// features added in Go 1.18.
+package usesgenerics
+
+import (
+	"reflect"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams/genericfeatures"
+)
+
+var Analyzer = &analysis.Analyzer{
+	Name:       "usesgenerics",
+	Doc:        Doc,
+	Requires:   []*analysis.Analyzer{inspect.Analyzer},
+	Run:        run,
+	ResultType: reflect.TypeOf((*Result)(nil)),
+	FactTypes:  []analysis.Fact{new(featuresFact)},
+}
+
+const Doc = `detect whether a package uses generics features
+
+The usesgenerics analysis reports whether a package directly or transitively
+uses certain features associated with generic programming in Go.`
+
+type Features = genericfeatures.Features
+
+const (
+	GenericTypeDecls  = genericfeatures.GenericTypeDecls
+	GenericFuncDecls  = genericfeatures.GenericFuncDecls
+	EmbeddedTypeSets  = genericfeatures.EmbeddedTypeSets
+	TypeInstantiation = genericfeatures.TypeInstantiation
+	FuncInstantiation = genericfeatures.FuncInstantiation
+)
+
+// Result is the usesgenerics analyzer result type. The Direct field records
+// features used directly by the package being analyzed (i.e. contained in the
+// package source code). The Transitive field records any features used by the
+// package or any of its transitive imports.
+type Result struct {
+	Direct, Transitive Features
+}
+
+type featuresFact struct {
+	Features Features
+}
+
+func (f *featuresFact) AFact()         {}
+func (f *featuresFact) String() string { return f.Features.String() }
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+	direct := genericfeatures.ForPackage(inspect, pass.TypesInfo)
+
+	transitive := direct | importedTransitiveFeatures(pass)
+	if transitive != 0 {
+		pass.ExportPackageFact(&featuresFact{transitive})
+	}
+
+	return &Result{
+		Direct:     direct,
+		Transitive: transitive,
+	}, nil
+}
+
+// importedTransitiveFeatures computes features that are used transitively via
+// imports.
+func importedTransitiveFeatures(pass *analysis.Pass) Features {
+	var feats Features
+	for _, imp := range pass.Pkg.Imports() {
+		var importedFact featuresFact
+		if pass.ImportPackageFact(imp, &importedFact) {
+			feats |= importedFact.Features
+		}
+	}
+	return feats
+}
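Illustrative sketch (not part of the diff) of a client analyzer, with the hypothetical name "reportgenerics", that requires usesgenerics and reads its Result:

package reportgenerics

import (
	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/usesgenerics"
)

var Analyzer = &analysis.Analyzer{
	Name:     "reportgenerics",
	Doc:      "report packages that depend on generic type declarations",
	Requires: []*analysis.Analyzer{usesgenerics.Analyzer},
	Run: func(pass *analysis.Pass) (interface{}, error) {
		res := pass.ResultOf[usesgenerics.Analyzer].(*usesgenerics.Result)
		if res.Transitive&usesgenerics.GenericTypeDecls != 0 && len(pass.Files) > 0 {
			// Transitive covers features of the package itself and of its imports.
			pass.Reportf(pass.Files[0].Package, "package %s uses generic type declarations, directly or via imports", pass.Pkg.Name())
		}
		return nil, nil
	},
}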
diff --git a/go/analysis/passes/usesgenerics/usesgenerics_test.go b/go/analysis/passes/usesgenerics/usesgenerics_test.go
new file mode 100644
index 0000000..3dcff24
--- /dev/null
+++ b/go/analysis/passes/usesgenerics/usesgenerics_test.go
@@ -0,0 +1,21 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package usesgenerics_test
+
+import (
+	"testing"
+
+	"golang.org/x/tools/go/analysis/analysistest"
+	"golang.org/x/tools/go/analysis/passes/usesgenerics"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func Test(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("type parameters are not enabled at this Go version")
+	}
+	testdata := analysistest.TestData()
+	analysistest.Run(t, testdata, usesgenerics.Analyzer, "a", "b", "c", "d")
+}
diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go
index 5424489..b539866 100644
--- a/go/analysis/unitchecker/unitchecker.go
+++ b/go/analysis/unitchecker/unitchecker.go
@@ -51,6 +51,7 @@
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/internal/analysisflags"
 	"golang.org/x/tools/go/analysis/internal/facts"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // A Config describes a compilation unit to be analyzed.
@@ -233,6 +234,8 @@
 		Scopes:     make(map[ast.Node]*types.Scope),
 		Selections: make(map[*ast.SelectorExpr]*types.Selection),
 	}
+	typeparams.InitInstanceInfo(info)
+
 	pkg, err := tc.Check(cfg.ImportPath, fset, files, info)
 	if err != nil {
 		if cfg.SucceedOnTypecheckFailure {
diff --git a/go/ast/astutil/enclosing.go b/go/ast/astutil/enclosing.go
index 6b7052b..a5c6d6d 100644
--- a/go/ast/astutil/enclosing.go
+++ b/go/ast/astutil/enclosing.go
@@ -11,6 +11,8 @@
 	"go/ast"
 	"go/token"
 	"sort"
+
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // PathEnclosingInterval returns the node that encloses the source
@@ -294,8 +296,8 @@
 
 	case *ast.FieldList:
 		children = append(children,
-			tok(n.Opening, len("(")),
-			tok(n.Closing, len(")")))
+			tok(n.Opening, len("(")), // or len("[")
+			tok(n.Closing, len(")"))) // or len("]")
 
 	case *ast.File:
 		// TODO test: Doc
@@ -322,6 +324,9 @@
 			children = append(children, n.Recv)
 		}
 		children = append(children, n.Name)
+		if tparams := typeparams.ForFuncType(n.Type); tparams != nil {
+			children = append(children, tparams)
+		}
 		if n.Type.Params != nil {
 			children = append(children, n.Type.Params)
 		}
@@ -371,8 +376,13 @@
 
 	case *ast.IndexExpr:
 		children = append(children,
-			tok(n.Lbrack, len("{")),
-			tok(n.Rbrack, len("}")))
+			tok(n.Lbrack, len("[")),
+			tok(n.Rbrack, len("]")))
+
+	case *typeparams.IndexListExpr:
+		children = append(children,
+			tok(n.Lbrack, len("[")),
+			tok(n.Rbrack, len("]")))
 
 	case *ast.InterfaceType:
 		children = append(children,
@@ -581,6 +591,8 @@
 		return "decrement statement"
 	case *ast.IndexExpr:
 		return "index expression"
+	case *typeparams.IndexListExpr:
+		return "index list expression"
 	case *ast.InterfaceType:
 		return "interface type"
 	case *ast.KeyValueExpr:
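Illustrative sketch (not part of the diff), assuming Go 1.18+, of how PathEnclosingInterval now descends into a multi-index instantiation such as g[int, string]:

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	const src = `package p
func g[A, B any]() {}
var _ = g[int, string]
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	// Interval covering the "int" type argument of g[int, string].
	start := token.Pos(fset.File(f.Pos()).Base() + strings.Index(src, "int"))
	path, exact := astutil.PathEnclosingInterval(f, start, start+3)
	for _, n := range path {
		fmt.Printf("%T\n", n) // the path includes *ast.IndexListExpr
	}
	fmt.Println("exact:", exact)
}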
diff --git a/go/ast/astutil/enclosing_test.go b/go/ast/astutil/enclosing_test.go
index 107f87c..5e86ff9 100644
--- a/go/ast/astutil/enclosing_test.go
+++ b/go/ast/astutil/enclosing_test.go
@@ -19,6 +19,7 @@
 	"testing"
 
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // pathToString returns a string containing the concrete types of the
@@ -59,7 +60,10 @@
 }
 
 // Common input for following tests.
-const input = `
+var input = makeInput()
+
+func makeInput() string {
+	src := `
 // Hello.
 package main
 import "fmt"
@@ -70,52 +74,88 @@
 }
 `
 
+	if typeparams.Enabled {
+		src += `
+func g[A any, P interface{ctype1| ~ctype2}](a1 A, p1 P) {}
+
+type PT[T constraint] struct{ t T }
+
+var v GT[targ1]
+
+var h = g[ targ2, targ3]
+`
+	}
+	return src
+}
+
 func TestPathEnclosingInterval_Exact(t *testing.T) {
-	// For the exact tests, we check that a substring is mapped to
-	// the canonical string for the node it denotes.
-	tests := []struct {
+	type testCase struct {
 		substr string // first occurrence of this string indicates interval
 		node   string // complete text of expected containing node
-	}{
+	}
+
+	dup := func(s string) testCase { return testCase{s, s} }
+	// For the exact tests, we check that a substring is mapped to
+	// the canonical string for the node it denotes.
+	tests := []testCase{
 		{"package",
 			input[11 : len(input)-1]},
 		{"\npack",
 			input[11 : len(input)-1]},
-		{"main",
-			"main"},
+		dup("main"),
 		{"import",
 			"import \"fmt\""},
-		{"\"fmt\"",
-			"\"fmt\""},
+		dup("\"fmt\""),
 		{"\nfunc f() {}\n",
 			"func f() {}"},
 		{"x ",
 			"x"},
 		{" y",
 			"y"},
-		{"z",
-			"z"},
+		dup("z"),
 		{" + ",
 			"x + y"},
 		{" :=",
 			"z := (x + y)"},
-		{"x + y",
-			"x + y"},
-		{"(x + y)",
-			"(x + y)"},
+		dup("x + y"),
+		dup("(x + y)"),
 		{" (x + y) ",
 			"(x + y)"},
 		{" (x + y) // add",
 			"(x + y)"},
 		{"func",
 			"func f() {}"},
-		{"func f() {}",
-			"func f() {}"},
+		dup("func f() {}"),
 		{"\nfun",
 			"func f() {}"},
 		{" f",
 			"f"},
 	}
+	if typeparams.Enabled {
+		tests = append(tests, []testCase{
+			dup("[A any, P interface{ctype1| ~ctype2}]"),
+			{"[", "[A any, P interface{ctype1| ~ctype2}]"},
+			dup("A"),
+			{" any", "any"},
+			dup("ctype1"),
+			{"|", "ctype1| ~ctype2"},
+			dup("ctype2"),
+			{"~", "~ctype2"},
+			dup("~ctype2"),
+			{" ~ctype2", "~ctype2"},
+			{"]", "[A any, P interface{ctype1| ~ctype2}]"},
+			dup("a1"),
+			dup("a1 A"),
+			dup("(a1 A, p1 P)"),
+			dup("type PT[T constraint] struct{ t T }"),
+			dup("PT"),
+			dup("[T constraint]"),
+			dup("constraint"),
+			dup("targ1"),
+			{" targ2", "targ2"},
+			dup("g[ targ2, targ3]"),
+		}...)
+	}
 	for _, test := range tests {
 		f, start, end := findInterval(t, new(token.FileSet), input, test.substr)
 		if f == nil {
@@ -145,13 +185,14 @@
 }
 
 func TestPathEnclosingInterval_Paths(t *testing.T) {
+	type testCase struct {
+		substr string // first occurrence of this string indicates interval
+		path   string // the pathToString(),exact of the expected path
+	}
 	// For these tests, we check only the path of the enclosing
 	// node, but not its complete text because it's often quite
 	// large when !exact.
-	tests := []struct {
-		substr string // first occurrence of this string indicates interval
-		path   string // the pathToString(),exact of the expected path
-	}{
+	tests := []testCase{
 		{"// add",
 			"[BlockStmt FuncDecl File],false"},
 		{"(x + y",
@@ -179,6 +220,18 @@
 		{"f() // NB",
 			"[CallExpr ExprStmt BlockStmt FuncDecl File],true"},
 	}
+	if typeparams.Enabled {
+		tests = append(tests, []testCase{
+			{" any", "[Ident Field FieldList FuncDecl File],true"},
+			{"|", "[BinaryExpr Field FieldList InterfaceType Field FieldList FuncDecl File],true"},
+			{"ctype2",
+				"[Ident UnaryExpr BinaryExpr Field FieldList InterfaceType Field FieldList FuncDecl File],true"},
+			{"a1", "[Ident Field FieldList FuncDecl File],true"},
+			{"PT[T constraint]", "[TypeSpec GenDecl File],false"},
+			{"[T constraint]", "[FieldList TypeSpec GenDecl File],true"},
+			{"targ2", "[Ident IndexListExpr ValueSpec GenDecl File],true"},
+		}...)
+	}
 	for _, test := range tests {
 		f, start, end := findInterval(t, new(token.FileSet), input, test.substr)
 		if f == nil {
diff --git a/go/ast/astutil/rewrite.go b/go/ast/astutil/rewrite.go
index 5fe75b1..6d9ca23 100644
--- a/go/ast/astutil/rewrite.go
+++ b/go/ast/astutil/rewrite.go
@@ -253,6 +253,10 @@
 		a.apply(n, "X", nil, n.X)
 		a.apply(n, "Index", nil, n.Index)
 
+	case *typeparams.IndexListExpr:
+		a.apply(n, "X", nil, n.X)
+		a.applyList(n, "Indices")
+
 	case *ast.SliceExpr:
 		a.apply(n, "X", nil, n.X)
 		a.apply(n, "Low", nil, n.Low)
@@ -439,13 +443,7 @@
 		}
 
 	default:
-		if ix := typeparams.GetIndexExprData(n); ix != nil {
-			a.apply(n, "X", nil, ix.X)
-			// *ast.IndexExpr was handled above, so n must be an *ast.MultiIndexExpr.
-			a.applyList(n, "Indices")
-		} else {
-			panic(fmt.Sprintf("Apply: unexpected node type %T", n))
-		}
+		panic(fmt.Sprintf("Apply: unexpected node type %T", n))
 	}
 
 	if a.post != nil && !a.post(&a.cursor) {
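Illustrative sketch (not part of the diff), with the hypothetical helper name countInstantiations, of walking a file with astutil.Apply now that index list expressions have their X and Indices children applied like any other node (Go 1.18+):

package genericsutil

import (
	"go/ast"

	"golang.org/x/tools/go/ast/astutil"
)

// countInstantiations counts multi-argument generic instantiations
// such as f[int, string] reached during the Apply traversal.
func countInstantiations(f *ast.File) int {
	n := 0
	astutil.Apply(f, func(c *astutil.Cursor) bool {
		if _, ok := c.Node().(*ast.IndexListExpr); ok {
			n++
		}
		return true
	}, nil)
	return n
}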
diff --git a/go/ast/inspector/inspector_test.go b/go/ast/inspector/inspector_test.go
index 3e9d3ba..9e53918 100644
--- a/go/ast/inspector/inspector_test.go
+++ b/go/ast/inspector/inspector_test.go
@@ -12,10 +12,12 @@
 	"log"
 	"path/filepath"
 	"reflect"
+	"strconv"
 	"strings"
 	"testing"
 
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 var netFiles []*ast.File
@@ -69,6 +71,72 @@
 	compare(t, nodesA, nodesB)
 }
 
+func TestInspectGenericNodes(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("type parameters are not supported at this Go version")
+	}
+
+	// src is using the 16 identifiers i0, i1, ... i15 so
+	// we can easily verify that we've found all of them.
+	const src = `package a
+
+type I interface { ~i0|i1 }
+
+type T[i2, i3 interface{ ~i4 }] struct {}
+
+func f[i5, i6 any]() {
+	_ = f[i7, i8]
+	var x T[i9, i10]
+}
+
+func (*T[i11, i12]) m()
+
+var _ i13[i14, i15]
+`
+	fset := token.NewFileSet()
+	f, _ := parser.ParseFile(fset, "a.go", src, 0)
+	inspect := inspector.New([]*ast.File{f})
+	found := make([]bool, 16)
+
+	indexListExprs := make(map[*typeparams.IndexListExpr]bool)
+
+	// Verify that we reach all i* identifiers, and collect IndexListExpr nodes.
+	inspect.Preorder(nil, func(n ast.Node) {
+		switch n := n.(type) {
+		case *ast.Ident:
+			if n.Name[0] == 'i' {
+				index, err := strconv.Atoi(n.Name[1:])
+				if err != nil {
+					t.Fatal(err)
+				}
+				found[index] = true
+			}
+		case *typeparams.IndexListExpr:
+			indexListExprs[n] = false
+		}
+	})
+	for i, v := range found {
+		if !v {
+			t.Errorf("missed identifier i%d", i)
+		}
+	}
+
+	// Verify that we can filter to IndexListExprs that we found in the first
+	// step.
+	if len(indexListExprs) == 0 {
+		t.Fatal("no index list exprs found")
+	}
+	inspect.Preorder([]ast.Node{&typeparams.IndexListExpr{}}, func(n ast.Node) {
+		ix := n.(*typeparams.IndexListExpr)
+		indexListExprs[ix] = true
+	})
+	for ix, v := range indexListExprs {
+		if !v {
+			t.Errorf("inspected node %v not filtered", ix)
+		}
+	}
+}
+
 // TestPruning compares Inspector against ast.Inspect,
 // pruning descent within ast.CallExpr nodes.
 func TestInspectPruning(t *testing.T) {
diff --git a/go/ast/inspector/typeof.go b/go/ast/inspector/typeof.go
index b6b00cf..11f4fc3 100644
--- a/go/ast/inspector/typeof.go
+++ b/go/ast/inspector/typeof.go
@@ -9,7 +9,11 @@
 // The initial map-based implementation was too slow;
 // see https://go-review.googlesource.com/c/tools/+/135655/1/go/ast/inspector/inspector.go#196
 
-import "go/ast"
+import (
+	"go/ast"
+
+	"golang.org/x/tools/internal/typeparams"
+)
 
 const (
 	nArrayType = iota
@@ -47,6 +51,7 @@
 	nImportSpec
 	nIncDecStmt
 	nIndexExpr
+	nIndexListExpr
 	nInterfaceType
 	nKeyValueExpr
 	nLabeledStmt
@@ -164,6 +169,8 @@
 		return 1 << nIncDecStmt
 	case *ast.IndexExpr:
 		return 1 << nIndexExpr
+	case *typeparams.IndexListExpr:
+		return 1 << nIndexListExpr
 	case *ast.InterfaceType:
 		return 1 << nInterfaceType
 	case *ast.KeyValueExpr:
diff --git a/go/callgraph/vta/graph.go b/go/callgraph/vta/graph.go
index 6c9e6a5..ad7ef0e 100644
--- a/go/callgraph/vta/graph.go
+++ b/go/callgraph/vta/graph.go
@@ -190,6 +190,25 @@
 	return fmt.Sprintf("PtrInterface(%v)", l.typ)
 }
 
+// nestedPtrFunction node represents all references and dereferences of locals
+// and globals that have a nested pointer to function type. We merge such
+// constructs into a single node for simplicity and without much loss of
+// precision, as such variables are rare in practice. Both a and b would be
+// represented as the same PtrFunction(func()) node in:
+//   var a *func()
+//   var b **func()
+type nestedPtrFunction struct {
+	typ types.Type
+}
+
+func (p nestedPtrFunction) Type() types.Type {
+	return p.typ
+}
+
+func (p nestedPtrFunction) String() string {
+	return fmt.Sprintf("PtrFunction(%v)", p.typ)
+}
+
 // panicArg models types of all arguments passed to panic.
 type panicArg struct{}
 
@@ -554,6 +573,13 @@
 }
 
 func addArgumentFlows(b *builder, c ssa.CallInstruction, f *ssa.Function) {
+	// When f has no parameters (including receiver), there is no type
+	// flow here. Also, f's body and parameters might be missing, such
+	// as when vta is used within the golang.org/x/tools/go/analysis
+	// framework (see github.com/golang/go/issues/50670).
+	if len(f.Params) == 0 {
+		return
+	}
 	cc := c.Common()
 	// When c is an unresolved method call (cc.Method != nil), cc.Value contains
 	// the receiver object rather than cc.Args[0].
@@ -566,6 +592,14 @@
 		offset = 1
 	}
 	for i, v := range cc.Args {
+		// Parameters of f might not be available, as in the case
+		// when vta is used within the golang.org/x/tools/go/analysis
+		// framework (see github.com/golang/go/issues/50670).
+		//
+		// TODO: investigate other cases of missing body and parameters
+		if len(f.Params) <= i+offset {
+			return
+		}
 		b.addInFlowAliasEdges(b.nodeFromVal(f.Params[i+offset]), b.nodeFromVal(v))
 	}
 }
@@ -606,7 +640,7 @@
 
 // addInFlowEdge adds s -> d to g if d is node that can have an inflow, i.e., a node
 // that represents an interface or an unresolved function value. Otherwise, there
-// is no interesting type flow so the edge is ommited.
+// is no interesting type flow so the edge is omitted.
 func (b *builder) addInFlowEdge(s, d node) {
 	if hasInFlow(d) {
 		b.graph.addEdge(b.representative(s), b.representative(d))
@@ -615,12 +649,16 @@
 
 // Creates const, pointer, global, func, and local nodes based on register instructions.
 func (b *builder) nodeFromVal(val ssa.Value) node {
-	if p, ok := val.Type().(*types.Pointer); ok && !isInterface(p.Elem()) {
+	if p, ok := val.Type().(*types.Pointer); ok && !isInterface(p.Elem()) && !isFunction(p.Elem()) {
 		// Nested pointer to interfaces are modeled as a special
 		// nestedPtrInterface node.
 		if i := interfaceUnderPtr(p.Elem()); i != nil {
 			return nestedPtrInterface{typ: i}
 		}
+		// The same goes for nested function types.
+		if f := functionUnderPtr(p.Elem()); f != nil {
+			return nestedPtrFunction{typ: f}
+		}
 		return pointer{typ: p}
 	}
 
@@ -665,6 +703,8 @@
 		return channelElem{typ: t}
 	case nestedPtrInterface:
 		return nestedPtrInterface{typ: t}
+	case nestedPtrFunction:
+		return nestedPtrFunction{typ: t}
 	case field:
 		return field{StructType: canonicalize(i.StructType, &b.canon), index: i.index}
 	case indexedLocal:
diff --git a/go/callgraph/vta/graph_test.go b/go/callgraph/vta/graph_test.go
index 61bb05a..8608844 100644
--- a/go/callgraph/vta/graph_test.go
+++ b/go/callgraph/vta/graph_test.go
@@ -18,15 +18,15 @@
 
 func TestNodeInterface(t *testing.T) {
 	// Since ssa package does not allow explicit creation of ssa
-	// values, we use the values from the program testdata/simple.go:
+	// values, we use the values from the program testdata/src/simple.go:
 	//   - basic type int
 	//   - struct X with two int fields a and b
 	//   - global variable "gl"
 	//   - "main" function and its
 	//   - first register instruction t0 := *gl
-	prog, _, err := testProg("testdata/simple.go")
+	prog, _, err := testProg("testdata/src/simple.go")
 	if err != nil {
-		t.Fatalf("couldn't load testdata/simple.go program: %v", err)
+		t.Fatalf("couldn't load testdata/src/simple.go program: %v", err)
 	}
 
 	pkg := prog.AllPackages()[0]
@@ -43,6 +43,8 @@
 	pint := types.NewPointer(bint)
 	i := types.NewInterface(nil, nil)
 
+	voidFunc := main.Signature.Underlying()
+
 	for _, test := range []struct {
 		n node
 		s string
@@ -59,8 +61,9 @@
 		{global{val: gl}, "Global(gl)", gl.Type()},
 		{local{val: reg}, "Local(t0)", bint},
 		{indexedLocal{val: reg, typ: X, index: 0}, "Local(t0[0])", X},
-		{function{f: main}, "Function(main)", main.Signature.Underlying()},
+		{function{f: main}, "Function(main)", voidFunc},
 		{nestedPtrInterface{typ: i}, "PtrInterface(interface{})", i},
+		{nestedPtrFunction{typ: voidFunc}, "PtrFunction(func())", voidFunc},
 		{panicArg{}, "Panic", nil},
 		{recoverReturn{}, "Recover", nil},
 	} {
@@ -75,9 +78,9 @@
 
 func TestVtaGraph(t *testing.T) {
 	// Get the basic type int from a real program.
-	prog, _, err := testProg("testdata/simple.go")
+	prog, _, err := testProg("testdata/src/simple.go")
 	if err != nil {
-		t.Fatalf("couldn't load testdata/simple.go program: %v", err)
+		t.Fatalf("couldn't load testdata/src/simple.go program: %v", err)
 	}
 
 	glPtrType, ok := prog.AllPackages()[0].Var("gl").Type().(*types.Pointer)
@@ -167,24 +170,25 @@
 
 func TestVTAGraphConstruction(t *testing.T) {
 	for _, file := range []string{
-		"testdata/store.go",
-		"testdata/phi.go",
-		"testdata/type_conversions.go",
-		"testdata/type_assertions.go",
-		"testdata/fields.go",
-		"testdata/node_uniqueness.go",
-		"testdata/store_load_alias.go",
-		"testdata/phi_alias.go",
-		"testdata/channels.go",
-		"testdata/select.go",
-		"testdata/stores_arrays.go",
-		"testdata/maps.go",
-		"testdata/ranges.go",
-		"testdata/closures.go",
-		"testdata/static_calls.go",
-		"testdata/dynamic_calls.go",
-		"testdata/returns.go",
-		"testdata/panic.go",
+		"testdata/src/store.go",
+		"testdata/src/phi.go",
+		"testdata/src/type_conversions.go",
+		"testdata/src/type_assertions.go",
+		"testdata/src/fields.go",
+		"testdata/src/node_uniqueness.go",
+		"testdata/src/store_load_alias.go",
+		"testdata/src/phi_alias.go",
+		"testdata/src/channels.go",
+		"testdata/src/select.go",
+		"testdata/src/stores_arrays.go",
+		"testdata/src/maps.go",
+		"testdata/src/ranges.go",
+		"testdata/src/closures.go",
+		"testdata/src/function_alias.go",
+		"testdata/src/static_calls.go",
+		"testdata/src/dynamic_calls.go",
+		"testdata/src/returns.go",
+		"testdata/src/panic.go",
 	} {
 		t.Run(file, func(t *testing.T) {
 			prog, want, err := testProg(file)
diff --git a/go/callgraph/vta/internal/trie/bits.go b/go/callgraph/vta/internal/trie/bits.go
new file mode 100644
index 0000000..f2fd0ba
--- /dev/null
+++ b/go/callgraph/vta/internal/trie/bits.go
@@ -0,0 +1,124 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package trie
+
+import (
+	"math/bits"
+)
+
+// This file contains bit twiddling functions for Patricia tries.
+// Consult this paper for details.
+//   C. Okasaki and A. Gill, “Fast mergeable integer maps,” in ACM SIGPLAN
+//   Workshop on ML, September 1998, pp. 77–86.
+
+// key is a key in a Map.
+type key uint64
+
+// bitpos is the position of a bit. A position is represented by having a 1
+// bit in that position.
+// Examples:
+// * 0b0010 is the position of the `1` bit in 2.
+//    It is the 3rd most specific bit position in big endian encoding
+//    (0b0 and 0b1 are more specific).
+// * 0b0100 is the position of the bit that 1 and 5 disagree on.
+// * 0b0 is a special value indicating that all bits agree.
+type bitpos uint64
+
+// prefixes represent a set of keys that all agree with the
+// prefix up to a bitpos m.
+//
+// The value for a prefix is determined by the mask(k, m) function.
+// (See mask for details on the values.)
+// A `p` prefix for position `m` matches a key `k` iff mask(k, m) == p.
+// A prefix always satisfies mask(p, m) == p.
+//
+// A key is its own prefix for the bit position 64,
+//   so seeing a `prefix(key)` is not a problem.
+// Prefixes should never be turned into keys.
+type prefix uint64
+
+// branchingBit returns the position of the first bit in `x` and `y`
+// that are not equal.
+func branchingBit(x, y prefix) bitpos {
+	p := x ^ y
+	if p == 0 {
+		return 0
+	}
+	return bitpos(1) << uint(bits.Len64(uint64(p))-1) // uint conversion needed for go1.12
+}
+
+// zeroBit returns true if k has a 0 bit at position `b`.
+func zeroBit(k prefix, b bitpos) bool {
+	return (uint64(k) & uint64(b)) == 0
+}
+
+// matchPrefix returns true if a prefix k matches a prefix p up to position `b`.
+func matchPrefix(k prefix, p prefix, b bitpos) bool {
+	return mask(k, b) == p
+}
+
+// mask returns the prefix of `k` for bit position `b`: the bit at `b` is
+// zeroed, every lower bit is set to 1, and the higher bits are kept.
+//
+// In big endian encoding, this value is the [64-(m-1)] most significant bits of k
+// followed by a `0` bit at bitpos m, followed by m-1 `1` bits.
+// Examples:
+//  prefix(0b1011) for a bitpos 0b0100 represents the keys:
+//    0b1000, 0b1001, 0b1010, 0b1011, 0b1100, 0b1101, 0b1110, 0b1111
+//
+// This mask function has the property that if matchPrefix(k, p, b), then
+// k <= p if and only if zeroBit(k, m). This induces binary search tree tries.
+// See Okasaki & Gill for more details about this choice of mask function.
+//
+// mask is idempotent for a given `b`, i.e. mask(mask(p, b), b) == mask(p,b).
+func mask(k prefix, b bitpos) prefix {
+	return prefix((uint64(k) | (uint64(b) - 1)) & (^uint64(b)))
+}
+
+// ord returns true if m comes before n in the bit ordering.
+func ord(m, n bitpos) bool {
+	return m > n // big endian encoding
+}
+
+// prefixesOverlap returns true if there is some key a prefix `p` for bitpos `m`
+// can hold that can also be held by a prefix `q` for some bitpos `n`.
+//
+// This is equivalent to:
+//   m == n && p == q,
+//   ord(m, n) && matchPrefix(q, p, m), or
+//   ord(n, m) && matchPrefix(p, q, n)
+func prefixesOverlap(p prefix, m bitpos, q prefix, n bitpos) bool {
+	fbb := n
+	if ord(m, n) {
+		fbb = m
+	}
+	return mask(p, fbb) == mask(q, fbb)
+	// Lemma:
+	//   mask(p, fbb) == mask(q, fbb)
+	// iff
+	//   m > n && matchPrefix(q, p, m) or  (note: big endian encoding)
+	//   m < n && matchPrefix(p, q, n) or  (note: big endian encoding)
+	//   m ==n && p == q
+	// Quick-n-dirty proof:
+	// p == mask(p0, m) for some p0 by precondition.
+	// q == mask(q0, n) for some q0 by precondition.
+	// So mask(p, m) == p and mask(q, n) == q as mask(*, n') is idempotent.
+	//
+	// [=> proof]
+	// Suppose mask(p, fbb) == mask(q, fbb).
+	// if m ==n, p == mask(p, m) == mask(p, fbb) == mask(q, fbb) == mask(q, n) == q
+	// if m > n, fbb = firstBranchBit(m, n) = m (big endian).
+	//   p == mask(p, m) == mask(p, fbb) == mask(q, fbb) == mask(q, m)
+	//   so mask(q, m) == p or matchPrefix(q, p, m)
+	// if m < n, is symmetric to the above.
+	//
+	// [<= proof]
+	// case m ==n && p == q. Then mask(p, fbb) == mask(q, fbb)
+	//
+	// case m > n && matchPrefix(q, p, m).
+	// fbb == firstBranchBit(m, n) == m (by m>n).
+	// mask(q, fbb) == mask(q, m) == p == mask(p, m) == mask(p, fbb)
+	//
+	// case m < n && matchPrefix(p, q, n) is symmetric.
+}
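Illustrative sketch (not part of the diff) tying the comments above to concrete values; the helpers are unexported, so it is written as if it lived in package trie:

package trie

import "testing"

func TestBitsWorkedExample(t *testing.T) {
	// 0b1011 and 0b1001 first disagree at the 0b0010 bit.
	if got := branchingBit(0b1011, 0b1001); got != 0b0010 {
		t.Errorf("branchingBit = %#b, want 0b0010", got)
	}
	// mask keeps the bits above 0b0100, zeroes the 0b0100 bit, and sets the lower bits.
	if got := mask(0b1011, 0b0100); got != 0b1011 {
		t.Errorf("mask = %#b, want 0b1011", got)
	}
	// 0b1100 is one of the keys 0b1000..0b1111 covered by prefix 0b1011 at bitpos 0b0100.
	if !matchPrefix(0b1100, 0b1011, 0b0100) {
		t.Error("0b1100 should match prefix 0b1011 at bitpos 0b0100")
	}
}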
diff --git a/go/callgraph/vta/internal/trie/bits_test.go b/go/callgraph/vta/internal/trie/bits_test.go
new file mode 100644
index 0000000..07784cd
--- /dev/null
+++ b/go/callgraph/vta/internal/trie/bits_test.go
@@ -0,0 +1,313 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package trie
+
+import (
+	"math/rand"
+	"testing"
+)
+
+func TestMask(t *testing.T) {
+	for _, c := range []struct {
+		p    prefix
+		b    bitpos
+		want prefix
+	}{
+		{
+			p:    0b00001000,
+			b:    0b00000100,
+			want: 0b00001011,
+		}, {
+			p:    0b01011011,
+			b:    0b00000000,
+			want: ^prefix(0),
+		}, {
+			p:    0b01011011,
+			b:    0b00000001,
+			want: 0b01011010,
+		}, {
+			p:    0b01011011,
+			b:    0b00000010,
+			want: 0b01011001,
+		}, {
+			p:    0b01011011,
+			b:    0b00000100,
+			want: 0b01011011,
+		}, {
+			p:    0b01011011,
+			b:    0b00001000,
+			want: 0b01010111,
+		}, {
+			p:    0b01011011,
+			b:    0b00010000,
+			want: 0b01001111,
+		}, {
+			p:    0b01011011,
+			b:    0b00100000,
+			want: 0b01011111,
+		}, {
+			p:    0b01011011,
+			b:    0b01000000,
+			want: 0b00111111,
+		}, {
+			p:    0b01011011,
+			b:    0b01000000,
+			want: 0b00111111,
+		}, {
+			p:    0b01011011,
+			b:    0b10000000,
+			want: 0b01111111,
+		},
+	} {
+		if got := mask(c.p, c.b); got != c.want {
+			t.Errorf("mask(%#b,%#b) got %#b. want %#b", c.p, c.b, got, c.want)
+		}
+	}
+}
+
+func TestMaskIdempotent(t *testing.T) {
+	// test mask(mask(p, b), b) == mask(p,b)
+	for _, p := range []prefix{
+		0b0, 0b1, 0b100, ^prefix(0b0), ^prefix(0b10),
+	} {
+		for _, b := range []bitpos{
+			0, 0b1, 1 << 2, 1 << 63,
+		} {
+			once := mask(p, b)
+			twice := mask(once, b)
+			if once != twice {
+				t.Errorf("mask(mask(%#b,%#b), %#b) != mask(%#b,%#b) got %#b. want %#b",
+					p, b, b, p, b, twice, once)
+			}
+		}
+	}
+}
+
+func TestMatchPrefix(t *testing.T) {
+	for _, c := range []struct {
+		k prefix
+		p prefix
+		b bitpos
+	}{
+		{
+			k: 0b1000,
+			p: 0b1011,
+			b: 0b0100,
+		}, {
+			k: 0b1001,
+			p: 0b1011,
+			b: 0b0100,
+		}, {
+			k: 0b1010,
+			p: 0b1011,
+			b: 0b0100,
+		}, {
+			k: 0b1011,
+			p: 0b1011,
+			b: 0b0100,
+		}, {
+			k: 0b1100,
+			p: 0b1011,
+			b: 0b0100,
+		}, {
+			k: 0b1101,
+			p: 0b1011,
+			b: 0b0100,
+		}, {
+			k: 0b1110,
+			p: 0b1011,
+			b: 0b0100,
+		}, {
+			k: 0b1111,
+			p: 0b1011,
+			b: 0b0100,
+		},
+	} {
+		if !matchPrefix(c.k, c.p, c.b) {
+			t.Errorf("matchPrefix(%#b, %#b,%#b) should be true", c.k, c.p, c.b)
+		}
+	}
+}
+
+func TestNotMatchPrefix(t *testing.T) {
+	for _, c := range []struct {
+		k prefix
+		p prefix
+		b bitpos
+	}{
+		{
+			k: 0b0000,
+			p: 0b1011,
+			b: 0b0100,
+		}, {
+			k: 0b0010,
+			p: 0b1011,
+			b: 0b0100,
+		},
+	} {
+		if matchPrefix(c.k, c.p, c.b) {
+			t.Errorf("matchPrefix(%#b, %#b,%#b) should be false", c.k, c.p, c.b)
+		}
+	}
+}
+
+func TestBranchingBit(t *testing.T) {
+	for _, c := range []struct {
+		x    prefix
+		y    prefix
+		want bitpos
+	}{
+		{
+			x:    0b0000,
+			y:    0b1011,
+			want: 0b1000,
+		}, {
+			x:    0b1010,
+			y:    0b1011,
+			want: 0b0001,
+		}, {
+			x:    0b1011,
+			y:    0b1111,
+			want: 0b0100,
+		}, {
+			x:    0b1011,
+			y:    0b1001,
+			want: 0b0010,
+		},
+	} {
+		if got := branchingBit(c.x, c.y); got != c.want {
+			t.Errorf("branchingBit(%#b, %#b,) is not expected value. got %#b want %#b",
+				c.x, c.y, got, c.want)
+		}
+	}
+}
+
+func TestZeroBit(t *testing.T) {
+	for _, c := range []struct {
+		k prefix
+		b bitpos
+	}{
+		{
+			k: 0b1000,
+			b: 0b0100,
+		}, {
+			k: 0b1001,
+			b: 0b0100,
+		}, {
+			k: 0b1010,
+			b: 0b0100,
+		},
+	} {
+		if !zeroBit(c.k, c.b) {
+			t.Errorf("zeroBit(%#b, %#b) should be true", c.k, c.b)
+		}
+	}
+}
+func TestZeroBitFails(t *testing.T) {
+	for _, c := range []struct {
+		k prefix
+		b bitpos
+	}{
+		{
+			k: 0b1000,
+			b: 0b1000,
+		}, {
+			k: 0b1001,
+			b: 0b0001,
+		}, {
+			k: 0b1010,
+			b: 0b0010,
+		}, {
+			k: 0b1011,
+			b: 0b0001,
+		},
+	} {
+		if zeroBit(c.k, c.b) {
+			t.Errorf("zeroBit(%#b, %#b) should be false", c.k, c.b)
+		}
+	}
+}
+
+func TestOrd(t *testing.T) {
+	a := bitpos(0b0010)
+	b := bitpos(0b1000)
+	if ord(a, b) {
+		t.Errorf("ord(%#b, %#b) should be false", a, b)
+	}
+	if !ord(b, a) {
+		t.Errorf("ord(%#b, %#b) should be true", b, a)
+	}
+	if ord(a, a) {
+		t.Errorf("ord(%#b, %#b) should be false", a, a)
+	}
+	if !ord(a, 0) {
+		t.Errorf("ord(%#b, %#b) should be true", a, 0)
+	}
+}
+
+func TestPrefixesOverlapLemma(t *testing.T) {
+	// test
+	//   mask(p, fbb) == mask(q, fbb)
+	// iff
+	//   m > n && matchPrefix(q, p, m) or  (note: big endian encoding)
+	//   m < n && matchPrefix(p, q, n) or  (note: big endian encoding)
+	//   m ==n && p == q
+
+	// Case 1: mask(p, fbb) == mask(q, fbb) => m > n && matchPrefix(q, p, m)
+	m, n := bitpos(1<<2), bitpos(1<<1)
+	p, q := mask(0b100, m), mask(0b010, n)
+	if !(prefixesOverlap(p, m, q, n) && m > n && matchPrefix(q, p, m)) {
+		t.Errorf("prefixesOverlap(%#b, %#b, %#b, %#b) lemma does not hold",
+			p, m, q, n)
+	}
+	// Case 2: mask(p, fbb) == mask(q, fbb) => m < n && matchPrefix(p, q, n)
+	m, n = bitpos(1<<2), bitpos(1<<3)
+	p, q = mask(0b100, m), mask(0b1000, n)
+	if !(prefixesOverlap(p, m, q, n) && m < n && matchPrefix(p, q, n)) {
+		t.Errorf("prefixesOverlap(%#b, %#b, %#b, %#b) lemma does not hold",
+			p, m, q, n)
+	}
+	// Case 3: mask(p, fbb) == mask(q, fbb) => m < n && matchPrefix(p, q, n)
+	m, n = bitpos(1<<2), bitpos(1<<2)
+	p, q = mask(0b100, m), mask(0b001, n)
+	if !(prefixesOverlap(p, m, q, n) && m == n && p == q) {
+		t.Errorf("prefixesOverlap(%#b, %#b, %#b, %#b) lemma does not hold",
+			p, m, q, n)
+	}
+	// Case 4: mask(p, fbb) != mask(q, fbb)
+	m, n = bitpos(1<<1), bitpos(1<<1)
+	p, q = mask(0b100, m), mask(0b001, n)
+	if prefixesOverlap(p, m, q, n) ||
+		(m > n && matchPrefix(q, p, m)) ||
+		(m < n && matchPrefix(p, q, n)) ||
+		(m == n && p == q) {
+		t.Errorf("prefixesOverlap(%#b, %#b, %#b, %#b) lemma does not hold",
+			p, m, q, n)
+	}
+
+	// Do a few more random cases
+	r := rand.New(rand.NewSource(123))
+	N := 2000
+	for i := 0; i < N; i++ {
+		m := bitpos(1 << (r.Uint64() % (64 + 1)))
+		n := bitpos(1 << (r.Uint64() % (64 + 1)))
+
+		p := mask(prefix(r.Uint64()), m)
+		q := mask(prefix(r.Uint64()), n)
+
+		lhs := prefixesOverlap(p, m, q, n)
+		rhs := (m > n && matchPrefix(q, p, m)) ||
+			(m < n && matchPrefix(p, q, n)) ||
+			(m == n && p == q)
+
+		if lhs != rhs {
+			t.Errorf("prefixesOverlap(%#b, %#b, %#b, %#b) != <lemma> got %v. want %v",
+				p, m, q, n, lhs, rhs)
+		}
+	}
+}
diff --git a/go/callgraph/vta/internal/trie/builder.go b/go/callgraph/vta/internal/trie/builder.go
new file mode 100644
index 0000000..25d3805
--- /dev/null
+++ b/go/callgraph/vta/internal/trie/builder.go
@@ -0,0 +1,504 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package trie
+
+// Collision functions combine a left and a right hand side (lhs and rhs) value,
+// the two values associated with the same key, and produce the value that
+// will be stored for the key.
+//
+// Collision functions must be idempotent:
+//   collision(x, x) == x for all x.
+// Collisions functions may be applied whenever a value is inserted
+// or two maps are merged, or intersected.
+type Collision func(lhs interface{}, rhs interface{}) interface{}
+
+// TakeLhs always returns the left value in a collision.
+func TakeLhs(lhs, rhs interface{}) interface{} { return lhs }
+
+// TakeRhs always returns the right hand side in a collision.
+func TakeRhs(lhs, rhs interface{}) interface{} { return rhs }
+
+// Builder creates new Map. Each Builder has a unique Scope.
+//
+// IMPORTANT: Nodes are hash-consed internally to reduce memory consumption. To
+// support hash-consing, Builders keep an internal Map of all of the Maps that they
+// have created. To GC any of the Maps created by the Builder, all references to
+// the Builder must be dropped. This includes MutMaps.
+type Builder struct {
+	scope Scope
+
+	// hash-consing maps for each node type.
+	empty    *empty
+	leaves   map[leaf]*leaf
+	branches map[branch]*branch
+	// It may be possible to support more types of patricia tries
+	// (e.g. non-hash-consed) by making Builder an interface and abstracting
+	// the mkLeaf and mkBranch functions.
+}
+
+// NewBuilder creates a new Builder with a unique Scope.
+func NewBuilder() *Builder {
+	s := newScope()
+	return &Builder{
+		scope:    s,
+		empty:    &empty{s},
+		leaves:   make(map[leaf]*leaf),
+		branches: make(map[branch]*branch),
+	}
+}
+
+func (b *Builder) Scope() Scope { return b.scope }
+
+// Rescope changes the builder's scope to a new unique Scope.
+//
+// Any Maps created using the previous scope need to be Cloned
+// before any operation.
+//
+// This makes the old internals of the Builder eligible to be GC'ed.
+func (b *Builder) Rescope() {
+	s := newScope()
+	b.scope = s
+	b.empty = &empty{s}
+	b.leaves = make(map[leaf]*leaf)
+	b.branches = make(map[branch]*branch)
+}
+
+// Empty is the empty map.
+func (b *Builder) Empty() Map { return Map{b.Scope(), b.empty} }
+
+// InsertWith inserts a new association from k to v into the Map m to create a new map
+// in the current scope, handling collisions with the collision function c.
+//
+// This roughly corresponds to updating a map[uint64]interface{} by:
+//   if _, ok := m[k]; ok { m[k] = c(m[k], v) } else { m[k] = v }
+//
+// An insertion or update happened whenever InsertWith(c, m, ...) != m.
+func (b *Builder) InsertWith(c Collision, m Map, k uint64, v interface{}) Map {
+	m = b.Clone(m)
+	return Map{b.Scope(), b.insert(c, m.n, b.mkLeaf(key(k), v), false)}
+}
+
+// Insert adds a new association from key to value into the Map m to create
+// a new map in the current scope.
+//
+// If there was a previous value mapped by key, the previously mapped value is kept.
+// This roughly corresponds to updating a map[uint64]interface{} by:
+//   if _, ok := m[k]; !ok { m[k] = v }
+//
+// This is equivalent to b.Merge(m, b.Create({k: v})).
+func (b *Builder) Insert(m Map, k uint64, v interface{}) Map {
+	return b.InsertWith(TakeLhs, m, k, v)
+}
+
+// Update sets the value for a key in the map. This roughly corresponds to
+// updating a map[uint64]interface{} by:
+//   m[key] = val
+func (b *Builder) Update(m Map, key uint64, val interface{}) Map {
+	return b.InsertWith(TakeRhs, m, key, val)
+}
+
+// MergeWith merges two maps lhs and rhs to create a new map in the current scope.
+//
+// Whenever there is a key in both maps (a collision), the resulting value mapped by
+// the key will be `c(lhs[key], rhs[key])`.
+func (b *Builder) MergeWith(c Collision, lhs, rhs Map) Map {
+	lhs, rhs = b.Clone(lhs), b.Clone(rhs)
+	return Map{b.Scope(), b.merge(c, lhs.n, rhs.n)}
+}
+
+// Merge two maps lhs and rhs to create a new map in the current scope.
+//
+// Whenever there is a key in both maps (a collision), the resulting value mapped by
+// the key will be the value in lhs.
+func (b *Builder) Merge(lhs, rhs Map) Map {
+	return b.MergeWith(TakeLhs, lhs, rhs)
+}
+
+// Clone returns a Map that contains the same (key, value) elements
+// within b.Scope(), i.e. return m if m.Scope() == b.Scope() or return
+// a deep copy of m within b.Scope() otherwise.
+func (b *Builder) Clone(m Map) Map {
+	if m.Scope() == b.Scope() {
+		return m
+	} else if m.n == nil {
+		return Map{b.Scope(), b.empty}
+	}
+	return Map{b.Scope(), b.clone(m.n)}
+}
+func (b *Builder) clone(n node) node {
+	switch n := n.(type) {
+	case *empty:
+		return b.empty
+	case *leaf:
+		return b.mkLeaf(n.k, n.v)
+	case *branch:
+		return b.mkBranch(n.prefix, n.branching, b.clone(n.left), b.clone(n.right))
+	default:
+		panic("unreachable")
+	}
+}
+
+// Remove a key from a Map m and return the resulting Map.
+func (b *Builder) Remove(m Map, k uint64) Map {
+	m = b.Clone(m)
+	return Map{b.Scope(), b.remove(m.n, key(k))}
+}
+
+// Intersect Maps lhs and rhs and returns a map with all of the keys in
+// both lhs and rhs and the value comes from lhs, i.e.
+//   {(k, lhs[k]) | k in lhs, k in rhs}.
+func (b *Builder) Intersect(lhs, rhs Map) Map {
+	return b.IntersectWith(TakeLhs, lhs, rhs)
+}
+
+// IntersectWith takes lhs and rhs and returns their intersection
+// with the value coming from the collision function, i.e.
+//   {(k, c(lhs[k], rhs[k])) | k in lhs, k in rhs}.
+// The elements of the resulting map are always {<k, c(lhs[k], rhs[k])>}
+// for each key k that is a key in both lhs and rhs.
+func (b *Builder) IntersectWith(c Collision, lhs, rhs Map) Map {
+	l, r := b.Clone(lhs), b.Clone(rhs)
+	return Map{b.Scope(), b.intersect(c, l.n, r.n)}
+}
+
+// MutMap is a convenient wrapper for a Map and a *Builder that will be used to create
+// new Maps from it.
+type MutMap struct {
+	B *Builder
+	M Map
+}
+
+// MutEmpty is an empty MutMap for a builder.
+func (b *Builder) MutEmpty() MutMap {
+	return MutMap{b, b.Empty()}
+}
+
+// Insert an element into the map using the collision function for the builder.
+// Returns true if the element was inserted.
+func (mm *MutMap) Insert(k uint64, v interface{}) bool {
+	old := mm.M
+	mm.M = mm.B.Insert(old, k, v)
+	return old != mm.M
+}
+
+// Updates an element in the map. Returns true if the map was updated.
+func (mm *MutMap) Update(k uint64, v interface{}) bool {
+	old := mm.M
+	mm.M = mm.B.Update(old, k, v)
+	return old != mm.M
+}
+
+// Removes a key from the map. Returns true if the element was removed.
+func (mm *MutMap) Remove(k uint64) bool {
+	old := mm.M
+	mm.M = mm.B.Remove(old, k)
+	return old != mm.M
+}
+
+// Merge another map into the current one using the collision function
+// for the builder. Returns true if the map changed.
+func (mm *MutMap) Merge(other Map) bool {
+	old := mm.M
+	mm.M = mm.B.Merge(old, other)
+	return old != mm.M
+}
+
+// Intersect another map into the current one using the collision function
+// for the builder. Returns true if the map changed.
+func (mm *MutMap) Intersect(other Map) bool {
+	old := mm.M
+	mm.M = mm.B.Intersect(old, other)
+	return old != mm.M
+}
+
+func (b *Builder) Create(m map[uint64]interface{}) Map {
+	var leaves []*leaf
+	for k, v := range m {
+		leaves = append(leaves, b.mkLeaf(key(k), v))
+	}
+	return Map{b.Scope(), b.create(leaves)}
+}
+
+// MergeWith merges another map into the current one using the given
+// collision function c. Returns true if the map changed.
+func (mm *MutMap) MergeWith(c Collision, other Map) bool {
+	old := mm.M
+	mm.M = mm.B.MergeWith(c, old, other)
+	return old != mm.M
+}
+
+// creates a map for a collection of leaf nodes.
+func (b *Builder) create(leaves []*leaf) node {
+	n := len(leaves)
+	if n == 0 {
+		return b.empty
+	} else if n == 1 {
+		return leaves[0]
+	}
+	// Note: we can do a more sophisticated algorithm by:
+	// - sorting the leaves ahead of time,
+	// - taking the prefix and branching bit of the min and max key,
+	// - binary searching for the branching bit,
+	// - splitting exactly where the branch will be, and
+	// - making the branch node for this prefix + branching bit.
+	// Skipping until this is a performance bottleneck.
+
+	m := n / 2 // (n >= 2) ==> 1 <= m < n
+	l, r := leaves[:m], leaves[m:]
+	return b.merge(nil, b.create(l), b.create(r))
+}
+
+// mkLeaf returns the hash-consed representative of (k, v) in the current scope.
+func (b *Builder) mkLeaf(k key, v interface{}) *leaf {
+	l := &leaf{k: k, v: v}
+	if rep, ok := b.leaves[*l]; ok {
+		return rep
+	}
+	b.leaves[*l] = l
+	return l
+}
+
+// mkBranch returns the hash-consed representative of the tuple
+//   (prefix, branch, left, right)
+// in the current scope.
+func (b *Builder) mkBranch(p prefix, bp bitpos, left node, right node) *branch {
+	br := &branch{
+		sz:        left.size() + right.size(),
+		prefix:    p,
+		branching: bp,
+		left:      left,
+		right:     right,
+	}
+	if rep, ok := b.branches[*br]; ok {
+		return rep
+	}
+	b.branches[*br] = br
+	return br
+}
+
+// join two maps with prefixes p0 and p1 that are *known* to disagree.
+func (b *Builder) join(p0 prefix, t0 node, p1 prefix, t1 node) *branch {
+	m := branchingBit(p0, p1)
+	var left, right node
+	if zeroBit(p0, m) {
+		left, right = t0, t1
+	} else {
+		left, right = t1, t0
+	}
+	prefix := mask(p0, m)
+	return b.mkBranch(prefix, m, left, right)
+}
+
+// collide two leaves with the same key to create a leaf
+// with the collided value.
+func (b *Builder) collide(c Collision, left, right *leaf) *leaf {
+	if left == right {
+		return left // c is idempotent: c(x, x) == x
+	}
+	val := left.v // keep the left value by default if c is nil
+	if c != nil {
+		val = c(left.v, right.v)
+	}
+	switch val {
+	case left.v:
+		return left
+	case right.v:
+		return right
+	default:
+		return b.mkLeaf(left.k, val)
+	}
+}
+
+// inserts a leaf l into a map m and returns the resulting map.
+// When lhs is true, l is the left hand side in a collision.
+// Both l and m are in the current scope.
+func (b *Builder) insert(c Collision, m node, l *leaf, lhs bool) node {
+	switch m := m.(type) {
+	case *empty:
+		return l
+	case *leaf:
+		if m.k == l.k {
+			left, right := l, m
+			if !lhs {
+				left, right = right, left
+			}
+			return b.collide(c, left, right)
+		}
+		return b.join(prefix(l.k), l, prefix(m.k), m)
+	case *branch:
+		// fallthrough
+	}
+	// m is a branch
+	br := m.(*branch)
+	if !matchPrefix(prefix(l.k), br.prefix, br.branching) {
+		return b.join(prefix(l.k), l, br.prefix, br)
+	}
+	var left, right node
+	if zeroBit(prefix(l.k), br.branching) {
+		left, right = b.insert(c, br.left, l, lhs), br.right
+	} else {
+		left, right = br.left, b.insert(c, br.right, l, lhs)
+	}
+	if left == br.left && right == br.right {
+		return m
+	}
+	return b.mkBranch(br.prefix, br.branching, left, right)
+}
+
+// merge two maps in the current scope.
+func (b *Builder) merge(c Collision, lhs, rhs node) node {
+	if lhs == rhs {
+		return lhs
+	}
+	switch lhs := lhs.(type) {
+	case *empty:
+		return rhs
+	case *leaf:
+		return b.insert(c, rhs, lhs, true)
+	case *branch:
+		switch rhs := rhs.(type) {
+		case *empty:
+			return lhs
+		case *leaf:
+			return b.insert(c, lhs, rhs, false)
+		case *branch:
+			// fallthrough
+		}
+	}
+
+	// Last remaining case is branch branch merging.
+	// For brevity, we adopt the Okasaki and Gill naming conventions
+	// for branching and prefixes.
+	s, t := lhs.(*branch), rhs.(*branch)
+	p, m := s.prefix, s.branching
+	q, n := t.prefix, t.branching
+
+	if m == n && p == q { // prefixes are identical.
+		left, right := b.merge(c, s.left, t.left), b.merge(c, s.right, t.right)
+		return b.mkBranch(p, m, left, right)
+	}
+	if !prefixesOverlap(p, m, q, n) {
+		return b.join(p, s, q, t) // prefixes are disjoint.
+	}
+	// prefixesOverlap(p, m, q, n) && !(m == n && p == q)
+	// By prefixesOverlap(...), either:
+	//   ord(m, n) && matchPrefix(q, p, m), or
+	//   ord(n, m) && matchPrefix(p, q, n)
+	// So either s or t can be merged with one branch or the other.
+	switch {
+	case ord(m, n) && zeroBit(q, m):
+		return b.mkBranch(p, m, b.merge(c, s.left, t), s.right)
+	case ord(m, n) && !zeroBit(q, m):
+		return b.mkBranch(p, m, s.left, b.merge(c, s.right, t))
+	case ord(n, m) && zeroBit(p, n):
+		return b.mkBranch(q, n, b.merge(c, s, t.left), t.right)
+	default:
+		return b.mkBranch(q, n, t.left, b.merge(c, s, t.right))
+	}
+}
+
+func (b *Builder) remove(m node, k key) node {
+	switch m := m.(type) {
+	case *empty:
+		return m
+	case *leaf:
+		if m.k == k {
+			return b.empty
+		}
+		return m
+	case *branch:
+		// fallthrough
+	}
+	br := m.(*branch)
+	kp := prefix(k)
+	if !matchPrefix(kp, br.prefix, br.branching) {
+		// The prefix does not match. kp is not in br.
+		return br
+	}
+	// the prefix matches. try to remove from the left or right branch.
+	left, right := br.left, br.right
+	if zeroBit(kp, br.branching) {
+		left = b.remove(left, k) // k may be in the left branch.
+	} else {
+		right = b.remove(right, k) // k may be in the right branch.
+	}
+	if left == br.left && right == br.right {
+		return br // no update
+	} else if _, ok := left.(*empty); ok {
+		return right // left updated and is empty.
+	} else if _, ok := right.(*empty); ok {
+		return left // right updated and is empty.
+	}
+	// Either left or right updated. Both left and right are not empty.
+	// The left and right branches still share the same prefix and disagree
+	// on the same branching bit. It is safe to directly create the branch.
+	return b.mkBranch(br.prefix, br.branching, left, right)
+}
+
+func (b *Builder) intersect(c Collision, l, r node) node {
+	if l == r {
+		return l
+	}
+	switch l := l.(type) {
+	case *empty:
+		return b.empty
+	case *leaf:
+		if rleaf := r.find(l.k); rleaf != nil {
+			return b.collide(c, l, rleaf)
+		}
+		return b.empty
+	case *branch:
+		switch r := r.(type) {
+		case *empty:
+			return b.empty
+		case *leaf:
+			if lleaf := l.find(r.k); lleaf != nil {
+				return b.collide(c, lleaf, r)
+			}
+			return b.empty
+		case *branch:
+			// fallthrough
+		}
+	}
+	// Last remaining case is branch branch intersection.
+	s, t := l.(*branch), r.(*branch)
+	p, m := s.prefix, s.branching
+	q, n := t.prefix, t.branching
+
+	if m == n && p == q {
+		// prefixes are identical.
+		left, right := b.intersect(c, s.left, t.left), b.intersect(c, s.right, t.right)
+		if _, ok := left.(*empty); ok {
+			return right
+		} else if _, ok := right.(*empty); ok {
+			return left
+		}
+		// The left and right branches are both non-empty.
+		// They still share the same prefix and disagree on the same branching bit.
+		// It is safe to directly create the branch.
+		return b.mkBranch(p, m, left, right)
+	}
+
+	if !prefixesOverlap(p, m, q, n) {
+		return b.empty // The prefixes share no keys.
+	}
+	// prefixesOverlap(p, m, q, n) && !(m == n && p == q)
+	// By prefixesOverlap(...), either:
+	//   ord(m, n) && matchPrefix(q, p, m), or
+	//   ord(n, m) && matchPrefix(p, q, n)
+	// So either s or t may be a strict subtree of the other.
+	var lhs, rhs node
+	switch {
+	case ord(m, n) && zeroBit(q, m):
+		lhs, rhs = s.left, t
+	case ord(m, n) && !zeroBit(q, m):
+		lhs, rhs = s.right, t
+	case ord(n, m) && zeroBit(p, n):
+		lhs, rhs = s, t.left
+	default:
+		lhs, rhs = s, t.right
+	}
+	return b.intersect(c, lhs, rhs)
+}
diff --git a/go/callgraph/vta/internal/trie/op_test.go b/go/callgraph/vta/internal/trie/op_test.go
new file mode 100644
index 0000000..ba0d5be
--- /dev/null
+++ b/go/callgraph/vta/internal/trie/op_test.go
@@ -0,0 +1,460 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package trie_test
+
+import (
+	"fmt"
+	"math/rand"
+	"reflect"
+	"testing"
+	"time"
+
+	"golang.org/x/tools/go/callgraph/vta/internal/trie"
+)
+
+// This file tests trie.Map by cross checking operations on a collection of
+// trie.Map's against a collection of map[uint64]interface{}. This includes
+// both limited fuzz testing for correctness and benchmarking.
+
+// mapCollection is effectively a []map[uint64]interface{}.
+// These support operations being applied to the i'th map.
+type mapCollection interface {
+	Elements() []map[uint64]interface{}
+
+	DeepEqual(l, r int) bool
+	Lookup(id int, k uint64) (interface{}, bool)
+
+	Insert(id int, k uint64, v interface{})
+	Update(id int, k uint64, v interface{})
+	Remove(id int, k uint64)
+	Intersect(l int, r int)
+	Merge(l int, r int)
+	Clear(id int)
+
+	Average(l int, r int)
+	Assign(l int, r int)
+}
+
+// opCode of an operation.
+type opCode int
+
+const (
+	deepEqualsOp opCode = iota
+	lookupOp
+	insert
+	update
+	remove
+	merge
+	intersect
+	clear
+	takeAverage
+	assign
+)
+
+func (op opCode) String() string {
+	switch op {
+	case deepEqualsOp:
+		return "DE"
+	case lookupOp:
+		return "LO"
+	case insert:
+		return "IN"
+	case update:
+		return "UP"
+	case remove:
+		return "RE"
+	case merge:
+		return "ME"
+	case intersect:
+		return "IT"
+	case clear:
+		return "CL"
+	case takeAverage:
+		return "AV"
+	case assign:
+		return "AS"
+	default:
+		return "??"
+	}
+}
+
+// A mapCollection backed by MutMaps.
+type trieCollection struct {
+	b     *trie.Builder
+	tries []trie.MutMap
+}
+
+func (c *trieCollection) Elements() []map[uint64]interface{} {
+	var maps []map[uint64]interface{}
+	for _, m := range c.tries {
+		maps = append(maps, trie.Elems(m.M))
+	}
+	return maps
+}
+func (c *trieCollection) Eq(id int, m map[uint64]interface{}) bool {
+	elems := trie.Elems(c.tries[id].M)
+	return reflect.DeepEqual(elems, m)
+}
+
+func (c *trieCollection) Lookup(id int, k uint64) (interface{}, bool) {
+	return c.tries[id].M.Lookup(k)
+}
+func (c *trieCollection) DeepEqual(l, r int) bool {
+	return c.tries[l].M.DeepEqual(c.tries[r].M)
+}
+
+func (c *trieCollection) Add() {
+	c.tries = append(c.tries, c.b.MutEmpty())
+}
+
+func (c *trieCollection) Insert(id int, k uint64, v interface{}) {
+	c.tries[id].Insert(k, v)
+}
+
+func (c *trieCollection) Update(id int, k uint64, v interface{}) {
+	c.tries[id].Update(k, v)
+}
+
+func (c *trieCollection) Remove(id int, k uint64) {
+	c.tries[id].Remove(k)
+}
+
+func (c *trieCollection) Intersect(l int, r int) {
+	c.tries[l].Intersect(c.tries[r].M)
+}
+
+func (c *trieCollection) Merge(l int, r int) {
+	c.tries[l].Merge(c.tries[r].M)
+}
+
+func (c *trieCollection) Average(l int, r int) {
+	c.tries[l].MergeWith(average, c.tries[r].M)
+}
+
+func (c *trieCollection) Clear(id int) {
+	c.tries[id] = c.b.MutEmpty()
+}
+func (c *trieCollection) Assign(l, r int) {
+	c.tries[l] = c.tries[r]
+}
+
+func average(x interface{}, y interface{}) interface{} {
+	if x, ok := x.(float32); ok {
+		if y, ok := y.(float32); ok {
+			return (x + y) / 2.0
+		}
+	}
+	return x
+}
+
+type builtinCollection []map[uint64]interface{}
+
+func (c builtinCollection) Elements() []map[uint64]interface{} {
+	return c
+}
+
+func (c builtinCollection) Lookup(id int, k uint64) (interface{}, bool) {
+	v, ok := c[id][k]
+	return v, ok
+}
+func (c builtinCollection) DeepEqual(l, r int) bool {
+	return reflect.DeepEqual(c[l], c[r])
+}
+
+func (c builtinCollection) Insert(id int, k uint64, v interface{}) {
+	if _, ok := c[id][k]; !ok {
+		c[id][k] = v
+	}
+}
+
+func (c builtinCollection) Update(id int, k uint64, v interface{}) {
+	c[id][k] = v
+}
+
+func (c builtinCollection) Remove(id int, k uint64) {
+	delete(c[id], k)
+}
+
+func (c builtinCollection) Intersect(l int, r int) {
+	result := map[uint64]interface{}{}
+	for k, v := range c[l] {
+		if _, ok := c[r][k]; ok {
+			result[k] = v
+		}
+	}
+	c[l] = result
+}
+
+func (c builtinCollection) Merge(l int, r int) {
+	result := map[uint64]interface{}{}
+	for k, v := range c[r] {
+		result[k] = v
+	}
+	for k, v := range c[l] {
+		result[k] = v
+	}
+	c[l] = result
+}
+
+func (c builtinCollection) Average(l int, r int) {
+	avg := map[uint64]interface{}{}
+	for k, lv := range c[l] {
+		if rv, ok := c[r][k]; ok {
+			avg[k] = average(lv, rv)
+		} else {
+			avg[k] = lv // add elements just in l
+		}
+	}
+	for k, rv := range c[r] {
+		if _, ok := c[l][k]; !ok {
+			avg[k] = rv // add elements just in r
+		}
+	}
+	c[l] = avg
+}
+
+func (c builtinCollection) Assign(l, r int) {
+	m := map[uint64]interface{}{}
+	for k, v := range c[r] {
+		m[k] = v
+	}
+	c[l] = m
+}
+
+func (c builtinCollection) Clear(id int) {
+	c[id] = map[uint64]interface{}{}
+}
+
+func newTriesCollection(size int) *trieCollection {
+	tc := &trieCollection{
+		b:     trie.NewBuilder(),
+		tries: make([]trie.MutMap, size),
+	}
+	for i := 0; i < size; i++ {
+		tc.tries[i] = tc.b.MutEmpty()
+	}
+	return tc
+}
+
+func newMapsCollection(size int) *builtinCollection {
+	maps := make(builtinCollection, size)
+	for i := 0; i < size; i++ {
+		maps[i] = map[uint64]interface{}{}
+	}
+	return &maps
+}
+
+// operation on a map collection.
+type operation struct {
+	code opCode
+	l, r int
+	k    uint64
+	v    float32
+}
+
+// Apply the operation to maps.
+func (op operation) Apply(maps mapCollection) interface{} {
+	type lookupresult struct {
+		v  interface{}
+		ok bool
+	}
+	switch op.code {
+	case deepEqualsOp:
+		return maps.DeepEqual(op.l, op.r)
+	case lookupOp:
+		v, ok := maps.Lookup(op.l, op.k)
+		return lookupresult{v, ok}
+	case insert:
+		maps.Insert(op.l, op.k, op.v)
+	case update:
+		maps.Update(op.l, op.k, op.v)
+	case remove:
+		maps.Remove(op.l, op.k)
+	case merge:
+		maps.Merge(op.l, op.r)
+	case intersect:
+		maps.Intersect(op.l, op.r)
+	case clear:
+		maps.Clear(op.l)
+	case takeAverage:
+		maps.Average(op.l, op.r)
+	case assign:
+		maps.Assign(op.l, op.r)
+	}
+	return nil
+}
+
+// Returns a collection of op codes with dist[op] copies of op.
+func distribution(dist map[opCode]int) []opCode {
+	var codes []opCode
+	for op, n := range dist {
+		for i := 0; i < n; i++ {
+			codes = append(codes, op)
+		}
+	}
+	return codes
+}
+
+// options for generating a random operation.
+type options struct {
+	maps   int
+	maxKey uint64
+	maxVal int
+	codes  []opCode
+}
+
+// returns a random operation using r as a source of randomness.
+func randOperator(r *rand.Rand, opts options) operation {
+	id := func() int { return r.Intn(opts.maps) }
+	key := func() uint64 { return r.Uint64() % opts.maxKey }
+	val := func() float32 { return float32(r.Intn(opts.maxVal)) }
+	switch code := opts.codes[r.Intn(len(opts.codes))]; code {
+	case lookupOp, remove:
+		return operation{code: code, l: id(), k: key()}
+	case insert, update:
+		return operation{code: code, l: id(), k: key(), v: val()}
+	case deepEqualsOp, merge, intersect, takeAverage, assign:
+		return operation{code: code, l: id(), r: id()}
+	case clear:
+		return operation{code: code, l: id()}
+	default:
+		panic("Invalid op code")
+	}
+}
+
+func randOperators(r *rand.Rand, numops int, opts options) []operation {
+	ops := make([]operation, numops)
+	for i := 0; i < numops; i++ {
+		ops[i] = randOperator(r, opts)
+	}
+	return ops
+}
+
+// TestOperations applies a series of random operations to a collection of
+// trie.MutMaps and a parallel collection of map[uint64]interface{}, and then
+// checks that the resulting maps are equal.
+func TestOperations(t *testing.T) {
+	seed := time.Now().UnixNano()
+	s := rand.NewSource(seed)
+	r := rand.New(s)
+	t.Log("seed: ", seed)
+
+	size := 10
+	N := 100000
+	ops := randOperators(r, N, options{
+		maps:   size,
+		maxKey: 128,
+		maxVal: 100,
+		codes: distribution(map[opCode]int{
+			deepEqualsOp: 1,
+			lookupOp:     10,
+			insert:       10,
+			update:       10,
+			remove:       10,
+			merge:        10,
+			intersect:    10,
+			clear:        2,
+			takeAverage:  5,
+			assign:       5,
+		}),
+	})
+
+	var tries mapCollection = newTriesCollection(size)
+	var maps mapCollection = newMapsCollection(size)
+	check := func() error {
+		if got, want := tries.Elements(), maps.Elements(); !reflect.DeepEqual(got, want) {
+			return fmt.Errorf("elements of tries and maps and tries differed. got %v want %v", got, want)
+		}
+		return nil
+	}
+
+	for i, op := range ops {
+		got, want := op.Apply(tries), op.Apply(maps)
+		if got != want {
+			t.Errorf("op[%d]: (%v).Apply(%v) != (%v).Apply(%v). got %v want %v",
+				i, op, tries, op, maps, got, want)
+		}
+	}
+	if err := check(); err != nil {
+		t.Errorf("%d operators failed with %s", size, err)
+		t.Log("Rerunning with more checking")
+		tries, maps = newTriesCollection(size), newMapsCollection(size)
+		for i, op := range ops {
+			op.Apply(tries)
+			op.Apply(maps)
+			if err := check(); err != nil {
+				t.Fatalf("Failed first on op[%d]=%v: %v", i, op, err)
+			}
+		}
+	}
+}
+
+func run(b *testing.B, opts options, seed int64, mk func(int) mapCollection) {
+	r := rand.New(rand.NewSource(seed))
+	ops := randOperators(r, b.N, opts)
+	maps := mk(opts.maps)
+	for _, op := range ops {
+		op.Apply(maps)
+	}
+}
+
+var standard options = options{
+	maps:   10,
+	maxKey: 128,
+	maxVal: 100,
+	codes: distribution(map[opCode]int{
+		deepEqualsOp: 1,
+		lookupOp:     20,
+		insert:       20,
+		update:       20,
+		remove:       20,
+		merge:        10,
+		intersect:    10,
+		clear:        1,
+		takeAverage:  5,
+		assign:       20,
+	}),
+}
+
+func BenchmarkTrieStandard(b *testing.B) {
+	run(b, standard, 123, func(size int) mapCollection {
+		return newTriesCollection(size)
+	})
+}
+
+func BenchmarkMapsStandard(b *testing.B) {
+	run(b, standard, 123, func(size int) mapCollection {
+		return newMapsCollection(size)
+	})
+}
+
+var smallWide options = options{
+	maps:   100,
+	maxKey: 100,
+	maxVal: 8,
+	codes: distribution(map[opCode]int{
+		deepEqualsOp: 0,
+		lookupOp:     0,
+		insert:       30,
+		update:       20,
+		remove:       0,
+		merge:        10,
+		intersect:    0,
+		clear:        1,
+		takeAverage:  0,
+		assign:       30,
+	}),
+}
+
+func BenchmarkTrieSmallWide(b *testing.B) {
+	run(b, smallWide, 456, func(size int) mapCollection {
+		return newTriesCollection(size)
+	})
+}
+
+func BenchmarkMapsSmallWide(b *testing.B) {
+	run(b, smallWide, 456, func(size int) mapCollection {
+		return newMapsCollection(size)
+	})
+}
diff --git a/go/callgraph/vta/internal/trie/scope.go b/go/callgraph/vta/internal/trie/scope.go
new file mode 100644
index 0000000..4a6d0bb
--- /dev/null
+++ b/go/callgraph/vta/internal/trie/scope.go
@@ -0,0 +1,28 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package trie
+
+import (
+	"strconv"
+	"sync/atomic"
+)
+
+// Scope represents a distinct collection of maps.
+// Maps with the same Scope can be equal. Maps in different scopes are distinct.
+// Each Builder creates maps within a unique Scope.
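+//
+// For illustration (a sketch; Builder, Create, and DeepEqual are defined
+// elsewhere in this package): maps built by different Builders live in
+// different Scopes, so they are never == even when they hold the same
+// elements, while DeepEqual still compares their contents.
+//
+//	b0, b1 := NewBuilder(), NewBuilder()
+//	x := b0.Create(map[uint64]interface{}{1: "a"})
+//	y := b1.Create(map[uint64]interface{}{1: "a"})
+//	_ = x == y         // false: different Scopes
+//	_ = x.DeepEqual(y) // true: same (key, value) mappings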
+type Scope struct {
+	id int32
+}
+
+var nextScopeId int32
+
+func newScope() Scope {
+	id := atomic.AddInt32(&nextScopeId, 1)
+	return Scope{id: id}
+}
+
+func (s Scope) String() string {
+	return strconv.Itoa(int(s.id))
+}
diff --git a/go/callgraph/vta/internal/trie/trie.go b/go/callgraph/vta/internal/trie/trie.go
new file mode 100644
index 0000000..160eb21
--- /dev/null
+++ b/go/callgraph/vta/internal/trie/trie.go
@@ -0,0 +1,227 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package trie implements persistent Patricia trie maps.
+//
+// Each Map is effectively a map from uint64 to interface{}. Patricia tries are
+// a form of radix tree that is particularly appropriate when many maps will be
+// created and merged together, and large amounts of sharing are expected
+// (e.g. environment abstract domains in program analysis).
+//
+// This implementation closely follows the paper:
+//   C. Okasaki and A. Gill, “Fast mergeable integer maps,” in ACM SIGPLAN
+//   Workshop on ML, September 1998, pp. 77–86.
+// Each Map is immutable and can be read from concurrently. The map does not
+// guarantee that the value pointed to by the interface{} value is not updated
+// concurrently.
+//
+// These Maps are optimized for situations where many maps will be created with
+// a high degree of sharing and combining of maps together. If you do not expect
+// a significant amount of sharing, the builtin map[T]U is a much better choice!
+//
+// Each Map is created by a Builder. Each Builder has a unique Scope and each node is
+// created within this scope. Maps x and y are == if they contain the same
+// (key,value) mappings and have equal scopes.
+//
+// Internally these are big endian Patricia trie nodes, and the keys are sorted.
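+//
+// A minimal usage sketch (the Builder methods referenced here, such as Create
+// and Merge, are defined elsewhere in this package):
+//
+//	b := NewBuilder()
+//	m1 := b.Create(map[uint64]interface{}{1: "a", 2: "b"})
+//	m2 := b.Create(map[uint64]interface{}{2: "B", 3: "c"})
+//	merged := b.Merge(m1, m2) // on collision, the left value is kept by default
+//	v, ok := merged.Lookup(2) // "b", true
+//	s := merged.String()      // "{1: a, 2: b, 3: c}"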
+package trie
+
+import (
+	"fmt"
+	"strings"
+)
+
+// Map is effectively a finite mapping from uint64 keys to interface{} values.
+// Maps are immutable and can be read from concurrently.
+//
+// Notes on concurrency:
+// - A Map value itself is an interface and assignments to a Map value can race.
+// - Map does not guarantee that the value pointed to by the interface{} value
+//   is not updated concurrently.
+type Map struct {
+	s Scope
+	n node
+}
+
+func (m Map) Scope() Scope {
+	return m.s
+}
+func (m Map) Size() int {
+	if m.n == nil {
+		return 0
+	}
+	return m.n.size()
+}
+func (m Map) Lookup(k uint64) (interface{}, bool) {
+	if m.n != nil {
+		if leaf := m.n.find(key(k)); leaf != nil {
+			return leaf.v, true
+		}
+	}
+	return nil, false
+}
+
+// Converts the map into a {<key>: <value>[, ...]} string. This uses the default
+// %s string conversion for <value>.
+func (m Map) String() string {
+	var kvs []string
+	m.Range(func(u uint64, i interface{}) bool {
+		kvs = append(kvs, fmt.Sprintf("%d: %s", u, i))
+		return true
+	})
+	return fmt.Sprintf("{%s}", strings.Join(kvs, ", "))
+}
+
+// Range applies cb(key, value) to the leaf (key, value) pairs in the map in
+// increasing key order. It stops early if cb returns false.
+// Returns true if all elements were visited without stopping early.
+func (m Map) Range(cb func(uint64, interface{}) bool) bool {
+	if m.n != nil {
+		return m.n.visit(cb)
+	}
+	return true
+}
+
+// DeepEqual returns true if m and other contain the same (k, v) mappings
+// [regardless of Scope].
+//
+// Equivalently m.DeepEqual(other) <=> reflect.DeepEqual(Elems(m), Elems(other))
+func (m Map) DeepEqual(other Map) bool {
+	if m.Scope() == other.Scope() {
+		return m.n == other.n
+	}
+	if (m.n == nil) || (other.n == nil) {
+		return m.Size() == 0 && other.Size() == 0
+	}
+	return m.n.deepEqual(other.n)
+}
+
+// Elems are the (k,v) elements in the Map as a map[uint64]interface{}
+func Elems(m Map) map[uint64]interface{} {
+	dest := make(map[uint64]interface{}, m.Size())
+	m.Range(func(k uint64, v interface{}) bool {
+		dest[k] = v
+		return true
+	})
+	return dest
+}
+
+// node is an internal node within a trie map.
+// A node is either empty, a leaf or a branch.
+type node interface {
+	size() int
+
+	// visit applies cb(key, value) to the leaf (key, value) pairs in the map
+	// in increasing key order. Stops early if cb returns false.
+	// Returns true if all elements were visited without stopping early.
+	visit(cb func(uint64, interface{}) bool) bool
+
+	// deepEqual reports whether two nodes contain the same elements, regardless of scope.
+	deepEqual(node) bool
+
+	// find the leaf for the given key value or nil if it is not present.
+	find(k key) *leaf
+
+	// nodeImpl is a marker method; only types within this package implement node.
+	nodeImpl()
+}
+
+// empty represents the empty map within a scope.
+//
+// A Builder caches a single empty node per Scope, so empty maps created by the
+// same Builder are ==.
+type empty struct {
+	s Scope
+}
+
+// leaf represents a single <key, value> pair.
+type leaf struct {
+	k key
+	v interface{}
+}
+
+// branch represents a tree node within the Patricia trie.
+//
+// All keys within the branch match a `prefix` of the key
+// up to a `branching` bit, and the left and right nodes
+// contain keys that disagree on the bit at the `branching` bit.
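+//
+// For example (illustrative values): keys 0b001 and 0b101 first disagree at
+// the bit 0b100, so they live under a branch with branching == 0b100 and
+// prefix == mask(0b001, 0b100); the key with a zero at that bit (0b001) is
+// stored in left and the key with a one (0b101) in right.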
+type branch struct {
+	sz        int    // size. cached for O(1) lookup
+	prefix    prefix // == mask(p0, branching) for some p0
+	branching bitpos
+
+	// Invariants:
+	// - neither is nil.
+	// - neither is *empty.
+	// - all keys in left are <= p.
+	// - all keys in right are > p.
+	left, right node
+}
+
+// all of these types implement the node interface.
+var _ node = &empty{}
+var _ node = &leaf{}
+var _ node = &branch{}
+
+func (*empty) nodeImpl()  {}
+func (*leaf) nodeImpl()   {}
+func (*branch) nodeImpl() {}
+
+func (*empty) find(k key) *leaf { return nil }
+func (l *leaf) find(k key) *leaf {
+	if k == l.k {
+		return l
+	}
+	return nil
+}
+func (br *branch) find(k key) *leaf {
+	kp := prefix(k)
+	if !matchPrefix(kp, br.prefix, br.branching) {
+		return nil
+	}
+	if zeroBit(kp, br.branching) {
+		return br.left.find(k)
+	}
+	return br.right.find(k)
+}
+
+func (*empty) size() int     { return 0 }
+func (*leaf) size() int      { return 1 }
+func (br *branch) size() int { return br.sz }
+
+func (*empty) deepEqual(m node) bool {
+	_, ok := m.(*empty)
+	return ok
+}
+func (l *leaf) deepEqual(m node) bool {
+	if m, ok := m.(*leaf); ok {
+		return m == l || (l.k == m.k && l.v == m.v)
+	}
+	return false
+}
+
+func (br *branch) deepEqual(m node) bool {
+	if m, ok := m.(*branch); ok {
+		if br == m {
+			return true
+		}
+		return br.sz == m.sz && br.branching == m.branching && br.prefix == m.prefix &&
+			br.left.deepEqual(m.left) && br.right.deepEqual(m.right)
+	}
+	// if m is not a branch, m contains 0 or 1 elem.
+	// br contains at least 2 keys that disagree on a prefix.
+	return false
+}
+
+func (*empty) visit(cb func(uint64, interface{}) bool) bool {
+	return true
+}
+func (l *leaf) visit(cb func(uint64, interface{}) bool) bool {
+	return cb(uint64(l.k), l.v)
+}
+func (br *branch) visit(cb func(uint64, interface{}) bool) bool {
+	if !br.left.visit(cb) {
+		return false
+	}
+	return br.right.visit(cb)
+}
diff --git a/go/callgraph/vta/internal/trie/trie_test.go b/go/callgraph/vta/internal/trie/trie_test.go
new file mode 100644
index 0000000..c0651b0
--- /dev/null
+++ b/go/callgraph/vta/internal/trie/trie_test.go
@@ -0,0 +1,543 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package trie
+
+import (
+	"reflect"
+	"strconv"
+	"testing"
+)
+
+func TestScope(t *testing.T) {
+	def := Scope{}
+	s0, s1 := newScope(), newScope()
+	if s0 == def || s1 == def {
+		t.Error("newScope() should never be == to the default scope")
+	}
+	if s0 == s1 {
+		t.Errorf("newScope() %q and %q should not be ==", s0, s1)
+	}
+	if s0.id == 0 {
+		t.Error("s0.id is 0")
+	}
+	if s1.id == 0 {
+		t.Error("s1.id is 0")
+	}
+	got := s0.String()
+	if _, err := strconv.Atoi(got); err != nil {
+		t.Errorf("scope{%s}.String() is not an int: got %s with error %s", s0, got, err)
+	}
+}
+
+func TestCollision(t *testing.T) {
+	var x interface{} = 1
+	var y interface{} = 2
+
+	if v := TakeLhs(x, y); v != x {
+		t.Errorf("TakeLhs(%s, %s) got %s. want %s", x, y, v, x)
+	}
+	if v := TakeRhs(x, y); v != y {
+		t.Errorf("TakeRhs(%s, %s) got %s. want %s", x, y, v, y)
+	}
+}
+
+func TestDefault(t *testing.T) {
+	def := Map{}
+
+	if def.Size() != 0 {
+		t.Errorf("default node has non-0 size %d", def.Size())
+	}
+	if want, got := (Scope{}), def.Scope(); got != want {
+		t.Errorf("default is in a non default scope (%s) from b (%s)", got, want)
+	}
+	if v, ok := def.Lookup(123); !(v == nil && !ok) {
+		t.Errorf("Map{}.Lookup() = (%s, %v) not (nil, false)", v, ok)
+	}
+	if !def.Range(func(k uint64, v interface{}) bool {
+		t.Errorf("Map{}.Range() called its callback on %d:%s", k, v)
+		return true
+	}) {
+		t.Error("Map{}.Range() should iterate through all elements and return true")
+	}
+
+	if got, want := def.String(), "{}"; got != want {
+		t.Errorf("Scope{}.String() got %s. want %s", got, want)
+	}
+
+	b := NewBuilder()
+	if def == b.Empty() {
+		t.Error("default Map{} should not be == to an empty map from a Builder")
+	}
+	if b.Clone(def) != b.Empty() {
+		t.Error("b.Clone(Map{}) should equal b.Empty()")
+	}
+	if !def.DeepEqual(b.Empty()) {
+		t.Error("Map{}.DeepEqual(b.Empty()) should hold")
+	}
+}
+
+func TestBuilders(t *testing.T) {
+	b0, b1 := NewBuilder(), NewBuilder()
+	if b0.Scope() == b1.Scope() {
+		t.Errorf("builders have the same scope %s", b0.Scope())
+	}
+
+	if b0.Empty() == b1.Empty() {
+		t.Errorf("empty maps from different scopes should not be ==")
+	}
+	if !b0.Empty().DeepEqual(b1.Empty()) {
+		t.Errorf("empty maps from different scopes should be DeepEqual")
+	}
+
+	clone := b1.Clone(b0.Empty())
+	if clone != b1.Empty() {
+		t.Errorf("Clone() empty nodes %v != %v", clone, b1.Empty())
+	}
+}
+
+func TestEmpty(t *testing.T) {
+	b := NewBuilder()
+	e := b.Empty()
+	if e.Size() != 0 {
+		t.Errorf("empty nodes has non-0 size %d", e.Size())
+	}
+	if e.Scope() != b.Scope() {
+		t.Errorf("b.Empty() is in a different scope (%s) from b (%s)", e.Scope(), b.Scope())
+	}
+	if v, ok := e.Lookup(123); !(v == nil && !ok) {
+		t.Errorf("empty.Lookup() = (%s, %v) not (nil, false)", v, ok)
+	}
+	if l := e.n.find(123); l != nil {
+		t.Errorf("empty.find(123) got %v. want nil", l)
+	}
+	e.Range(func(k uint64, v interface{}) bool {
+		t.Errorf("empty.Range() called it callback on %d:%s", k, v)
+		return true
+	})
+
+	want := "{}"
+	if got := e.String(); got != want {
+		t.Errorf("empty.String(123) got %s. want %s", got, want)
+	}
+}
+
+func TestCreate(t *testing.T) {
+	// The node orders are printed in lexicographic little-endian.
+	b := NewBuilder()
+	for _, c := range []struct {
+		m    map[uint64]interface{}
+		want string
+	}{
+		{
+			map[uint64]interface{}{},
+			"{}",
+		},
+		{
+			map[uint64]interface{}{1: "a"},
+			"{1: a}",
+		},
+		{
+			map[uint64]interface{}{2: "b", 1: "a"},
+			"{1: a, 2: b}",
+		},
+		{
+			map[uint64]interface{}{1: "x", 4: "y", 5: "z"},
+			"{1: x, 4: y, 5: z}",
+		},
+	} {
+		m := b.Create(c.m)
+		if got := m.String(); got != c.want {
+			t.Errorf("Create(%v) got %q. want %q ", c.m, got, c.want)
+		}
+	}
+}
+
+func TestElems(t *testing.T) {
+	b := NewBuilder()
+	for _, orig := range []map[uint64]interface{}{
+		{},
+		{1: "a"},
+		{1: "a", 2: "b"},
+		{1: "x", 4: "y", 5: "z"},
+		{1: "x", 4: "y", 5: "z", 123: "abc"},
+	} {
+		m := b.Create(orig)
+		if elems := Elems(m); !reflect.DeepEqual(orig, elems) {
+			t.Errorf("Elems(%v) got %q. want %q ", m, elems, orig)
+		}
+	}
+}
+
+func TestRange(t *testing.T) {
+	b := NewBuilder()
+	m := b.Create(map[uint64]interface{}{1: "x", 3: "y", 5: "z", 6: "stop", 8: "a"})
+
+	calls := 0
+	cb := func(k uint64, v interface{}) bool {
+		t.Logf("visiting (%d, %v)", k, v)
+		calls++
+		return k%2 != 0 // stop after the first even number.
+	}
+	// The nodes are visited in increasing order.
+	all := m.Range(cb)
+	if all {
+		t.Error("expected to stop early")
+	}
+	want := 4
+	if calls != want {
+		t.Errorf("# of callbacks (%d) was expected to equal %d (1 + # of evens)",
+			calls, want)
+	}
+}
+
+func TestDeepEqual(t *testing.T) {
+	for _, m := range []map[uint64]interface{}{
+		{},
+		{1: "x"},
+		{1: "x", 2: "y"},
+	} {
+		l := NewBuilder().Create(m)
+		r := NewBuilder().Create(m)
+		if !l.DeepEqual(r) {
+			t.Errorf("Expect %v to be DeepEqual() to %v", l, r)
+		}
+	}
+}
+
+func TestNotDeepEqual(t *testing.T) {
+	for _, c := range []struct {
+		left  map[uint64]interface{}
+		right map[uint64]interface{}
+	}{
+		{
+			map[uint64]interface{}{1: "x"},
+			map[uint64]interface{}{},
+		},
+		{
+			map[uint64]interface{}{},
+			map[uint64]interface{}{1: "y"},
+		},
+		{
+			map[uint64]interface{}{1: "x"},
+			map[uint64]interface{}{1: "y"},
+		},
+		{
+			map[uint64]interface{}{1: "x"},
+			map[uint64]interface{}{1: "x", 2: "Y"},
+		},
+		{
+			map[uint64]interface{}{1: "x", 2: "Y"},
+			map[uint64]interface{}{1: "x"},
+		},
+		{
+			map[uint64]interface{}{1: "x", 2: "y"},
+			map[uint64]interface{}{1: "x", 2: "Y"},
+		},
+	} {
+		l := NewBuilder().Create(c.left)
+		r := NewBuilder().Create(c.right)
+		if l.DeepEqual(r) {
+			t.Errorf("Expect %v to be !DeepEqual() to %v", l, r)
+		}
+	}
+}
+
+func TestMerge(t *testing.T) {
+	b := NewBuilder()
+	for _, c := range []struct {
+		left  map[uint64]interface{}
+		right map[uint64]interface{}
+		want  string
+	}{
+		{
+			map[uint64]interface{}{},
+			map[uint64]interface{}{},
+			"{}",
+		},
+		{
+			map[uint64]interface{}{},
+			map[uint64]interface{}{1: "a"},
+			"{1: a}",
+		},
+		{
+			map[uint64]interface{}{1: "a"},
+			map[uint64]interface{}{},
+			"{1: a}",
+		},
+		{
+			map[uint64]interface{}{1: "a", 2: "b"},
+			map[uint64]interface{}{},
+			"{1: a, 2: b}",
+		},
+		{
+			map[uint64]interface{}{1: "x"},
+			map[uint64]interface{}{1: "y"},
+			"{1: x}", // default collision is left
+		},
+		{
+			map[uint64]interface{}{1: "x"},
+			map[uint64]interface{}{2: "y"},
+			"{1: x, 2: y}",
+		},
+		{
+			map[uint64]interface{}{4: "y", 5: "z"},
+			map[uint64]interface{}{1: "x"},
+			"{1: x, 4: y, 5: z}",
+		},
+		{
+			map[uint64]interface{}{1: "x", 5: "z"},
+			map[uint64]interface{}{4: "y"},
+			"{1: x, 4: y, 5: z}",
+		},
+		{
+			map[uint64]interface{}{1: "x", 4: "y"},
+			map[uint64]interface{}{5: "z"},
+			"{1: x, 4: y, 5: z}",
+		},
+		{
+			map[uint64]interface{}{1: "a", 4: "c"},
+			map[uint64]interface{}{2: "b", 5: "d"},
+			"{1: a, 2: b, 4: c, 5: d}",
+		},
+		{
+			map[uint64]interface{}{1: "a", 4: "c"},
+			map[uint64]interface{}{2: "b", 5 + 8: "d"},
+			"{1: a, 2: b, 4: c, 13: d}",
+		},
+		{
+			map[uint64]interface{}{2: "b", 5 + 8: "d"},
+			map[uint64]interface{}{1: "a", 4: "c"},
+			"{1: a, 2: b, 4: c, 13: d}",
+		},
+		{
+			map[uint64]interface{}{1: "a", 4: "c"},
+			map[uint64]interface{}{2: "b", 5 + 8: "d"},
+			"{1: a, 2: b, 4: c, 13: d}",
+		},
+		{
+			map[uint64]interface{}{2: "b", 5 + 8: "d"},
+			map[uint64]interface{}{1: "a", 4: "c"},
+			"{1: a, 2: b, 4: c, 13: d}",
+		},
+		{
+			map[uint64]interface{}{2: "b", 5 + 8: "d"},
+			map[uint64]interface{}{2: "", 3: "a"},
+			"{2: b, 3: a, 13: d}",
+		},
+
+		{
+			// crafted for `!prefixesOverlap(p, m, q, n)`
+			left:  map[uint64]interface{}{1: "a", 2 + 1: "b"},
+			right: map[uint64]interface{}{4 + 1: "c", 4 + 2: "d"},
+			// p: 5, m: 2 q: 1, n: 2
+			want: "{1: a, 3: b, 5: c, 6: d}",
+		},
+		{
+			// crafted for `ord(m, n) && !zeroBit(q, m)`
+			left:  map[uint64]interface{}{8 + 2 + 1: "a", 16 + 4: "b"},
+			right: map[uint64]interface{}{16 + 8 + 2 + 1: "c", 16 + 8 + 4 + 2 + 1: "d"},
+			// left: p: 15, m: 16
+			// right: q: 27, n: 4
+			want: "{11: a, 20: b, 27: c, 31: d}",
+		},
+		{
+			// crafted for `ord(n, m) && !zeroBit(p, n)`
+			// p: 6, m: 1 q: 5, n: 2
+			left:  map[uint64]interface{}{4 + 2: "b", 4 + 2 + 1: "c"},
+			right: map[uint64]interface{}{4: "a", 4 + 2 + 1: "dropped"},
+			want:  "{4: a, 6: b, 7: c}",
+		},
+	} {
+		l, r := b.Create(c.left), b.Create(c.right)
+		m := b.Merge(l, r)
+		if got := m.String(); got != c.want {
+			t.Errorf("Merge(%s, %s) got %q. want %q ", l, r, got, c.want)
+		}
+	}
+}
+
+func TestIntersect(t *testing.T) {
+	// Most of the test cases go after specific branches of intersect.
+	b := NewBuilder()
+	for _, c := range []struct {
+		left  map[uint64]interface{}
+		right map[uint64]interface{}
+		want  string
+	}{
+		{
+			left:  map[uint64]interface{}{10: "a", 39: "b"},
+			right: map[uint64]interface{}{10: "A", 39: "B", 75: "C"},
+			want:  "{10: a, 39: b}",
+		},
+		{
+			left:  map[uint64]interface{}{10: "a", 39: "b"},
+			right: map[uint64]interface{}{},
+			want:  "{}",
+		},
+		{
+			left:  map[uint64]interface{}{},
+			right: map[uint64]interface{}{10: "A", 39: "B", 75: "C"},
+			want:  "{}",
+		},
+		{ // m == n && p == q  && left.(*empty) case
+			left:  map[uint64]interface{}{4: 1, 6: 3, 10: 8, 15: "on left"},
+			right: map[uint64]interface{}{0: 8, 7: 6, 11: 0, 15: "on right"},
+			want:  "{15: on left}",
+		},
+		{ // m == n && p == q  && right.(*empty) case
+			left:  map[uint64]interface{}{0: "on left", 1: 2, 2: 3, 3: 1, 7: 3},
+			right: map[uint64]interface{}{0: "on right", 5: 1, 6: 8},
+			want:  "{0: on left}",
+		},
+		{ // m == n && p == q  &&  both left and right are not empty
+			left:  map[uint64]interface{}{1: "a", 2: "b", 3: "c"},
+			right: map[uint64]interface{}{0: "A", 1: "B", 2: "C"},
+			want:  "{1: a, 2: b}",
+		},
+		{ // m == n && p == q  &&  both left and right are not empty
+			left:  map[uint64]interface{}{1: "a", 2: "b", 3: "c"},
+			right: map[uint64]interface{}{0: "A", 1: "B", 2: "C"},
+			want:  "{1: a, 2: b}",
+		},
+		{ // !prefixesOverlap(p, m, q, n)
+			// p = 1, m = 2, q = 5, n = 2
+			left:  map[uint64]interface{}{0b001: 1, 0b011: 3},
+			right: map[uint64]interface{}{0b100: 4, 0b111: 7},
+			want:  "{}",
+		},
+		{ // ord(m, n) && zeroBit(q, m)
+			// p = 3, m = 4, q = 0, n = 1
+			left:  map[uint64]interface{}{0b010: 2, 0b101: 5},
+			right: map[uint64]interface{}{0b000: 0, 0b001: 1},
+			want:  "{}",
+		},
+
+		{ // ord(m, n) && !zeroBit(q, m)
+			// p = 29, m = 2, q = 30, n = 1
+			left: map[uint64]interface{}{
+				0b11101: "29",
+				0b11110: "30",
+			},
+			right: map[uint64]interface{}{
+				0b11110: "30 on right",
+				0b11111: "31",
+			},
+			want: "{30: 30}",
+		},
+		{ // ord(n, m) && zeroBit(p, n)
+			// p = 5, m = 2, q = 3, n = 4
+			left:  map[uint64]interface{}{0b000: 0, 0b001: 1},
+			right: map[uint64]interface{}{0b010: 2, 0b101: 5},
+			want:  "{}",
+		},
+		{ // default case
+			// p = 5, m = 2, q = 3, n = 4
+			left:  map[uint64]interface{}{0b100: 1, 0b110: 3},
+			right: map[uint64]interface{}{0b000: 8, 0b111: 6},
+			want:  "{}",
+		},
+	} {
+		l, r := b.Create(c.left), b.Create(c.right)
+		m := b.Intersect(l, r)
+		if got := m.String(); got != c.want {
+			t.Errorf("Intersect(%s, %s) got %q. want %q ", l, r, got, c.want)
+		}
+	}
+}
+
+func TestIntersectWith(t *testing.T) {
+	b := NewBuilder()
+	l := b.Create(map[uint64]interface{}{10: 2.0, 39: 32.0})
+	r := b.Create(map[uint64]interface{}{10: 6.0, 39: 10.0, 75: 1.0})
+
+	prodIfDifferent := func(x interface{}, y interface{}) interface{} {
+		if x, ok := x.(float64); ok {
+			if y, ok := y.(float64); ok {
+				if x == y {
+					return x
+				}
+				return x * y
+			}
+		}
+		return x
+	}
+
+	m := b.IntersectWith(prodIfDifferent, l, r)
+
+	want := "{10: %!s(float64=12), 39: %!s(float64=320)}"
+	if got := m.String(); got != want {
+		t.Errorf("IntersectWith(min, %s, %s) got %q. want %q ", l, r, got, want)
+	}
+}
+
+func TestRemove(t *testing.T) {
+	// Most of the test cases go after specific branches of remove.
+	b := NewBuilder()
+	for _, c := range []struct {
+		m    map[uint64]interface{}
+		key  uint64
+		want string
+	}{
+		{map[uint64]interface{}{}, 10, "{}"},
+		{map[uint64]interface{}{10: "a"}, 10, "{}"},
+		{map[uint64]interface{}{39: "b"}, 10, "{39: b}"},
+		// Branch cases:
+		// !matchPrefix(kp, br.prefix, br.branching)
+		{map[uint64]interface{}{10: "a", 39: "b"}, 128, "{10: a, 39: b}"},
+		// case: left == br.left && right == br.right
+		{map[uint64]interface{}{10: "a", 39: "b"}, 16, "{10: a, 39: b}"},
+		// left updated and is empty.
+		{map[uint64]interface{}{10: "a", 39: "b"}, 10, "{39: b}"},
+		// right updated and is empty.
+		{map[uint64]interface{}{10: "a", 39: "b"}, 39, "{10: a}"},
+		// final b.mkBranch(...) case.
+		{map[uint64]interface{}{10: "a", 39: "b", 128: "c"}, 39, "{10: a, 128: c}"},
+	} {
+		pre := b.Create(c.m)
+		post := b.Remove(pre, c.key)
+		if got := post.String(); got != c.want {
+			t.Errorf("Remove(%s, %d) got %q. want %q ", pre, c.key, got, c.want)
+		}
+	}
+}
+
+func TestRescope(t *testing.T) {
+	b := NewBuilder()
+	l := b.Create(map[uint64]interface{}{10: "a", 39: "b"})
+	r := b.Create(map[uint64]interface{}{10: "A", 39: "B", 75: "C"})
+
+	b.Rescope()
+
+	m := b.Intersect(l, r)
+	if got, want := m.String(), "{10: a, 39: b}"; got != want {
+		t.Errorf("Intersect(%s, %s) got %q. want %q", l, r, got, want)
+	}
+	if m.Scope() == l.Scope() {
+		t.Errorf("m.Scope() = %v should not equal l.Scope() = %v", m.Scope(), l.Scope())
+	}
+	if m.Scope() == r.Scope() {
+		t.Errorf("m.Scope() = %v should not equal r.Scope() = %v", m.Scope(), r.Scope())
+	}
+}
+
+func TestSharing(t *testing.T) {
+	b := NewBuilder()
+	l := b.Create(map[uint64]interface{}{0: "a", 1: "b"})
+	r := b.Create(map[uint64]interface{}{1: "B", 2: "C"})
+
+	rleftold := r.n.(*branch).left
+
+	m := b.Merge(l, r)
+	if mleft := m.n.(*branch).left; mleft != l.n {
+		t.Errorf("unexpected value for left branch of %v. want %v got %v", m, l, mleft)
+	}
+
+	if rleftnow := r.n.(*branch).left; rleftnow != rleftold {
+		t.Errorf("r.n.(*branch).left was modified by the Merge operation. was %v now %v", rleftold, rleftnow)
+	}
+}
diff --git a/go/callgraph/vta/propagation.go b/go/callgraph/vta/propagation.go
index 6c11801..5934ebc 100644
--- a/go/callgraph/vta/propagation.go
+++ b/go/callgraph/vta/propagation.go
@@ -7,6 +7,7 @@
 import (
 	"go/types"
 
+	"golang.org/x/tools/go/callgraph/vta/internal/trie"
 	"golang.org/x/tools/go/ssa"
 
 	"golang.org/x/tools/go/types/typeutil"
@@ -94,21 +95,21 @@
 // the role of a map from nodes to a set of propTypes.
 type propTypeMap struct {
 	nodeToScc  map[node]int
-	sccToTypes map[int]map[propType]bool
+	sccToTypes map[int]*trie.MutMap
 }
 
-// propTypes returns a set of propTypes associated with
+// propTypes returns a list of propTypes associated with
 // node `n`. If `n` is not in the map `ptm`, nil is returned.
-//
-// Note: for performance reasons, the returned set is a
-// reference to existing set in the map `ptm`, so any updates
-// to it will affect `ptm` as well.
-func (ptm propTypeMap) propTypes(n node) map[propType]bool {
+func (ptm propTypeMap) propTypes(n node) []propType {
 	id, ok := ptm.nodeToScc[n]
 	if !ok {
 		return nil
 	}
-	return ptm.sccToTypes[id]
+	var pts []propType
+	for _, elem := range trie.Elems(ptm.sccToTypes[id].M) {
+		pts = append(pts, elem.(propType))
+	}
+	return pts
 }
 
 // propagate reduces the `graph` based on its SCCs and
@@ -118,12 +119,6 @@
 // reaching the node. `canon` is used for type uniqueness.
 func propagate(graph vtaGraph, canon *typeutil.Map) propTypeMap {
 	nodeToScc, sccID := scc(graph)
-	// Initialize sccToTypes to avoid repeated check
-	// for initialization later.
-	sccToTypes := make(map[int]map[propType]bool, sccID)
-	for i := 0; i <= sccID; i++ {
-		sccToTypes[i] = make(map[propType]bool)
-	}
 
 	// We also need the reverse map, from ids to SCCs.
 	sccs := make(map[int][]node, sccID)
@@ -131,35 +126,53 @@
 		sccs[id] = append(sccs[id], n)
 	}
 
+	// propTypeIds are used to create unique ids for
+	// propType, to be used for trie-based type sets.
+	propTypeIds := make(map[propType]uint64)
+	// Id creation is based on == equality, which works
+	// as types are canonicalized (see getPropType).
+	propTypeId := func(p propType) uint64 {
+		if id, ok := propTypeIds[p]; ok {
+			return id
+		}
+		id := uint64(len(propTypeIds))
+		propTypeIds[p] = id
+		return id
+	}
+	builder := trie.NewBuilder()
+	// Initialize sccToTypes to avoid repeated check
+	// for initialization later.
+	sccToTypes := make(map[int]*trie.MutMap, sccID)
+	for i := 0; i <= sccID; i++ {
+		sccToTypes[i] = nodeTypes(sccs[i], builder, propTypeId, canon)
+	}
+
 	for i := len(sccs) - 1; i >= 0; i-- {
-		nodes := sccs[i]
-		// Save the types induced by the nodes of the SCC.
-		mergeTypes(sccToTypes[i], nodeTypes(nodes, canon))
-		nextSccs := make(map[int]bool)
-		for _, node := range nodes {
+		nextSccs := make(map[int]struct{})
+		for _, node := range sccs[i] {
 			for succ := range graph[node] {
-				nextSccs[nodeToScc[succ]] = true
+				nextSccs[nodeToScc[succ]] = struct{}{}
 			}
 		}
 		// Propagate types to all successor SCCs.
 		for nextScc := range nextSccs {
-			mergeTypes(sccToTypes[nextScc], sccToTypes[i])
+			sccToTypes[nextScc].Merge(sccToTypes[i].M)
 		}
 	}
-
 	return propTypeMap{nodeToScc: nodeToScc, sccToTypes: sccToTypes}
 }
 
 // nodeTypes returns a set of propTypes for `nodes`. These are the
 // propTypes stemming from the type of each node in `nodes` plus.
-func nodeTypes(nodes []node, canon *typeutil.Map) map[propType]bool {
-	types := make(map[propType]bool)
+func nodeTypes(nodes []node, builder *trie.Builder, propTypeId func(p propType) uint64, canon *typeutil.Map) *trie.MutMap {
+	typeSet := builder.MutEmpty()
 	for _, n := range nodes {
 		if hasInitialTypes(n) {
-			types[getPropType(n, canon)] = true
+			pt := getPropType(n, canon)
+			typeSet.Update(propTypeId(pt), pt)
 		}
 	}
-	return types
+	return &typeSet
 }
 
 // getPropType creates a propType for `node` based on its type.
@@ -172,10 +185,3 @@
 	}
 	return propType{f: nil, typ: t}
 }
-
-// mergeTypes merges propTypes in `rhs` to `lhs`.
-func mergeTypes(lhs, rhs map[propType]bool) {
-	for typ := range rhs {
-		lhs[typ] = true
-	}
-}
diff --git a/go/callgraph/vta/propagation_test.go b/go/callgraph/vta/propagation_test.go
index 219fd70..9670741 100644
--- a/go/callgraph/vta/propagation_test.go
+++ b/go/callgraph/vta/propagation_test.go
@@ -101,7 +101,7 @@
 	nodeToTypeStr := make(map[string]string)
 	for node := range pMap.nodeToScc {
 		var propStrings []string
-		for prop := range pMap.propTypes(node) {
+		for _, prop := range pMap.propTypes(node) {
 			propStrings = append(propStrings, propTypeString(prop))
 		}
 		sort.Strings(propStrings)
diff --git a/go/callgraph/vta/testdata/callgraph_collections.go b/go/callgraph/vta/testdata/src/callgraph_collections.go
similarity index 100%
rename from go/callgraph/vta/testdata/callgraph_collections.go
rename to go/callgraph/vta/testdata/src/callgraph_collections.go
diff --git a/go/callgraph/vta/testdata/src/callgraph_field_funcs.go b/go/callgraph/vta/testdata/src/callgraph_field_funcs.go
new file mode 100644
index 0000000..cf4c0f1
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/callgraph_field_funcs.go
@@ -0,0 +1,67 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// go:build ignore
+
+package testdata
+
+type WrappedFunc struct {
+	F func() complex64
+}
+
+func callWrappedFunc(f WrappedFunc) {
+	f.F()
+}
+
+func foo() complex64 {
+	println("foo")
+	return -1
+}
+
+func Foo(b bool) {
+	callWrappedFunc(WrappedFunc{foo})
+	x := func() {}
+	y := func() {}
+	var a *func()
+	if b {
+		a = &x
+	} else {
+		a = &y
+	}
+	(*a)()
+}
+
+// Relevant SSA:
+// func Foo(b bool):
+//         t0 = local WrappedFunc (complit)
+//         t1 = &t0.F [#0]
+//         *t1 = foo
+//         t2 = *t0
+//         t3 = callWrappedFunc(t2)
+//         t4 = new func() (x)
+//         *t4 = Foo$1
+//         t5 = new func() (y)
+//         *t5 = Foo$2
+//         if b goto 1 else 3
+// 1:
+//         jump 2
+// 2:
+//         t6 = phi [1: t4, 3: t5] #a
+//         t7 = *t6
+//         t8 = t7()
+//         return
+// 3:
+//         jump 2
+//
+// func callWrappedFunc(f WrappedFunc):
+//         t0 = local WrappedFunc (f)
+//         *t0 = f
+//         t1 = &t0.F [#0]
+//         t2 = *t1
+//         t3 = t2()
+//         return
+
+// WANT:
+// callWrappedFunc: t2() -> foo
+// Foo: callWrappedFunc(t2) -> callWrappedFunc; t7() -> Foo$1, Foo$2
diff --git a/go/callgraph/vta/testdata/src/callgraph_fields.go b/go/callgraph/vta/testdata/src/callgraph_fields.go
new file mode 100644
index 0000000..00aa649
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/callgraph_fields.go
@@ -0,0 +1,91 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// go:build ignore
+
+package testdata
+
+type I interface {
+	Foo()
+}
+
+type A struct {
+	I
+}
+
+func (a *A) Do() {
+	a.Foo()
+}
+
+type B struct{}
+
+func (b B) Foo() {}
+
+func NewA(b B) *A {
+	return &A{I: &b}
+}
+
+func Baz(b B) {
+	a := NewA(b)
+	a.Do()
+}
+
+// Relevant SSA:
+// func Baz(b B):
+//        t0 = local B (b)
+//        *t0 = b
+//        t1 = *t0
+//        t2 = NewA(t1)
+//        t3 = (*A).Do(t2)
+//        return
+//
+// func (a *A) Do():
+//        t0 = &a.I [#0]
+//        t1 = *t0
+//        t2 = invoke t1.Foo()
+//        return
+//
+// Name: (testdata.A).Foo
+// Synthetic: wrapper for func (testdata.I).Foo()
+// Location: testdata/callgraph_fields.go:10:2
+// func (arg0 testdata.A) Foo():
+//	  t0 = local testdata.A ()
+//        *t0 = arg0
+//        t1 = &t0.I [#0]
+//        t2 = *t1
+//        t3 = invoke t2.Foo()
+//        return
+//
+// Name: (*testdata.A).Foo
+// Synthetic: wrapper for func (testdata.I).Foo()
+// Location: testdata/callgraph_fields.go:10:2
+// func (arg0 *testdata.A) Foo():
+//        t0 = &arg0.I [#0]
+//        t1 = *t0
+//        t2 = invoke t1.Foo()
+//        return
+//
+// func (b B) Foo():
+//        t0 = local B (b)
+//        *t0 = b
+//        return
+//
+// func (b *testdata.B) Foo():
+//        t0 = ssa:wrapnilchk(b, "testdata.B":string, "Foo":string)
+//        t1 = *t0
+//        t2 = (testdata.B).Foo(t1)
+//        return
+//
+// func NewA(b B) *A:
+//        t0 = new B (b)
+//        *t0 = b
+//        t1 = new A (complit)
+//        t2 = &t1.I [#0]
+//        t3 = make I <- *B (t0)
+//        *t2 = t3
+//        return t1
+
+// WANT:
+// Baz: (*A).Do(t2) -> A.Do; NewA(t1) -> NewA
+// A.Do: invoke t1.Foo() -> B.Foo
diff --git a/go/callgraph/vta/testdata/callgraph_ho.go b/go/callgraph/vta/testdata/src/callgraph_ho.go
similarity index 100%
rename from go/callgraph/vta/testdata/callgraph_ho.go
rename to go/callgraph/vta/testdata/src/callgraph_ho.go
diff --git a/go/callgraph/vta/testdata/callgraph_interfaces.go b/go/callgraph/vta/testdata/src/callgraph_interfaces.go
similarity index 100%
rename from go/callgraph/vta/testdata/callgraph_interfaces.go
rename to go/callgraph/vta/testdata/src/callgraph_interfaces.go
diff --git a/go/callgraph/vta/testdata/callgraph_nested_ptr.go b/go/callgraph/vta/testdata/src/callgraph_nested_ptr.go
similarity index 100%
rename from go/callgraph/vta/testdata/callgraph_nested_ptr.go
rename to go/callgraph/vta/testdata/src/callgraph_nested_ptr.go
diff --git a/go/callgraph/vta/testdata/callgraph_pointers.go b/go/callgraph/vta/testdata/src/callgraph_pointers.go
similarity index 100%
rename from go/callgraph/vta/testdata/callgraph_pointers.go
rename to go/callgraph/vta/testdata/src/callgraph_pointers.go
diff --git a/go/callgraph/vta/testdata/callgraph_static.go b/go/callgraph/vta/testdata/src/callgraph_static.go
similarity index 100%
rename from go/callgraph/vta/testdata/callgraph_static.go
rename to go/callgraph/vta/testdata/src/callgraph_static.go
diff --git a/go/callgraph/vta/testdata/channels.go b/go/callgraph/vta/testdata/src/channels.go
similarity index 100%
rename from go/callgraph/vta/testdata/channels.go
rename to go/callgraph/vta/testdata/src/channels.go
diff --git a/go/callgraph/vta/testdata/closures.go b/go/callgraph/vta/testdata/src/closures.go
similarity index 100%
rename from go/callgraph/vta/testdata/closures.go
rename to go/callgraph/vta/testdata/src/closures.go
diff --git a/go/callgraph/vta/testdata/src/d/d.go b/go/callgraph/vta/testdata/src/d/d.go
new file mode 100644
index 0000000..eedcc3a
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/d/d.go
@@ -0,0 +1,13 @@
+package d
+
+func D(i int) int {
+	return i + 1
+}
+
+type Data struct {
+	V int
+}
+
+func (d Data) Do() int {
+	return d.V - 1
+}
diff --git a/go/callgraph/vta/testdata/dynamic_calls.go b/go/callgraph/vta/testdata/src/dynamic_calls.go
similarity index 66%
rename from go/callgraph/vta/testdata/dynamic_calls.go
rename to go/callgraph/vta/testdata/src/dynamic_calls.go
index fa4270b..b8c14b2 100644
--- a/go/callgraph/vta/testdata/dynamic_calls.go
+++ b/go/callgraph/vta/testdata/src/dynamic_calls.go
@@ -37,7 +37,13 @@
 //   t4 = h()
 //   return t4
 
+// Local(t2) appears to have duplicate successors. This happens during
+// stringification of the type propagation graph. Due to CHA, we analyze
+// A.foo and *A.foo as well as B.foo and *B.foo, which have similar bodies
+// and hence similar type flow that gets merged together during
+// stringification.
+
 // WANT:
-// Local(t2) -> Local(ai), Local(bi)
+// Local(t2) -> Local(ai), Local(ai), Local(bi), Local(bi)
 // Constant(testdata.I) -> Local(t4)
 // Local(t1) -> Local(t2)
diff --git a/go/callgraph/vta/testdata/fields.go b/go/callgraph/vta/testdata/src/fields.go
similarity index 100%
rename from go/callgraph/vta/testdata/fields.go
rename to go/callgraph/vta/testdata/src/fields.go
diff --git a/go/callgraph/vta/testdata/src/function_alias.go b/go/callgraph/vta/testdata/src/function_alias.go
new file mode 100644
index 0000000..b38e0e0
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/function_alias.go
@@ -0,0 +1,74 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// go:build ignore
+
+package testdata
+
+type Doer func()
+
+type A struct {
+	foo func()
+	do  Doer
+}
+
+func Baz(f func()) {
+	j := &f
+	k := &j
+	**k = func() {}
+	a := A{}
+	a.foo = **k
+	a.foo()
+	a.do = a.foo
+	a.do()
+}
+
+// Relevant SSA:
+// func Baz(f func()):
+//        t0 = new func() (f)
+//        *t0 = f
+//        t1 = new *func() (j)
+//        *t1 = t0
+//        t2 = *t1
+//        *t2 = Baz$1
+//        t3 = local A (a)
+//        t4 = &t3.foo [#0]
+//        t5 = *t1
+//        t6 = *t5
+//        *t4 = t6
+//        t7 = &t3.foo [#0]
+//        t8 = *t7
+//        t9 = t8()
+//        t10 = &t3.do [#1]                                                 *Doer
+//        t11 = &t3.foo [#0]                                              *func()
+//        t12 = *t11                                                       func()
+//        t13 = changetype Doer <- func() (t12)                              Doer
+//        *t10 = t13
+//        t14 = &t3.do [#1]                                                 *Doer
+//        t15 = *t14                                                         Doer
+//        t16 = t15()                                                          ()
+
+// Flow chain showing that Baz$1 reaches t8():
+//   Baz$1 -> t2 <-> PtrFunction(func()) <-> t5 -> t6 -> t4 <-> Field(testdata.A:foo) <-> t7 -> t8
+// Flow chain showing that Baz$1 reaches t15():
+//  Field(testdata.A:foo) <-> t11 -> t12 -> t13 -> t10 <-> Field(testdata.A:do) <-> t14 -> t15
+
+// WANT:
+// Local(f) -> Local(t0)
+// Local(t0) -> PtrFunction(func())
+// Function(Baz$1) -> Local(t2)
+// PtrFunction(func()) -> Local(t0), Local(t2), Local(t5)
+// Local(t2) -> PtrFunction(func())
+// Local(t4) -> Field(testdata.A:foo)
+// Local(t5) -> Local(t6), PtrFunction(func())
+// Local(t6) -> Local(t4)
+// Local(t7) -> Field(testdata.A:foo), Local(t8)
+// Field(testdata.A:foo) -> Local(t11), Local(t4), Local(t7)
+// Local(t4) -> Field(testdata.A:foo)
+// Field(testdata.A:do) -> Local(t10), Local(t14)
+// Local(t10) -> Field(testdata.A:do)
+// Local(t11) -> Field(testdata.A:foo), Local(t12)
+// Local(t12) -> Local(t13)
+// Local(t13) -> Local(t10)
+// Local(t14) -> Field(testdata.A:do), Local(t15)
diff --git a/go/callgraph/vta/testdata/go117.go b/go/callgraph/vta/testdata/src/go117.go
similarity index 100%
rename from go/callgraph/vta/testdata/go117.go
rename to go/callgraph/vta/testdata/src/go117.go
diff --git a/go/callgraph/vta/testdata/maps.go b/go/callgraph/vta/testdata/src/maps.go
similarity index 100%
rename from go/callgraph/vta/testdata/maps.go
rename to go/callgraph/vta/testdata/src/maps.go
diff --git a/go/callgraph/vta/testdata/node_uniqueness.go b/go/callgraph/vta/testdata/src/node_uniqueness.go
similarity index 91%
rename from go/callgraph/vta/testdata/node_uniqueness.go
rename to go/callgraph/vta/testdata/src/node_uniqueness.go
index 0c1dc07..fd48405 100644
--- a/go/callgraph/vta/testdata/node_uniqueness.go
+++ b/go/callgraph/vta/testdata/src/node_uniqueness.go
@@ -49,8 +49,8 @@
 
 // Without canon approach, one of Pointer(*A) -> Local(t0) and Pointer(*A) -> Local(t2) edges is
 // missing in the graph string representation. The original graph has both of the edges but the
-// source node Pointer(*A) is not the same; two occurences of Pointer(*A) are considered separate
-// nodes. Since they have the same string representation, one edge gets overriden by the other
+// source node Pointer(*A) is not the same; two occurrences of Pointer(*A) are considered separate
+// nodes. Since they have the same string representation, one edge gets overridden by the other
 // during the graph stringification, instead of being joined together as in below.
 
 // WANT:
diff --git a/go/callgraph/vta/testdata/panic.go b/go/callgraph/vta/testdata/src/panic.go
similarity index 100%
rename from go/callgraph/vta/testdata/panic.go
rename to go/callgraph/vta/testdata/src/panic.go
diff --git a/go/callgraph/vta/testdata/phi.go b/go/callgraph/vta/testdata/src/phi.go
similarity index 100%
rename from go/callgraph/vta/testdata/phi.go
rename to go/callgraph/vta/testdata/src/phi.go
diff --git a/go/callgraph/vta/testdata/phi_alias.go b/go/callgraph/vta/testdata/src/phi_alias.go
similarity index 100%
rename from go/callgraph/vta/testdata/phi_alias.go
rename to go/callgraph/vta/testdata/src/phi_alias.go
diff --git a/go/callgraph/vta/testdata/ranges.go b/go/callgraph/vta/testdata/src/ranges.go
similarity index 100%
rename from go/callgraph/vta/testdata/ranges.go
rename to go/callgraph/vta/testdata/src/ranges.go
diff --git a/go/callgraph/vta/testdata/returns.go b/go/callgraph/vta/testdata/src/returns.go
similarity index 100%
rename from go/callgraph/vta/testdata/returns.go
rename to go/callgraph/vta/testdata/src/returns.go
diff --git a/go/callgraph/vta/testdata/select.go b/go/callgraph/vta/testdata/src/select.go
similarity index 100%
rename from go/callgraph/vta/testdata/select.go
rename to go/callgraph/vta/testdata/src/select.go
diff --git a/go/callgraph/vta/testdata/simple.go b/go/callgraph/vta/testdata/src/simple.go
similarity index 100%
rename from go/callgraph/vta/testdata/simple.go
rename to go/callgraph/vta/testdata/src/simple.go
diff --git a/go/callgraph/vta/testdata/static_calls.go b/go/callgraph/vta/testdata/src/static_calls.go
similarity index 100%
rename from go/callgraph/vta/testdata/static_calls.go
rename to go/callgraph/vta/testdata/src/static_calls.go
diff --git a/go/callgraph/vta/testdata/store.go b/go/callgraph/vta/testdata/src/store.go
similarity index 100%
rename from go/callgraph/vta/testdata/store.go
rename to go/callgraph/vta/testdata/src/store.go
diff --git a/go/callgraph/vta/testdata/store_load_alias.go b/go/callgraph/vta/testdata/src/store_load_alias.go
similarity index 100%
rename from go/callgraph/vta/testdata/store_load_alias.go
rename to go/callgraph/vta/testdata/src/store_load_alias.go
diff --git a/go/callgraph/vta/testdata/stores_arrays.go b/go/callgraph/vta/testdata/src/stores_arrays.go
similarity index 100%
rename from go/callgraph/vta/testdata/stores_arrays.go
rename to go/callgraph/vta/testdata/src/stores_arrays.go
diff --git a/go/callgraph/vta/testdata/src/t/t.go b/go/callgraph/vta/testdata/src/t/t.go
new file mode 100644
index 0000000..55a7002
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/t/t.go
@@ -0,0 +1,8 @@
+package t
+
+import "d"
+
+func t(i int) int {
+	data := d.Data{V: i}
+	return d.D(i) + data.Do()
+}
diff --git a/go/callgraph/vta/testdata/type_assertions.go b/go/callgraph/vta/testdata/src/type_assertions.go
similarity index 100%
rename from go/callgraph/vta/testdata/type_assertions.go
rename to go/callgraph/vta/testdata/src/type_assertions.go
diff --git a/go/callgraph/vta/testdata/type_conversions.go b/go/callgraph/vta/testdata/src/type_conversions.go
similarity index 100%
rename from go/callgraph/vta/testdata/type_conversions.go
rename to go/callgraph/vta/testdata/src/type_conversions.go
diff --git a/go/callgraph/vta/utils.go b/go/callgraph/vta/utils.go
index 69361ab..e7a97e2 100644
--- a/go/callgraph/vta/utils.go
+++ b/go/callgraph/vta/utils.go
@@ -19,6 +19,9 @@
 	if _, ok := n.(nestedPtrInterface); ok {
 		return true
 	}
+	if _, ok := n.(nestedPtrFunction); ok {
+		return true
+	}
 
 	if _, ok := n.Type().(*types.Pointer); ok {
 		return true
@@ -33,7 +36,9 @@
 //  2) is a (nested) pointer to interface (needed for, say,
 //     slice elements of nested pointers to interface type)
 //  3) is a function type (needed for higher-order type flow)
-//  4) is a global Recover or Panic node
+//  4) is a (nested) pointer to function (needed for, say,
+//     slice elements of nested pointers to function type)
+//  5) is a global Recover or Panic node
 func hasInFlow(n node) bool {
 	if _, ok := n.(panicArg); ok {
 		return true
@@ -44,20 +49,19 @@
 
 	t := n.Type()
 
-	if _, ok := t.Underlying().(*types.Signature); ok {
-		return true
-	}
-
 	if i := interfaceUnderPtr(t); i != nil {
 		return true
 	}
+	if f := functionUnderPtr(t); f != nil {
+		return true
+	}
 
-	return isInterface(t)
+	return isInterface(t) || isFunction(t)
 }
 
 // hasInitialTypes check if a node can have initial types.
 // Returns true iff `n` is not a panic or recover node as
-// those are artifical.
+// those are artificial.
 func hasInitialTypes(n node) bool {
 	switch n.(type) {
 	case panicArg, recoverReturn:
@@ -72,6 +76,11 @@
 	return ok
 }
 
+func isFunction(t types.Type) bool {
+	_, ok := t.Underlying().(*types.Signature)
+	return ok
+}
+
 // interfaceUnderPtr checks if type `t` is a potentially nested
 // pointer to interface and if yes, returns the interface type.
 // Otherwise, returns nil.
@@ -88,6 +97,22 @@
 	return interfaceUnderPtr(p.Elem())
 }
 
+// functionUnderPtr checks if type `t` is a potentially nested
+// pointer to function type and if yes, returns the function type.
+// Otherwise, returns nil.
+func functionUnderPtr(t types.Type) types.Type {
+	p, ok := t.Underlying().(*types.Pointer)
+	if !ok {
+		return nil
+	}
+
+	if isFunction(p.Elem()) {
+		return p.Elem()
+	}
+
+	return functionUnderPtr(p.Elem())
+}
+
 // sliceArrayElem returns the element type of type `t` that is
 // expected to be a (pointer to) array or slice, consistent with
 // the ssa.Index and ssa.IndexAddr instructions. Panics otherwise.
@@ -114,10 +139,8 @@
 	}
 
 	for _, edge := range node.Out {
-		callee := edge.Callee.Func
-		// Skip synthetic functions wrapped around source functions.
-		if edge.Site == c && callee.Synthetic == "" {
-			matches = append(matches, callee)
+		if edge.Site == c {
+			matches = append(matches, edge.Callee.Func)
 		}
 	}
 	return matches
diff --git a/go/callgraph/vta/vta.go b/go/callgraph/vta/vta.go
index 6a0e55d..98fabe5 100644
--- a/go/callgraph/vta/vta.go
+++ b/go/callgraph/vta/vta.go
@@ -51,7 +51,6 @@
 // it may have. This information is then used to construct the call graph.
 // For each unresolved call site, vta uses the set of types and functions
 // reaching the node representing the call site to create a set of callees.
-
 package vta
 
 import (
@@ -62,10 +61,13 @@
 )
 
 // CallGraph uses the VTA algorithm to compute call graph for all functions
-// f such that f:true is in `funcs`. VTA refines the results of 'initial'
-// callgraph and uses it to establish interprocedural data flow. VTA is
-// sound if 'initial` is sound modulo reflection and unsage. The resulting
-// callgraph does not have a root node.
+// f such that f:true is in funcs. VTA refines the results of the initial
+// call graph and uses it to establish interprocedural type flow. The
+// resulting graph does not have a root node.
+//
+// CallGraph does not make any assumptions on the initial types that global
+// variables and function/method inputs can have. CallGraph is then sound,
+// modulo use of reflection and unsafe, if the initial call graph is sound.
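+//
+// For example (a sketch; prog is assumed to be an already built *ssa.Program),
+// pairing VTA with CHA for the initial call graph, as the tests in this
+// package do:
+//
+//	cg := vta.CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))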
 func CallGraph(funcs map[*ssa.Function]bool, initial *callgraph.Graph) *callgraph.Graph {
 	vtaG, canon := typePropGraph(funcs, initial)
 	types := propagate(vtaG, canon)
@@ -124,14 +126,14 @@
 func resolve(c ssa.CallInstruction, types propTypeMap, cache methodCache) []*ssa.Function {
 	n := local{val: c.Common().Value}
 	var funcs []*ssa.Function
-	for p := range types.propTypes(n) {
+	for _, p := range types.propTypes(n) {
 		funcs = append(funcs, propFunc(p, c, cache)...)
 	}
 	return funcs
 }
 
 // propFunc returns the functions modeled with the propagation type `p`
-// assigned to call site `c`. If no such funciton exists, nil is returned.
+// assigned to call site `c`. If no such function exists, nil is returned.
 func propFunc(p propType, c ssa.CallInstruction, cache methodCache) []*ssa.Function {
 	if p.f != nil {
 		return []*ssa.Function{p.f}
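The rewritten CallGraph comment describes the intended pipeline: build a cheap, sound initial graph (for example with CHA) and let VTA refine it via interprocedural type flow. A minimal, hypothetical driver along those lines is sketched below; the package-loading setup (packages.LoadAllSyntax, the "." pattern) is an assumption for illustration, not part of this change:

```go
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/callgraph/cha"
	"golang.org/x/tools/go/callgraph/vta"
	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	// Load syntax and type information for the target packages.
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	pkgs, err := packages.Load(cfg, ".")
	if err != nil {
		log.Fatal(err)
	}

	// Build SSA form for everything that was loaded.
	prog, _ := ssautil.AllPackages(pkgs, 0)
	prog.Build()

	// CHA gives a sound (if imprecise) initial call graph; VTA refines it.
	funcs := ssautil.AllFunctions(prog)
	graph := vta.CallGraph(funcs, cha.CallGraph(prog))

	fmt.Println("call graph nodes:", len(graph.Nodes))
}
```

Passing ssautil.AllFunctions(prog) analyzes the whole program; a smaller function set trades precision for speed, which is what TestVTAProgVsFuncSet below exercises.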
diff --git a/go/callgraph/vta/vta_go117_test.go b/go/callgraph/vta/vta_go117_test.go
index fae657c..9ce6a88 100644
--- a/go/callgraph/vta/vta_go117_test.go
+++ b/go/callgraph/vta/vta_go117_test.go
@@ -15,7 +15,7 @@
 )
 
 func TestVTACallGraphGo117(t *testing.T) {
-	file := "testdata/go117.go"
+	file := "testdata/src/go117.go"
 	prog, want, err := testProg(file)
 	if err != nil {
 		t.Fatalf("couldn't load test file '%s': %s", file, err)
diff --git a/go/callgraph/vta/vta_test.go b/go/callgraph/vta/vta_test.go
index 87b27cb..33ceaf9 100644
--- a/go/callgraph/vta/vta_test.go
+++ b/go/callgraph/vta/vta_test.go
@@ -7,6 +7,9 @@
 import (
 	"testing"
 
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/analysistest"
+	"golang.org/x/tools/go/analysis/passes/buildssa"
 	"golang.org/x/tools/go/callgraph/cha"
 	"golang.org/x/tools/go/ssa"
 	"golang.org/x/tools/go/ssa/ssautil"
@@ -14,11 +17,13 @@
 
 func TestVTACallGraph(t *testing.T) {
 	for _, file := range []string{
-		"testdata/callgraph_static.go",
-		"testdata/callgraph_ho.go",
-		"testdata/callgraph_interfaces.go",
-		"testdata/callgraph_pointers.go",
-		"testdata/callgraph_collections.go",
+		"testdata/src/callgraph_static.go",
+		"testdata/src/callgraph_ho.go",
+		"testdata/src/callgraph_interfaces.go",
+		"testdata/src/callgraph_pointers.go",
+		"testdata/src/callgraph_collections.go",
+		"testdata/src/callgraph_fields.go",
+		"testdata/src/callgraph_field_funcs.go",
 	} {
 		t.Run(file, func(t *testing.T) {
 			prog, want, err := testProg(file)
@@ -41,12 +46,12 @@
 // enabled by having an arbitrary function set as input to CallGraph
 // instead of the whole program (i.e., ssautil.AllFunctions(prog)).
 func TestVTAProgVsFuncSet(t *testing.T) {
-	prog, want, err := testProg("testdata/callgraph_nested_ptr.go")
+	prog, want, err := testProg("testdata/src/callgraph_nested_ptr.go")
 	if err != nil {
-		t.Fatalf("couldn't load test `testdata/callgraph_nested_ptr.go`: %s", err)
+		t.Fatalf("couldn't load test `testdata/src/callgraph_nested_ptr.go`: %s", err)
 	}
 	if len(want) == 0 {
-		t.Fatal("couldn't find want in `testdata/callgraph_nested_ptr.go`")
+		t.Fatal("couldn't find want in `testdata/src/callgraph_nested_ptr.go`")
 	}
 
 	allFuncs := ssautil.AllFunctions(prog)
@@ -74,3 +79,35 @@
 		t.Errorf("pruned callgraph %v should contain %v", got, want)
 	}
 }
+
+// TestVTAPanicMissingDefinitions tests that VTA gracefully handles the case
+// where the definition of a function or method is not available, which can
+// happen when using the analysis package. A successful test simply does not
+// panic.
+func TestVTAPanicMissingDefinitions(t *testing.T) {
+	run := func(pass *analysis.Pass) (interface{}, error) {
+		s := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
+		CallGraph(ssautil.AllFunctions(s.Pkg.Prog), cha.CallGraph(s.Pkg.Prog))
+		return nil, nil
+	}
+
+	analyzer := &analysis.Analyzer{
+		Name: "test",
+		Doc:  "test",
+		Run:  run,
+		Requires: []*analysis.Analyzer{
+			buildssa.Analyzer,
+		},
+	}
+
+	testdata := analysistest.TestData()
+	res := analysistest.Run(t, testdata, analyzer, "t", "d")
+	if len(res) != 2 {
+		t.Errorf("want analysis results for 2 packages; got %v", len(res))
+	}
+	for _, r := range res {
+		if r.Err != nil {
+			t.Errorf("want no error for package %v; got %v", r.Pass.Pkg.Path(), r.Err)
+		}
+	}
+}
diff --git a/go/gcexportdata/example_test.go b/go/gcexportdata/example_test.go
index fda3f60..7df05ab 100644
--- a/go/gcexportdata/example_test.go
+++ b/go/gcexportdata/example_test.go
@@ -16,6 +16,7 @@
 	"log"
 	"os"
 	"path/filepath"
+	"strings"
 
 	"golang.org/x/tools/go/gcexportdata"
 )
@@ -59,8 +60,9 @@
 	fmt.Printf("Package members:    %s...\n", members[:5])
 	println := pkg.Scope().Lookup("Println")
 	posn := fset.Position(println.Pos())
-	posn.Line = 123 // make example deterministic
-	fmt.Printf("Println type:       %s\n", println.Type())
+	posn.Line = 123                                                          // make example deterministic
+	typ := strings.ReplaceAll(println.Type().String(), "interface{}", "any") // go 1.18+ uses the 'any' alias
+	fmt.Printf("Println type:       %s\n", typ)
 	fmt.Printf("Println location:   %s\n", slashify(posn))
 
 	// Output:
@@ -68,7 +70,7 @@
 	// Package path:       fmt
 	// Export data:        fmt.a
 	// Package members:    [Errorf Formatter Fprint Fprintf Fprintln]...
-	// Println type:       func(a ...interface{}) (n int, err error)
+	// Println type:       func(a ...any) (n int, err error)
 	// Println location:   $GOROOT/src/fmt/print.go:123:1
 }
 
diff --git a/go/gcexportdata/gcexportdata.go b/go/gcexportdata/gcexportdata.go
index fc8beea..cec819d 100644
--- a/go/gcexportdata/gcexportdata.go
+++ b/go/gcexportdata/gcexportdata.go
@@ -50,11 +50,24 @@
 // additional trailing data beyond the end of the export data.
 func NewReader(r io.Reader) (io.Reader, error) {
 	buf := bufio.NewReader(r)
-	_, err := gcimporter.FindExportData(buf)
-	// If we ever switch to a zip-like archive format with the ToC
-	// at the end, we can return the correct portion of export data,
-	// but for now we must return the entire rest of the file.
-	return buf, err
+	_, size, err := gcimporter.FindExportData(buf)
+	if err != nil {
+		return nil, err
+	}
+
+	if size >= 0 {
+		// We were given an archive and found the __.PKGDEF in it.
+		// This tells us the size of the export data, and we don't
+		// need to return the entire file.
+		return &io.LimitedReader{
+			R: buf,
+			N: size,
+		}, nil
+	} else {
+		// We were given an object file. As such, we don't know how large
+		// the export data is and must return the entire file.
+		return buf, nil
+	}
 }
 
 // Read reads export data from in, decodes it, and returns type
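With this change NewReader returns an io.LimitedReader spanning just the export data when the input is an archive, while object files still yield the rest of the file; the usual decode path is unchanged. A hedged end-to-end sketch follows, looking up "fmt" via gcexportdata.Find (on toolchains that do not ship compiled standard-library packages, Find may return an empty filename):

```go
package main

import (
	"fmt"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	// Locate the compiled export data for "fmt".
	filename, path := gcexportdata.Find("fmt", "")
	if filename == "" {
		log.Fatal("can't find export data for fmt")
	}

	f, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// NewReader now limits the reader to the export data when given a .a file.
	r, err := gcexportdata.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}

	fset := token.NewFileSet()
	imports := make(map[string]*types.Package)
	pkg, err := gcexportdata.Read(r, fset, imports, path)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Path(), "has", pkg.Scope().Len(), "top-level objects")
}
```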
diff --git a/go/internal/gccgoimporter/gccgoinstallation_test.go b/go/internal/gccgoimporter/gccgoinstallation_test.go
index f5bc22b..5bf7f11 100644
--- a/go/internal/gccgoimporter/gccgoinstallation_test.go
+++ b/go/internal/gccgoimporter/gccgoinstallation_test.go
@@ -9,6 +9,7 @@
 
 import (
 	"go/types"
+	"runtime"
 	"testing"
 )
 
@@ -156,6 +157,12 @@
 	if gpath == "" {
 		t.Skip("This test needs gccgo")
 	}
+	if runtime.GOOS == "aix" {
+		// We don't yet have a debug/xcoff package for reading
+		// object files on AIX. Remove this skip if/when issue #29038
+		// is implemented (see also issue #49445).
+		t.Skip("no support yet for debug/xcoff")
+	}
 
 	var inst GccgoInstallation
 	err := inst.InitFromDriver(gpath)
@@ -187,7 +194,7 @@
 		{pkgpath: "io", name: "ReadWriter", want: "type ReadWriter interface{Reader; Writer}"},
 		{pkgpath: "math", name: "Pi", want: "const Pi untyped float"},
 		{pkgpath: "math", name: "Sin", want: "func Sin(x float64) float64"},
-		{pkgpath: "sort", name: "Ints", want: "func Ints(a []int)"},
+		{pkgpath: "sort", name: "Search", want: "func Search(n int, f func(int) bool) int"},
 		{pkgpath: "unsafe", name: "Pointer", want: "type Pointer"},
 	} {
 		runImporterTest(t, imp, nil, &test)
diff --git a/go/internal/gccgoimporter/importer_test.go b/go/internal/gccgoimporter/importer_test.go
index d6fe970..7adffd0 100644
--- a/go/internal/gccgoimporter/importer_test.go
+++ b/go/internal/gccgoimporter/importer_test.go
@@ -10,11 +10,11 @@
 
 import (
 	"go/types"
-	"io/ioutil"
 	"os"
 	"os/exec"
 	"path/filepath"
 	"regexp"
+	"runtime"
 	"strconv"
 	"testing"
 )
@@ -133,6 +133,12 @@
 	if gpath == "" {
 		t.Skip("This test needs gccgo")
 	}
+	if runtime.GOOS == "aix" {
+		// We don't yet have a debug/xcoff package for reading
+		// object files on AIX. Remove this skip if/when issue #29038
+		// is implemented (see also issue #49445).
+		t.Skip("no support yet for debug/xcoff")
+	}
 
 	verout, err := exec.Command(gpath, "--version").CombinedOutput()
 	if err != nil {
@@ -153,21 +159,11 @@
 	}
 	t.Logf("gccgo version %d.%d", major, minor)
 
-	tmpdir, err := ioutil.TempDir("", "TestObjImporter")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(tmpdir)
-
+	tmpdir := t.TempDir()
 	initmap := make(map[*types.Package]InitData)
 	imp := GetImporter([]string{tmpdir}, initmap)
 
-	artmpdir, err := ioutil.TempDir("", "TestObjImporter")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(artmpdir)
-
+	artmpdir := t.TempDir()
 	arinitmap := make(map[*types.Package]InitData)
 	arimp := GetImporter([]string{artmpdir}, arinitmap)
 
diff --git a/go/internal/gccgoimporter/testdata/escapeinfo.gox b/go/internal/gccgoimporter/testdata/escapeinfo.gox
index 1db8156..94ce039 100644
--- a/go/internal/gccgoimporter/testdata/escapeinfo.gox
+++ b/go/internal/gccgoimporter/testdata/escapeinfo.gox
Binary files differ
diff --git a/go/internal/gccgoimporter/testdata/time.gox b/go/internal/gccgoimporter/testdata/time.gox
index 80c2dbc..a6822ea 100644
--- a/go/internal/gccgoimporter/testdata/time.gox
+++ b/go/internal/gccgoimporter/testdata/time.gox
Binary files differ
diff --git a/go/internal/gccgoimporter/testdata/unicode.gox b/go/internal/gccgoimporter/testdata/unicode.gox
index e70e539..ae1a6f7 100644
--- a/go/internal/gccgoimporter/testdata/unicode.gox
+++ b/go/internal/gccgoimporter/testdata/unicode.gox
Binary files differ
diff --git a/go/internal/gccgoimporter/testdata/v1reflect.gox b/go/internal/gccgoimporter/testdata/v1reflect.gox
index ea46841..d693fe6 100644
--- a/go/internal/gccgoimporter/testdata/v1reflect.gox
+++ b/go/internal/gccgoimporter/testdata/v1reflect.gox
Binary files differ
diff --git a/go/internal/gcimporter/bexport.go b/go/internal/gcimporter/bexport.go
index a807d0a..0a3cdb9 100644
--- a/go/internal/gcimporter/bexport.go
+++ b/go/internal/gcimporter/bexport.go
@@ -34,9 +34,6 @@
 // (suspected) format errors, and whenever a change is made to the format.
 const debugFormat = false // default: false
 
-// If trace is set, debugging output is printed to std out.
-const trace = false // default: false
-
 // Current export format version. Increase with each format change.
 // Note: The latest binary (non-indexed) export format is at version 6.
 //       This exporter is still at level 4, but it doesn't matter since
@@ -92,16 +89,18 @@
 // BExportData returns binary export data for pkg.
 // If no file set is provided, position info will be missing.
 func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
-	defer func() {
-		if e := recover(); e != nil {
-			if ierr, ok := e.(internalError); ok {
-				err = ierr
-				return
+	if !debug {
+		defer func() {
+			if e := recover(); e != nil {
+				if ierr, ok := e.(internalError); ok {
+					err = ierr
+					return
+				}
+				// Not an internal error; panic again.
+				panic(e)
 			}
-			// Not an internal error; panic again.
-			panic(e)
-		}
-	}()
+		}()
+	}
 
 	p := exporter{
 		fset:          fset,
diff --git a/go/internal/gcimporter/bexport_test.go b/go/internal/gcimporter/bexport_test.go
index 702278e..3da5397 100644
--- a/go/internal/gcimporter/bexport_test.go
+++ b/go/internal/gcimporter/bexport_test.go
@@ -15,12 +15,16 @@
 	"path/filepath"
 	"reflect"
 	"runtime"
+	"sort"
 	"strings"
 	"testing"
 
+	"golang.org/x/tools/go/ast/inspector"
 	"golang.org/x/tools/go/buildutil"
 	"golang.org/x/tools/go/internal/gcimporter"
 	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/internal/typeparams"
+	"golang.org/x/tools/internal/typeparams/genericfeatures"
 )
 
 var isRace = false
@@ -35,6 +39,9 @@
 	if isRace {
 		t.Skipf("stdlib tests take too long in race mode and flake on builders")
 	}
+	if testing.Short() {
+		t.Skip("skipping RAM hungry test in -short mode")
+	}
 
 	// Load, parse and type-check the program.
 	ctxt := build.Default // copy
@@ -42,6 +49,9 @@
 	conf := loader.Config{
 		Build:       &ctxt,
 		AllowErrors: true,
+		TypeChecker: types.Config{
+			Error: func(err error) { t.Log(err) },
+		},
 	}
 	for _, path := range buildutil.AllPackages(conf.Build) {
 		conf.Import(path)
@@ -64,14 +74,22 @@
 	}
 
 	numPkgs := len(prog.AllPackages)
-	if want := 248; numPkgs < want {
+	if want := minStdlibPackages; numPkgs < want {
 		t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
 	}
 
+	checked := 0
 	for pkg, info := range prog.AllPackages {
 		if info.Files == nil {
 			continue // empty directory
 		}
+		// Binary export does not support generic code.
+		inspect := inspector.New(info.Files)
+		if genericfeatures.ForPackage(inspect, &info.Info) != 0 {
+			t.Logf("skipping package %q which uses generics", pkg.Path())
+			continue
+		}
+		checked++
 		exportdata, err := gcimporter.BExportData(conf.Fset, pkg)
 		if err != nil {
 			t.Fatal(err)
@@ -114,6 +132,9 @@
 			}
 		}
 	}
+	if want := minStdlibPackages; checked < want {
+		t.Errorf("Checked only %d packages, want at least %d", checked, want)
+	}
 }
 
 func fileLine(fset *token.FileSet, obj types.Object) string {
@@ -199,6 +220,9 @@
 				return fmt.Errorf("mismatched %s method: %s", xm.Name(), err)
 			}
 		}
+		// Constraints are handled explicitly in the *TypeParam case below, so we
+		// don't yet need to consider embeddeds here.
+		// TODO(rfindley): consider the type set here.
 	case *types.Array:
 		y := y.(*types.Array)
 		if x.Len() != y.Len() {
@@ -230,9 +254,7 @@
 		}
 	case *types.Named:
 		y := y.(*types.Named)
-		if x.String() != y.String() {
-			return fmt.Errorf("unequal named types: %s vs %s", x, y)
-		}
+		return cmpNamed(x, y)
 	case *types.Pointer:
 		y := y.(*types.Pointer)
 		if err := equalType(x.Elem(), y.Elem()); err != nil {
@@ -262,6 +284,12 @@
 			// 	return fmt.Errorf("receiver: %s", err)
 			// }
 		}
+		if err := equalTypeParams(typeparams.ForSignature(x), typeparams.ForSignature(y)); err != nil {
+			return fmt.Errorf("type params: %s", err)
+		}
+		if err := equalTypeParams(typeparams.RecvTypeParams(x), typeparams.RecvTypeParams(y)); err != nil {
+			return fmt.Errorf("recv type params: %s", err)
+		}
 	case *types.Slice:
 		y := y.(*types.Slice)
 		if err := equalType(x.Elem(), y.Elem()); err != nil {
@@ -297,6 +325,108 @@
 				return fmt.Errorf("tuple element %d: %s", i, err)
 			}
 		}
+	case *typeparams.TypeParam:
+		y := y.(*typeparams.TypeParam)
+		if x.String() != y.String() {
+			return fmt.Errorf("unequal named types: %s vs %s", x, y)
+		}
+		// For now, just compare constraints by type string to short-circuit
+		// cycles. We have to make interfaces explicit as export data currently
+		// doesn't support marking interfaces as implicit.
+		// TODO(rfindley): remove makeExplicit once export data contains an
+		// implicit bit.
+		xc := makeExplicit(x.Constraint()).String()
+		yc := makeExplicit(y.Constraint()).String()
+		if xc != yc {
+			return fmt.Errorf("unequal constraints: %s vs %s", xc, yc)
+		}
+
+	default:
+		panic(fmt.Sprintf("unexpected %T type", x))
+	}
+	return nil
+}
+
+// cmpNamed compares two named types x and y, returning an error for any
+// discrepancies. It does not compare their underlying types.
+func cmpNamed(x, y *types.Named) error {
+	xOrig := typeparams.NamedTypeOrigin(x)
+	yOrig := typeparams.NamedTypeOrigin(y)
+	if xOrig.String() != yOrig.String() {
+		return fmt.Errorf("unequal named types: %s vs %s", x, y)
+	}
+	if err := equalTypeParams(typeparams.ForNamed(x), typeparams.ForNamed(y)); err != nil {
+		return fmt.Errorf("type parameters: %s", err)
+	}
+	if err := equalTypeArgs(typeparams.NamedTypeArgs(x), typeparams.NamedTypeArgs(y)); err != nil {
+		return fmt.Errorf("type arguments: %s", err)
+	}
+	if x.NumMethods() != y.NumMethods() {
+		return fmt.Errorf("unequal methods: %d vs %d",
+			x.NumMethods(), y.NumMethods())
+	}
+	// Unfortunately method sorting is not canonical, so sort before comparing.
+	var xms, yms []*types.Func
+	for i := 0; i < x.NumMethods(); i++ {
+		xms = append(xms, x.Method(i))
+		yms = append(yms, y.Method(i))
+	}
+	for _, ms := range [][]*types.Func{xms, yms} {
+		sort.Slice(ms, func(i, j int) bool {
+			return ms[i].Name() < ms[j].Name()
+		})
+	}
+	for i, xm := range xms {
+		ym := yms[i]
+		if xm.Name() != ym.Name() {
+			return fmt.Errorf("mismatched %dth method: %s vs %s", i, xm, ym)
+		}
+		// Calling equalType here leads to infinite recursion, so just compare
+		// strings.
+		if xm.String() != ym.String() {
+			return fmt.Errorf("unequal methods: %s vs %s", x, y)
+		}
+	}
+	return nil
+}
+
+// makeExplicit returns an explicit version of typ, if typ is an implicit
+// interface. Otherwise it returns typ unmodified.
+func makeExplicit(typ types.Type) types.Type {
+	if iface, _ := typ.(*types.Interface); iface != nil && typeparams.IsImplicit(iface) {
+		var methods []*types.Func
+		for i := 0; i < iface.NumExplicitMethods(); i++ {
+			methods = append(methods, iface.Method(i))
+		}
+		var embeddeds []types.Type
+		for i := 0; i < iface.NumEmbeddeds(); i++ {
+			embeddeds = append(embeddeds, iface.EmbeddedType(i))
+		}
+		return types.NewInterfaceType(methods, embeddeds)
+	}
+	return typ
+}
+
+func equalTypeArgs(x, y *typeparams.TypeList) error {
+	if x.Len() != y.Len() {
+		return fmt.Errorf("unequal lengths: %d vs %d", x.Len(), y.Len())
+	}
+	for i := 0; i < x.Len(); i++ {
+		if err := equalType(x.At(i), y.At(i)); err != nil {
+			return fmt.Errorf("type %d: %s", i, err)
+		}
+	}
+	return nil
+}
+
+func equalTypeParams(x, y *typeparams.TypeParamList) error {
+	if x.Len() != y.Len() {
+		return fmt.Errorf("unequal lengths: %d vs %d", x.Len(), y.Len())
+	}
+	for i := 0; i < x.Len(); i++ {
+		if err := equalType(x.At(i), y.At(i)); err != nil {
+			return fmt.Errorf("type parameter %d: %s", i, err)
+		}
 	}
 	return nil
 }
diff --git a/go/internal/gcimporter/bimport.go b/go/internal/gcimporter/bimport.go
index e9f73d1..b85de01 100644
--- a/go/internal/gcimporter/bimport.go
+++ b/go/internal/gcimporter/bimport.go
@@ -74,9 +74,10 @@
 		pathList:   []string{""}, // empty string is mapped to 0
 		fake: fakeFileSet{
 			fset:  fset,
-			files: make(map[string]*token.File),
+			files: make(map[string]*fileInfo),
 		},
 	}
+	defer p.fake.setLines() // set lines for files in fset
 
 	// read version info
 	var versionstr string
@@ -338,37 +339,49 @@
 // Synthesize a token.Pos
 type fakeFileSet struct {
 	fset  *token.FileSet
-	files map[string]*token.File
+	files map[string]*fileInfo
 }
 
+type fileInfo struct {
+	file     *token.File
+	lastline int
+}
+
+const maxlines = 64 * 1024
+
 func (s *fakeFileSet) pos(file string, line, column int) token.Pos {
 	// TODO(mdempsky): Make use of column.
 
-	// Since we don't know the set of needed file positions, we
-	// reserve maxlines positions per file.
-	const maxlines = 64 * 1024
+	// Since we don't know the set of needed file positions, we reserve maxlines
+	// positions per file. We delay calling token.File.SetLines until all
+	// positions have been calculated (by way of fakeFileSet.setLines), so that
+	// we can avoid setting unnecessary lines. See also golang/go#46586.
 	f := s.files[file]
 	if f == nil {
-		f = s.fset.AddFile(file, -1, maxlines)
+		f = &fileInfo{file: s.fset.AddFile(file, -1, maxlines)}
 		s.files[file] = f
-		// Allocate the fake linebreak indices on first use.
-		// TODO(adonovan): opt: save ~512KB using a more complex scheme?
-		fakeLinesOnce.Do(func() {
-			fakeLines = make([]int, maxlines)
-			for i := range fakeLines {
-				fakeLines[i] = i
-			}
-		})
-		f.SetLines(fakeLines)
 	}
-
 	if line > maxlines {
 		line = 1
 	}
+	if line > f.lastline {
+		f.lastline = line
+	}
 
-	// Treat the file as if it contained only newlines
-	// and column=1: use the line number as the offset.
-	return f.Pos(line - 1)
+	// Return a fake position assuming that f.file consists only of newlines.
+	return token.Pos(f.file.Base() + line - 1)
+}
+
+func (s *fakeFileSet) setLines() {
+	fakeLinesOnce.Do(func() {
+		fakeLines = make([]int, maxlines)
+		for i := range fakeLines {
+			fakeLines[i] = i
+		}
+	})
+	for _, f := range s.files {
+		f.file.SetLines(fakeLines[:f.lastline])
+	}
 }
 
 var (
@@ -1029,6 +1042,7 @@
 			// used internally by gc; never used by this package or in .a files
 			anyType{},
 		}
+		predecl = append(predecl, additionalPredeclared()...)
 	})
 	return predecl
 }
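The bimport.go change defers token.File.SetLines until all fake positions are known (fakeFileSet.setLines), so each file's line table is trimmed to the highest line actually used instead of always holding 64K entries (golang/go#46586). Below is a self-contained sketch of that idea using only go/token; the pos helper is my own, not the patch's API:

```go
package main

import (
	"fmt"
	"go/token"
)

const maxlines = 64 * 1024

func main() {
	fset := token.NewFileSet()
	f := fset.AddFile("fake.go", -1, maxlines)

	// Synthesize positions immediately as base+line-1, recording only the
	// highest line referenced.
	lastline := 0
	pos := func(line int) token.Pos {
		if line > maxlines {
			line = 1
		}
		if line > lastline {
			lastline = line
		}
		// Treat the file as if it contained only newlines: offset == line-1.
		return token.Pos(f.Base() + line - 1)
	}

	p1, p2 := pos(10), pos(42)

	// Install only as many fake line offsets as were actually needed.
	lines := make([]int, lastline)
	for i := range lines {
		lines[i] = i
	}
	f.SetLines(lines)

	fmt.Println(fset.Position(p1), fset.Position(p2)) // fake.go:10:1 fake.go:42:1
}
```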
diff --git a/go/internal/gcimporter/exportdata.go b/go/internal/gcimporter/exportdata.go
index f33dc56..f6437fe 100644
--- a/go/internal/gcimporter/exportdata.go
+++ b/go/internal/gcimporter/exportdata.go
@@ -16,7 +16,7 @@
 	"strings"
 )
 
-func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
+func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) {
 	// See $GOROOT/include/ar.h.
 	hdr := make([]byte, 16+12+6+6+8+10+2)
 	_, err = io.ReadFull(r, hdr)
@@ -28,7 +28,8 @@
 		fmt.Printf("header: %s", hdr)
 	}
 	s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
-	size, err = strconv.Atoi(s)
+	length, err := strconv.Atoi(s)
+	size = int64(length)
 	if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
 		err = fmt.Errorf("invalid archive header")
 		return
@@ -42,8 +43,8 @@
 // file by reading from it. The reader must be positioned at the
 // start of the file before calling this function. The hdr result
 // is the string before the export data, either "$$" or "$$B".
-//
-func FindExportData(r *bufio.Reader) (hdr string, err error) {
+// The size result is the length of the export data in bytes, or -1 if not known.
+func FindExportData(r *bufio.Reader) (hdr string, size int64, err error) {
 	// Read first line to make sure this is an object file.
 	line, err := r.ReadSlice('\n')
 	if err != nil {
@@ -54,7 +55,7 @@
 	if string(line) == "!<arch>\n" {
 		// Archive file. Scan to __.PKGDEF.
 		var name string
-		if name, _, err = readGopackHeader(r); err != nil {
+		if name, size, err = readGopackHeader(r); err != nil {
 			return
 		}
 
@@ -70,6 +71,7 @@
 			err = fmt.Errorf("can't find export data (%v)", err)
 			return
 		}
+		size -= int64(len(line))
 	}
 
 	// Now at __.PKGDEF in archive or still at beginning of file.
@@ -86,8 +88,12 @@
 			err = fmt.Errorf("can't find export data (%v)", err)
 			return
 		}
+		size -= int64(len(line))
 	}
 	hdr = string(line)
+	if size < 0 {
+		size = -1
+	}
 
 	return
 }
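FindExportData now also reports the size of the export data, derived from the decimal size field of the __.PKGDEF archive member header parsed by readGopackHeader. The sketch below mirrors that fixed ar header layout on a fabricated header; the helper name and demo values are illustrative only:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// arEntrySize extracts the decimal size field from a 60-byte ar member
// header, mirroring readGopackHeader: 16 bytes name, 12 mtime, 6 uid,
// 6 gid, 8 mode, 10 size, then the 2-byte terminator "`\n".
func arEntrySize(hdr []byte) (int64, error) {
	if len(hdr) != 60 || hdr[58] != '`' || hdr[59] != '\n' {
		return 0, fmt.Errorf("invalid archive header")
	}
	s := strings.TrimSpace(string(hdr[48:58]))
	n, err := strconv.Atoi(s)
	return int64(n), err
}

func main() {
	// A fabricated header for a member named __.PKGDEF of 1234 bytes.
	hdr := []byte(fmt.Sprintf("%-16s%-12s%-6s%-6s%-8s%-10d`\n",
		"__.PKGDEF", "0", "0", "0", "644", 1234))
	size, err := arEntrySize(hdr)
	fmt.Println(size, err) // 1234 <nil>
}
```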
diff --git a/go/internal/gcimporter/gcimporter.go b/go/internal/gcimporter/gcimporter.go
index e8cba6b..3ab6683 100644
--- a/go/internal/gcimporter/gcimporter.go
+++ b/go/internal/gcimporter/gcimporter.go
@@ -29,8 +29,14 @@
 	"text/scanner"
 )
 
-// debugging/development support
-const debug = false
+const (
+	// Enable debug during development: it adds some additional checks, and
+	// prevents errors from being recovered.
+	debug = false
+
+	// If trace is set, debugging output is printed to std out.
+	trace = false
+)
 
 var pkgExts = [...]string{".a", ".o"}
 
@@ -179,7 +185,7 @@
 
 	var hdr string
 	buf := bufio.NewReader(rc)
-	if hdr, err = FindExportData(buf); err != nil {
+	if hdr, _, err = FindExportData(buf); err != nil {
 		return
 	}
 
diff --git a/go/internal/gcimporter/gcimporter_test.go b/go/internal/gcimporter/gcimporter_test.go
index e5eb9ed..6baab01 100644
--- a/go/internal/gcimporter/gcimporter_test.go
+++ b/go/internal/gcimporter/gcimporter_test.go
@@ -10,6 +10,7 @@
 import (
 	"bytes"
 	"fmt"
+	"go/build"
 	"go/constant"
 	"go/types"
 	"io/ioutil"
@@ -31,20 +32,6 @@
 
 // ----------------------------------------------------------------------------
 
-// skipSpecialPlatforms causes the test to be skipped for platforms where
-// builders (build.golang.org) don't have access to compiled packages for
-// import.
-func skipSpecialPlatforms(t *testing.T) {
-	switch platform := runtime.GOOS + "-" + runtime.GOARCH; platform {
-	case "nacl-amd64p32",
-		"nacl-386",
-		"nacl-arm",
-		"darwin-arm",
-		"darwin-arm64":
-		t.Skipf("no compiled packages available for import on %s", platform)
-	}
-}
-
 func needsCompiler(t *testing.T, compiler string) {
 	if runtime.Compiler == compiler {
 		return
@@ -66,7 +53,7 @@
 	}
 	basename := filepath.Base(filename)
 	outname := filepath.Join(outdirname, basename[:len(basename)-2]+"o")
-	cmd := exec.Command("go", "tool", "compile", "-o", outname, filename)
+	cmd := exec.Command("go", "tool", "compile", "-p=p", "-o", outname, filename)
 	cmd.Dir = dirname
 	out, err := cmd.CombinedOutput()
 	if err != nil {
@@ -162,7 +149,9 @@
 }
 
 func TestVersionHandling(t *testing.T) {
-	skipSpecialPlatforms(t) // we really only need to exclude nacl platforms, but this is fine
+	if debug {
+		t.Skip("TestVersionHandling panics in debug mode")
+	}
 
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
@@ -241,8 +230,6 @@
 }
 
 func TestImportStdLib(t *testing.T) {
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -269,7 +256,7 @@
 	{"go/internal/gcimporter.FindPkg", "func FindPkg(path string, srcDir string) (filename string, id string)"},
 
 	// interfaces
-	{"context.Context", "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key interface{}) interface{}}"},
+	{"context.Context", "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key any) any}"},
 	{"crypto.Decrypter", "type Decrypter interface{Decrypt(rand io.Reader, msg []byte, opts DecrypterOpts) (plaintext []byte, err error); Public() PublicKey}"},
 	{"encoding.BinaryMarshaler", "type BinaryMarshaler interface{MarshalBinary() (data []byte, err error)}"},
 	{"io.Reader", "type Reader interface{Read(p []byte) (n int, err error)}"},
@@ -278,10 +265,20 @@
 	{"go/types.Type", "type Type interface{String() string; Underlying() Type}"},
 }
 
+// TODO(rsc): Delete this init func after x/tools no longer needs to test successfully with Go 1.17.
+func init() {
+	if build.Default.ReleaseTags[len(build.Default.ReleaseTags)-1] <= "go1.17" {
+		for i := range importedObjectTests {
+			if importedObjectTests[i].name == "context.Context" {
+				// Expand any to interface{}.
+				importedObjectTests[i].want = "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key interface{}) interface{}}"
+			}
+		}
+	}
+}
+
 func TestImportedTypes(t *testing.T) {
 	testenv.NeedsGo1Point(t, 11)
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -291,6 +288,12 @@
 			continue // error reported elsewhere
 		}
 		got := types.ObjectString(obj, types.RelativeTo(obj.Pkg()))
+
+		// TODO(rsc): Delete this block once go.dev/cl/368254 lands.
+		if got != test.want && test.want == strings.ReplaceAll(got, "interface{}", "any") {
+			got = test.want
+		}
+
 		if got != test.want {
 			t.Errorf("%s: got %q; want %q", test.name, got, test.want)
 		}
@@ -303,8 +306,6 @@
 
 func TestImportedConsts(t *testing.T) {
 	testenv.NeedsGo1Point(t, 11)
-	skipSpecialPlatforms(t)
-
 	tests := []struct {
 		name string
 		want constant.Kind
@@ -386,8 +387,6 @@
 }
 
 func TestIssue5815(t *testing.T) {
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -413,8 +412,6 @@
 
 // Smoke test to ensure that imported methods get the correct package.
 func TestCorrectMethodPackage(t *testing.T) {
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -434,8 +431,6 @@
 }
 
 func TestIssue13566(t *testing.T) {
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -471,8 +466,6 @@
 }
 
 func TestIssue13898(t *testing.T) {
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -515,8 +508,6 @@
 }
 
 func TestIssue15517(t *testing.T) {
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -552,8 +543,6 @@
 }
 
 func TestIssue15920(t *testing.T) {
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -567,8 +556,6 @@
 }
 
 func TestIssue20046(t *testing.T) {
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
@@ -588,8 +575,6 @@
 
 func TestIssue25301(t *testing.T) {
 	testenv.NeedsGo1Point(t, 11)
-	skipSpecialPlatforms(t)
-
 	// This package only handles gc export data.
 	needsCompiler(t, "gc")
 
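The new init func above rewrites the expected context.Context string when the toolchain's newest release tag is at or below go1.17, i.e. before the any alias existed. A related, hedged way to gate on language version is a plain membership test over build.Default.ReleaseTags (a variant of, not the exact lexical comparison used by, the init func):

```go
package main

import (
	"fmt"
	"go/build"
)

// hasReleaseTag reports whether the running toolchain advertises the given
// release tag (for example "go1.18").
func hasReleaseTag(tag string) bool {
	for _, t := range build.Default.ReleaseTags {
		if t == tag {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println("go1.18 supported:", hasReleaseTag("go1.18"))
}
```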
diff --git a/go/internal/gcimporter/iexport.go b/go/internal/gcimporter/iexport.go
index d2fc8b6..9a4ff32 100644
--- a/go/internal/gcimporter/iexport.go
+++ b/go/internal/gcimporter/iexport.go
@@ -11,6 +11,7 @@
 import (
 	"bytes"
 	"encoding/binary"
+	"fmt"
 	"go/ast"
 	"go/constant"
 	"go/token"
@@ -19,11 +20,11 @@
 	"math/big"
 	"reflect"
 	"sort"
-)
+	"strconv"
+	"strings"
 
-// Current indexed export format version. Increase with each format change.
-// 0: Go1.11 encoding
-const iexportVersion = 0
+	"golang.org/x/tools/internal/typeparams"
+)
 
 // Current bundled export format version. Increase with each format change.
 // 0: initial implementation
@@ -35,31 +36,35 @@
 // The package path of the top-level package will not be recorded,
 // so that calls to IImportData can override with a provided package path.
 func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
-	return iexportCommon(out, fset, false, []*types.Package{pkg})
+	return iexportCommon(out, fset, false, iexportVersion, []*types.Package{pkg})
 }
 
 // IExportBundle writes an indexed export bundle for pkgs to out.
 func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error {
-	return iexportCommon(out, fset, true, pkgs)
+	return iexportCommon(out, fset, true, iexportVersion, pkgs)
 }
 
-func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*types.Package) (err error) {
-	defer func() {
-		if e := recover(); e != nil {
-			if ierr, ok := e.(internalError); ok {
-				err = ierr
-				return
+func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int, pkgs []*types.Package) (err error) {
+	if !debug {
+		defer func() {
+			if e := recover(); e != nil {
+				if ierr, ok := e.(internalError); ok {
+					err = ierr
+					return
+				}
+				// Not an internal error; panic again.
+				panic(e)
 			}
-			// Not an internal error; panic again.
-			panic(e)
-		}
-	}()
+		}()
+	}
 
 	p := iexporter{
 		fset:        fset,
+		version:     version,
 		allPkgs:     map[*types.Package]bool{},
 		stringIndex: map[string]uint64{},
 		declIndex:   map[types.Object]uint64{},
+		tparamNames: map[types.Object]string{},
 		typIndex:    map[types.Type]uint64{},
 	}
 	if !bundle {
@@ -119,7 +124,7 @@
 	if bundle {
 		hdr.uint64(bundleVersion)
 	}
-	hdr.uint64(iexportVersion)
+	hdr.uint64(uint64(p.version))
 	hdr.uint64(uint64(p.strings.Len()))
 	hdr.uint64(dataLen)
 
@@ -136,8 +141,12 @@
 // non-compiler tools and includes a complete package description
 // (i.e., name and height).
 func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
+	type pkgObj struct {
+		obj  types.Object
+		name string // qualified name; differs from obj.Name for type params
+	}
 	// Build a map from packages to objects from that package.
-	pkgObjs := map[*types.Package][]types.Object{}
+	pkgObjs := map[*types.Package][]pkgObj{}
 
 	// For the main index, make sure to include every package that
 	// we reference, even if we're not exporting (or reexporting)
@@ -150,7 +159,8 @@
 	}
 
 	for obj := range index {
-		pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj)
+		name := w.p.exportName(obj)
+		pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], pkgObj{obj, name})
 	}
 
 	var pkgs []*types.Package
@@ -158,7 +168,7 @@
 		pkgs = append(pkgs, pkg)
 
 		sort.Slice(objs, func(i, j int) bool {
-			return objs[i].Name() < objs[j].Name()
+			return objs[i].name < objs[j].name
 		})
 	}
 
@@ -175,15 +185,25 @@
 		objs := pkgObjs[pkg]
 		w.uint64(uint64(len(objs)))
 		for _, obj := range objs {
-			w.string(obj.Name())
-			w.uint64(index[obj])
+			w.string(obj.name)
+			w.uint64(index[obj.obj])
 		}
 	}
 }
 
+// exportName returns the 'exported' name of an object. It differs from
+// obj.Name() only for type parameters (see tparamExportName for details).
+func (p *iexporter) exportName(obj types.Object) (res string) {
+	if name := p.tparamNames[obj]; name != "" {
+		return name
+	}
+	return obj.Name()
+}
+
 type iexporter struct {
-	fset *token.FileSet
-	out  *bytes.Buffer
+	fset    *token.FileSet
+	out     *bytes.Buffer
+	version int
 
 	localpkg *types.Package
 
@@ -197,9 +217,21 @@
 	strings     intWriter
 	stringIndex map[string]uint64
 
-	data0     intWriter
-	declIndex map[types.Object]uint64
-	typIndex  map[types.Type]uint64
+	data0       intWriter
+	declIndex   map[types.Object]uint64
+	tparamNames map[types.Object]string // typeparam->exported name
+	typIndex    map[types.Type]uint64
+
+	indent int // for tracing support
+}
+
+func (p *iexporter) trace(format string, args ...interface{}) {
+	if !trace {
+		// Call sites should also be guarded, but having this check here allows
+		// easily enabling/disabling debug trace statements.
+		return
+	}
+	fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
 }
 
 // stringOff returns the offset of s within the string section.
@@ -219,13 +251,16 @@
 // pushDecl adds n to the declaration work queue, if not already present.
 func (p *iexporter) pushDecl(obj types.Object) {
 	// Package unsafe is known to the compiler and predeclared.
-	assert(obj.Pkg() != types.Unsafe)
+	// Caller should not ask us to export it.
+	if obj.Pkg() == types.Unsafe {
+		panic("cannot export package unsafe")
+	}
 
 	if _, ok := p.declIndex[obj]; ok {
 		return
 	}
 
-	p.declIndex[obj] = ^uint64(0) // mark n present in work queue
+	p.declIndex[obj] = ^uint64(0) // mark obj present in work queue
 	p.declTodo.pushTail(obj)
 }
 
@@ -233,10 +268,11 @@
 type exportWriter struct {
 	p *iexporter
 
-	data     intWriter
-	currPkg  *types.Package
-	prevFile string
-	prevLine int64
+	data       intWriter
+	currPkg    *types.Package
+	prevFile   string
+	prevLine   int64
+	prevColumn int64
 }
 
 func (w *exportWriter) exportPath(pkg *types.Package) string {
@@ -247,6 +283,14 @@
 }
 
 func (p *iexporter) doDecl(obj types.Object) {
+	if trace {
+		p.trace("exporting decl %v (%T)", obj, obj)
+		p.indent++
+		defer func() {
+			p.indent--
+			p.trace("=> %s", obj)
+		}()
+	}
 	w := p.newWriter()
 	w.setPkg(obj.Pkg(), false)
 
@@ -261,8 +305,24 @@
 		if sig.Recv() != nil {
 			panic(internalErrorf("unexpected method: %v", sig))
 		}
-		w.tag('F')
+
+		// Function.
+		if typeparams.ForSignature(sig).Len() == 0 {
+			w.tag('F')
+		} else {
+			w.tag('G')
+		}
 		w.pos(obj.Pos())
+		// The tparam list of the function type is the declaration of the type
+		// params. So, write out the type params right now. Then those type params
+		// will be referenced via their type offset (via typOff) in all other
+		// places in the signature and function where they are used.
+		//
+		// While exporting the type parameters, tparamList computes and records
+		// their export name, so that it can later be used when writing the index.
+		if tparams := typeparams.ForSignature(sig); tparams.Len() > 0 {
+			w.tparamList(obj.Name(), tparams, obj.Pkg())
+		}
 		w.signature(sig)
 
 	case *types.Const:
@@ -271,30 +331,56 @@
 		w.value(obj.Type(), obj.Val())
 
 	case *types.TypeName:
+		t := obj.Type()
+
+		if tparam, ok := t.(*typeparams.TypeParam); ok {
+			w.tag('P')
+			w.pos(obj.Pos())
+			constraint := tparam.Constraint()
+			if p.version >= iexportVersionGo1_18 {
+				implicit := false
+				if iface, _ := constraint.(*types.Interface); iface != nil {
+					implicit = typeparams.IsImplicit(iface)
+				}
+				w.bool(implicit)
+			}
+			w.typ(constraint, obj.Pkg())
+			break
+		}
+
 		if obj.IsAlias() {
 			w.tag('A')
 			w.pos(obj.Pos())
-			w.typ(obj.Type(), obj.Pkg())
+			w.typ(t, obj.Pkg())
 			break
 		}
 
 		// Defined type.
-		w.tag('T')
+		named, ok := t.(*types.Named)
+		if !ok {
+			panic(internalErrorf("%s is not a defined type", t))
+		}
+
+		if typeparams.ForNamed(named).Len() == 0 {
+			w.tag('T')
+		} else {
+			w.tag('U')
+		}
 		w.pos(obj.Pos())
 
+		if typeparams.ForNamed(named).Len() > 0 {
+			// While exporting the type parameters, tparamList computes and records
+			// their export name, so that it can later be used when writing the index.
+			w.tparamList(obj.Name(), typeparams.ForNamed(named), obj.Pkg())
+		}
+
 		underlying := obj.Type().Underlying()
 		w.typ(underlying, obj.Pkg())
 
-		t := obj.Type()
 		if types.IsInterface(t) {
 			break
 		}
 
-		named, ok := t.(*types.Named)
-		if !ok {
-			panic(internalErrorf("%s is not a defined type", t))
-		}
-
 		n := named.NumMethods()
 		w.uint64(uint64(n))
 		for i := 0; i < n; i++ {
@@ -302,6 +388,17 @@
 			w.pos(m.Pos())
 			w.string(m.Name())
 			sig, _ := m.Type().(*types.Signature)
+
+			// Receiver type parameters are type arguments of the receiver type, so
+			// their name must be qualified before exporting recv.
+			if rparams := typeparams.RecvTypeParams(sig); rparams.Len() > 0 {
+				prefix := obj.Name() + "." + m.Name()
+				for i := 0; i < rparams.Len(); i++ {
+					rparam := rparams.At(i)
+					name := tparamExportName(prefix, rparam)
+					w.p.tparamNames[rparam.Obj()] = name
+				}
+			}
 			w.param(sig.Recv())
 			w.signature(sig)
 		}
@@ -318,6 +415,48 @@
 }
 
 func (w *exportWriter) pos(pos token.Pos) {
+	if w.p.version >= iexportVersionPosCol {
+		w.posV1(pos)
+	} else {
+		w.posV0(pos)
+	}
+}
+
+func (w *exportWriter) posV1(pos token.Pos) {
+	if w.p.fset == nil {
+		w.int64(0)
+		return
+	}
+
+	p := w.p.fset.Position(pos)
+	file := p.Filename
+	line := int64(p.Line)
+	column := int64(p.Column)
+
+	deltaColumn := (column - w.prevColumn) << 1
+	deltaLine := (line - w.prevLine) << 1
+
+	if file != w.prevFile {
+		deltaLine |= 1
+	}
+	if deltaLine != 0 {
+		deltaColumn |= 1
+	}
+
+	w.int64(deltaColumn)
+	if deltaColumn&1 != 0 {
+		w.int64(deltaLine)
+		if deltaLine&1 != 0 {
+			w.string(file)
+		}
+	}
+
+	w.prevFile = file
+	w.prevLine = line
+	w.prevColumn = column
+}
+
+func (w *exportWriter) posV0(pos token.Pos) {
 	if w.p.fset == nil {
 		w.int64(0)
 		return
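posV1 encodes positions as deltas: the low bit of the column delta signals that a line delta follows, and the low bit of the line delta signals that a file name follows, so positions in the same file and line cost a single value. The sketch below round-trips that flag scheme over an in-memory slice instead of the varint byte stream; all names are illustrative, not the exporter's API:

```go
package main

import "fmt"

// pos is a simplified (file, line, column) triple.
type pos struct {
	file         string
	line, column int64
}

// item is one encoded position.
type item struct {
	deltaColumn, deltaLine int64
	file                   string
}

func encode(ps []pos) []item {
	var out []item
	var prev pos
	for _, p := range ps {
		dCol := (p.column - prev.column) << 1
		dLine := (p.line - prev.line) << 1
		if p.file != prev.file {
			dLine |= 1 // low bit: file name follows
		}
		if dLine != 0 {
			dCol |= 1 // low bit: line delta follows
		}
		it := item{deltaColumn: dCol}
		if dCol&1 != 0 {
			it.deltaLine = dLine
			if dLine&1 != 0 {
				it.file = p.file
			}
		}
		out = append(out, it)
		prev = p
	}
	return out
}

func decode(items []item) []pos {
	var out []pos
	var cur pos
	for _, it := range items {
		if it.deltaColumn&1 != 0 {
			if it.deltaLine&1 != 0 {
				cur.file = it.file
			}
			cur.line += it.deltaLine >> 1
		}
		cur.column += it.deltaColumn >> 1
		out = append(out, cur)
	}
	return out
}

func main() {
	src := []pos{{"a.go", 10, 3}, {"a.go", 10, 9}, {"a.go", 12, 1}, {"b.go", 1, 1}}
	fmt.Println(decode(encode(src))) // round-trips to the same positions
}
```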
@@ -359,10 +498,11 @@
 }
 
 func (w *exportWriter) qualifiedIdent(obj types.Object) {
+	name := w.p.exportName(obj)
+
 	// Ensure any referenced declarations are written out too.
 	w.p.pushDecl(obj)
-
-	w.string(obj.Name())
+	w.string(name)
 	w.pkg(obj.Pkg())
 }
 
@@ -396,11 +536,32 @@
 }
 
 func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
+	if trace {
+		w.p.trace("exporting type %s (%T)", t, t)
+		w.p.indent++
+		defer func() {
+			w.p.indent--
+			w.p.trace("=> %s", t)
+		}()
+	}
 	switch t := t.(type) {
 	case *types.Named:
+		if targs := typeparams.NamedTypeArgs(t); targs.Len() > 0 {
+			w.startType(instanceType)
+			// TODO(rfindley): investigate if this position is correct, and if it
+			// matters.
+			w.pos(t.Obj().Pos())
+			w.typeList(targs, pkg)
+			w.typ(typeparams.NamedTypeOrigin(t), pkg)
+			return
+		}
 		w.startType(definedType)
 		w.qualifiedIdent(t.Obj())
 
+	case *typeparams.TypeParam:
+		w.startType(typeParamType)
+		w.qualifiedIdent(t.Obj())
+
 	case *types.Pointer:
 		w.startType(pointerType)
 		w.typ(t.Elem(), pkg)
@@ -461,9 +622,14 @@
 		n := t.NumEmbeddeds()
 		w.uint64(uint64(n))
 		for i := 0; i < n; i++ {
-			f := t.Embedded(i)
-			w.pos(f.Obj().Pos())
-			w.typ(f.Obj().Type(), f.Obj().Pkg())
+			ft := t.EmbeddedType(i)
+			tPkg := pkg
+			if named, _ := ft.(*types.Named); named != nil {
+				w.pos(named.Obj().Pos())
+			} else {
+				w.pos(token.NoPos)
+			}
+			w.typ(ft, tPkg)
 		}
 
 		n = t.NumExplicitMethods()
@@ -476,6 +642,16 @@
 			w.signature(sig)
 		}
 
+	case *typeparams.Union:
+		w.startType(unionType)
+		nt := t.Len()
+		w.uint64(uint64(nt))
+		for i := 0; i < nt; i++ {
+			term := t.Term(i)
+			w.bool(term.Tilde())
+			w.typ(term.Type(), pkg)
+		}
+
 	default:
 		panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t)))
 	}
@@ -497,6 +673,56 @@
 	}
 }
 
+func (w *exportWriter) typeList(ts *typeparams.TypeList, pkg *types.Package) {
+	w.uint64(uint64(ts.Len()))
+	for i := 0; i < ts.Len(); i++ {
+		w.typ(ts.At(i), pkg)
+	}
+}
+
+func (w *exportWriter) tparamList(prefix string, list *typeparams.TypeParamList, pkg *types.Package) {
+	ll := uint64(list.Len())
+	w.uint64(ll)
+	for i := 0; i < list.Len(); i++ {
+		tparam := list.At(i)
+		// Set the type parameter exportName before exporting its type.
+		exportName := tparamExportName(prefix, tparam)
+		w.p.tparamNames[tparam.Obj()] = exportName
+		w.typ(list.At(i), pkg)
+	}
+}
+
+const blankMarker = "$"
+
+// tparamExportName returns the 'exported' name of a type parameter, which
+// differs from its actual object name: it is prefixed with a qualifier, and
+// blank type parameter names are disambiguated by their index in the type
+// parameter list.
+func tparamExportName(prefix string, tparam *typeparams.TypeParam) string {
+	assert(prefix != "")
+	name := tparam.Obj().Name()
+	if name == "_" {
+		name = blankMarker + strconv.Itoa(tparam.Index())
+	}
+	return prefix + "." + name
+}
+
+// tparamName returns the real name of a type parameter, after stripping its
+// qualifying prefix and reverting blank-name encoding. See tparamExportName
+// for details.
+func tparamName(exportName string) string {
+	// Remove the "path" from the type param name that makes it unique.
+	ix := strings.LastIndex(exportName, ".")
+	if ix < 0 {
+		errorf("malformed type parameter export name %s: missing prefix", exportName)
+	}
+	name := exportName[ix+1:]
+	if strings.HasPrefix(name, blankMarker) {
+		return "_"
+	}
+	return name
+}
+
 func (w *exportWriter) paramList(tup *types.Tuple) {
 	n := tup.Len()
 	w.uint64(uint64(n))
@@ -513,6 +739,9 @@
 
 func (w *exportWriter) value(typ types.Type, v constant.Value) {
 	w.typ(typ, nil)
+	if w.p.version >= iexportVersionGo1_18 {
+		w.int64(int64(v.Kind()))
+	}
 
 	switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
 	case types.IsBoolean:
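tparamExportName and tparamName form an encode/decode pair: type parameter names are qualified with their owner (for example "Pair.K" or "Recv.Method.T"), and blank names are replaced by "$<index>" so they stay unique within the prefix. A string-only sketch of the round trip (helper names are mine, not the package's):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

const blankMarker = "$"

// exportName mimics tparamExportName: qualify the name with its prefix and
// disambiguate blank ("_") parameters by their index.
func exportName(prefix, name string, index int) string {
	if name == "_" {
		name = blankMarker + strconv.Itoa(index)
	}
	return prefix + "." + name
}

// realName inverts exportName, as tparamName does on the import side.
func realName(exportName string) string {
	ix := strings.LastIndex(exportName, ".")
	name := exportName[ix+1:]
	if strings.HasPrefix(name, blankMarker) {
		return "_"
	}
	return name
}

func main() {
	fmt.Println(exportName("Pair", "K", 0))              // Pair.K
	fmt.Println(exportName("Pair", "_", 1))              // Pair.$1
	fmt.Println(realName("Pair.K"), realName("Pair.$1")) // K _
}
```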
diff --git a/go/internal/gcimporter/iexport_common_test.go b/go/internal/gcimporter/iexport_common_test.go
new file mode 100644
index 0000000..abc6aa6
--- /dev/null
+++ b/go/internal/gcimporter/iexport_common_test.go
@@ -0,0 +1,16 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gcimporter
+
+// Temporarily expose version-related functionality so that we can test at
+// specific export data versions.
+
+var IExportCommon = iexportCommon
+
+const (
+	IExportVersion         = iexportVersion
+	IExportVersionGenerics = iexportVersionGenerics
+	IExportVersionGo1_18   = iexportVersionGo1_18
+)
diff --git a/go/internal/gcimporter/iexport_go118_test.go b/go/internal/gcimporter/iexport_go118_test.go
new file mode 100644
index 0000000..5dfa258
--- /dev/null
+++ b/go/internal/gcimporter/iexport_go118_test.go
@@ -0,0 +1,254 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package gcimporter_test
+
+import (
+	"bytes"
+	"fmt"
+	"go/ast"
+	"go/importer"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"os"
+	"path/filepath"
+	"runtime"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/go/internal/gcimporter"
+)
+
+// TODO(rfindley): migrate this to testdata, as has been done in the standard library.
+func TestGenericExport(t *testing.T) {
+	const src = `
+package generic
+
+type Any any
+
+type T[A, B any] struct { Left A; Right B }
+
+func (T[P, Q]) m() {}
+
+var X T[int, string] = T[int, string]{1, "hi"}
+
+func ToInt[P interface{ ~int }](p P) int { return int(p) }
+
+var IntID = ToInt[int]
+
+type G[C comparable] int
+
+func ImplicitFunc[T ~int]() {}
+
+type ImplicitType[T ~int] int
+
+// Exercise constant import/export
+const C1 = 42
+const C2 int = 42
+const C3 float64 = 42
+
+type Constraint[T any] interface {
+       m(T)
+}
+
+// TODO(rfindley): revert to multiple blanks once the restriction on multiple
+// blanks is removed from the type checker.
+// type Blanks[_ any, _ Constraint[int]] int
+// func (Blanks[_, _]) m() {}
+type Blanks[_ any] int
+func (Blanks[_]) m() {}
+`
+	testExportSrc(t, []byte(src))
+}
+
+func testExportSrc(t *testing.T, src []byte) {
+	// This package only handles gc export data.
+	if runtime.Compiler != "gc" {
+		t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
+	}
+
+	fset := token.NewFileSet()
+	f, err := parser.ParseFile(fset, "g.go", src, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+	conf := types.Config{
+		Importer: importer.Default(),
+	}
+	pkg, err := conf.Check("", fset, []*ast.File{f}, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	// export
+	version := gcimporter.IExportVersion
+	data, err := iexport(fset, version, pkg)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	testPkgData(t, fset, version, pkg, data)
+}
+
+func TestImportTypeparamTests(t *testing.T) {
+	// Check go files in test/typeparam.
+	rootDir := filepath.Join(runtime.GOROOT(), "test", "typeparam")
+	list, err := os.ReadDir(rootDir)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if isUnifiedBuilder() {
+		t.Skip("unified export data format is currently unsupported")
+	}
+
+	for _, entry := range list {
+		if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") {
+			// For now, only consider standalone go files.
+			continue
+		}
+
+		t.Run(entry.Name(), func(t *testing.T) {
+			filename := filepath.Join(rootDir, entry.Name())
+			src, err := os.ReadFile(filename)
+			if err != nil {
+				t.Fatal(err)
+			}
+
+			if !bytes.HasPrefix(src, []byte("// run")) && !bytes.HasPrefix(src, []byte("// compile")) {
+				// We're bypassing the logic of run.go here, so be conservative about
+				// the files we consider in an attempt to make this test more robust to
+				// changes in test/typeparams.
+				t.Skipf("not detected as a run test")
+			}
+
+			testExportSrc(t, src)
+		})
+	}
+}
+
+func TestRecursiveExport_Issue51219(t *testing.T) {
+	const srca = `
+package a
+
+type Interaction[DataT InteractionDataConstraint] struct {
+}
+
+type InteractionDataConstraint interface {
+	[]byte |
+		UserCommandInteractionData
+}
+
+type UserCommandInteractionData struct {
+	resolvedInteractionWithOptions
+}
+
+type resolvedInteractionWithOptions struct {
+	Resolved Resolved
+}
+
+type Resolved struct {
+	Users ResolvedData[User]
+}
+
+type ResolvedData[T ResolvedDataConstraint] map[uint64]T
+
+type ResolvedDataConstraint interface {
+	User | Message
+}
+
+type User struct{}
+
+type Message struct {
+	Interaction *Interaction[[]byte]
+}
+`
+
+	const srcb = `
+package b
+
+import (
+	"a"
+)
+
+// InteractionRequest is an incoming request Interaction
+type InteractionRequest[T a.InteractionDataConstraint] struct {
+	a.Interaction[T]
+}
+`
+
+	const srcp = `
+package p
+
+import (
+	"b"
+)
+
+// ResponseWriterMock mocks corde's ResponseWriter interface
+type ResponseWriterMock struct {
+	x b.InteractionRequest[[]byte]
+}
+`
+
+	importer := &testImporter{
+		src: map[string][]byte{
+			"a": []byte(srca),
+			"b": []byte(srcb),
+			"p": []byte(srcp),
+		},
+		pkgs: make(map[string]*types.Package),
+	}
+	_, err := importer.Import("p")
+	if err != nil {
+		t.Fatal(err)
+	}
+}
+
+// testImporter is a helper to test chains of imports using export data.
+type testImporter struct {
+	src  map[string][]byte         // original source
+	pkgs map[string]*types.Package // memoized imported packages
+}
+
+func (t *testImporter) Import(path string) (*types.Package, error) {
+	if pkg, ok := t.pkgs[path]; ok {
+		return pkg, nil
+	}
+	src, ok := t.src[path]
+	if !ok {
+		return nil, fmt.Errorf("unknown path %v", path)
+	}
+
+	// Type-check, but don't return this package directly.
+	fset := token.NewFileSet()
+	f, err := parser.ParseFile(fset, path+".go", src, 0)
+	if err != nil {
+		return nil, err
+	}
+	conf := types.Config{
+		Importer: t,
+	}
+	pkg, err := conf.Check(path, fset, []*ast.File{f}, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	// Export and import to get the package imported from export data.
+	exportdata, err := iexport(fset, gcimporter.IExportVersion, pkg)
+	if err != nil {
+		return nil, err
+	}
+	imports := make(map[string]*types.Package)
+	fset2 := token.NewFileSet()
+	_, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
+	if err != nil {
+		return nil, err
+	}
+	t.pkgs[path] = pkg2
+	return pkg2, nil
+}
diff --git a/go/internal/gcimporter/iexport_test.go b/go/internal/gcimporter/iexport_test.go
index 5385011..f0e83e5 100644
--- a/go/internal/gcimporter/iexport_test.go
+++ b/go/internal/gcimporter/iexport_test.go
@@ -28,9 +28,11 @@
 	"strings"
 	"testing"
 
+	"golang.org/x/tools/go/ast/inspector"
 	"golang.org/x/tools/go/buildutil"
 	"golang.org/x/tools/go/internal/gcimporter"
 	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/internal/typeparams/genericfeatures"
 )
 
 func readExportFile(filename string) ([]byte, error) {
@@ -41,7 +43,7 @@
 	defer f.Close()
 
 	buf := bufio.NewReader(f)
-	if _, err := gcimporter.FindExportData(buf); err != nil {
+	if _, _, err := gcimporter.FindExportData(buf); err != nil {
 		return nil, err
 	}
 
@@ -54,14 +56,22 @@
 	return ioutil.ReadAll(buf)
 }
 
-func iexport(fset *token.FileSet, pkg *types.Package) ([]byte, error) {
+func iexport(fset *token.FileSet, version int, pkg *types.Package) ([]byte, error) {
 	var buf bytes.Buffer
-	if err := gcimporter.IExportData(&buf, fset, pkg); err != nil {
+	if err := gcimporter.IExportCommon(&buf, fset, false, version, []*types.Package{pkg}); err != nil {
 		return nil, err
 	}
 	return buf.Bytes(), nil
 }
 
+// isUnifiedBuilder reports whether we are executing on a go builder that uses
+// unified export data.
+func isUnifiedBuilder() bool {
+	return os.Getenv("GO_BUILDER_NAME") == "linux-amd64-unified"
+}
+
+const minStdlibPackages = 248
+
 func TestIExportData_stdlib(t *testing.T) {
 	if runtime.Compiler == "gccgo" {
 		t.Skip("gccgo standard library is inaccessible")
@@ -72,6 +82,9 @@
 	if isRace {
 		t.Skipf("stdlib tests take too long in race mode and flake on builders")
 	}
+	if testing.Short() {
+		t.Skip("skipping RAM hungry test in -short mode")
+	}
 
 	// Load, parse and type-check the program.
 	ctxt := build.Default // copy
@@ -81,6 +94,7 @@
 		AllowErrors: true,
 		TypeChecker: types.Config{
 			Sizes: types.SizesFor(ctxt.Compiler, ctxt.GOARCH),
+			Error: func(err error) { t.Log(err) },
 		},
 	}
 	for _, path := range buildutil.AllPackages(conf.Build) {
@@ -103,13 +117,19 @@
 		t.Fatalf("Load failed: %v", err)
 	}
 
-	numPkgs := len(prog.AllPackages)
-	if want := 248; numPkgs < want {
-		t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
-	}
-
 	var sorted []*types.Package
+	isUnified := isUnifiedBuilder()
 	for pkg, info := range prog.AllPackages {
+		// Temporarily skip packages that use generics on the unified builder, to
+		// fix TryBots.
+		//
+		// TODO(#48595): fix this test with GOEXPERIMENT=unified.
+		inspect := inspector.New(info.Files)
+		features := genericfeatures.ForPackage(inspect, &info.Info)
+		if isUnified && features != 0 {
+			t.Logf("skipping package %q which uses generics", pkg.Path())
+			continue
+		}
 		if info.Files != nil { // non-empty directory
 			sorted = append(sorted, pkg)
 		}
@@ -118,11 +138,17 @@
 		return sorted[i].Path() < sorted[j].Path()
 	})
 
+	version := gcimporter.IExportVersion
+	numPkgs := len(sorted)
+	if want := minStdlibPackages; numPkgs < want {
+		t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
+	}
+
 	for _, pkg := range sorted {
-		if exportdata, err := iexport(conf.Fset, pkg); err != nil {
+		if exportdata, err := iexport(conf.Fset, version, pkg); err != nil {
 			t.Error(err)
 		} else {
-			testPkgData(t, conf.Fset, pkg, exportdata)
+			testPkgData(t, conf.Fset, version, pkg, exportdata)
 		}
 
 		if pkg.Name() == "main" || pkg.Name() == "haserrors" {
@@ -132,7 +158,7 @@
 		} else if exportdata, err := readExportFile(bp.PkgObj); err != nil {
 			t.Log("warning:", err)
 		} else {
-			testPkgData(t, conf.Fset, pkg, exportdata)
+			testPkgData(t, conf.Fset, version, pkg, exportdata)
 		}
 	}
 
@@ -148,11 +174,11 @@
 	}
 
 	for i, pkg := range sorted {
-		testPkg(t, conf.Fset, pkg, fset2, pkgs2[i])
+		testPkg(t, conf.Fset, version, pkg, fset2, pkgs2[i])
 	}
 }
 
-func testPkgData(t *testing.T, fset *token.FileSet, pkg *types.Package, exportdata []byte) {
+func testPkgData(t *testing.T, fset *token.FileSet, version int, pkg *types.Package, exportdata []byte) {
 	imports := make(map[string]*types.Package)
 	fset2 := token.NewFileSet()
 	_, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
@@ -160,11 +186,11 @@
 		t.Errorf("IImportData(%s): %v", pkg.Path(), err)
 	}
 
-	testPkg(t, fset, pkg, fset2, pkg2)
+	testPkg(t, fset, version, pkg, fset2, pkg2)
 }
 
-func testPkg(t *testing.T, fset *token.FileSet, pkg *types.Package, fset2 *token.FileSet, pkg2 *types.Package) {
-	if _, err := iexport(fset2, pkg2); err != nil {
+func testPkg(t *testing.T, fset *token.FileSet, version int, pkg *types.Package, fset2 *token.FileSet, pkg2 *types.Package) {
+	if _, err := iexport(fset2, version, pkg2); err != nil {
 		t.Errorf("reexport %q: %v", pkg.Path(), err)
 	}
 
@@ -212,7 +238,7 @@
 	}
 
 	// export
-	exportdata, err := iexport(fset1, pkg)
+	exportdata, err := iexport(fset1, gcimporter.IExportVersion, pkg)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -255,7 +281,7 @@
 
 	// export
 	// use a nil fileset here to confirm that it doesn't panic
-	exportdata, err := iexport(nil, pkg1)
+	exportdata, err := iexport(nil, gcimporter.IExportVersion, pkg1)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -279,11 +305,11 @@
 	}
 	xt := x.Type()
 	yt := y.Type()
-	switch x.(type) {
+	switch x := x.(type) {
 	case *types.Var, *types.Func:
 		// ok
 	case *types.Const:
-		xval := x.(*types.Const).Val()
+		xval := x.Val()
 		yval := y.(*types.Const).Val()
 		equal := constant.Compare(xval, token.EQL, yval)
 		if !equal {
@@ -303,6 +329,25 @@
 			return fmt.Errorf("unequal constants %s vs %s", xval, yval)
 		}
 	case *types.TypeName:
+		if xalias, yalias := x.IsAlias(), y.(*types.TypeName).IsAlias(); xalias != yalias {
+			return fmt.Errorf("mismatching IsAlias(): %s vs %s", x, y)
+		}
+		// equalType does not recurse into the underlying types of named types, so
+		// we must pass the underlying type explicitly here. However, in doing this
+		// we may skip checking the features of the named types themselves, in
+		// situations where the type name is not referenced by the underlying or
+		// any other top-level declarations. Therefore, we must explicitly compare
+		// named types here, before passing their underlying types into equalType.
+		xn, _ := xt.(*types.Named)
+		yn, _ := yt.(*types.Named)
+		if (xn == nil) != (yn == nil) {
+			return fmt.Errorf("mismatching types: %T vs %T", xt, yt)
+		}
+		if xn != nil {
+			if err := cmpNamed(xn, yn); err != nil {
+				return err
+			}
+		}
 		xt = xt.Underlying()
 		yt = yt.Underlying()
 	default:
diff --git a/go/internal/gcimporter/iimport.go b/go/internal/gcimporter/iimport.go
index 8ed8bc6..1d5650a 100644
--- a/go/internal/gcimporter/iimport.go
+++ b/go/internal/gcimporter/iimport.go
@@ -18,6 +18,9 @@
 	"go/types"
 	"io"
 	"sort"
+	"strings"
+
+	"golang.org/x/tools/internal/typeparams"
 )
 
 type intReader struct {
@@ -41,6 +44,19 @@
 	return i
 }
 
+// Keep this in sync with constants in iexport.go.
+const (
+	iexportVersionGo1_11   = 0
+	iexportVersionPosCol   = 1
+	iexportVersionGo1_18   = 2
+	iexportVersionGenerics = 2
+)
+
+type ident struct {
+	pkg  string
+	name string
+}
+
 const predeclReserved = 32
 
 type itag uint64
@@ -56,6 +72,9 @@
 	signatureType
 	structType
 	interfaceType
+	typeParamType
+	instanceType
+	unionType
 )
 
 // IImportData imports a package from the serialized package data
@@ -78,15 +97,19 @@
 func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data []byte, bundle bool, path string) (pkgs []*types.Package, err error) {
 	const currentVersion = 1
 	version := int64(-1)
-	defer func() {
-		if e := recover(); e != nil {
-			if version > currentVersion {
-				err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
-			} else {
-				err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+	if !debug {
+		defer func() {
+			if e := recover(); e != nil {
+				if bundle {
+					err = fmt.Errorf("%v", e)
+				} else if version > currentVersion {
+					err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
+				} else {
+					err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+				}
 			}
-		}
-	}()
+		}()
+	}
 
 	r := &intReader{bytes.NewReader(data), path}
 
@@ -101,9 +124,13 @@
 
 	version = int64(r.uint64())
 	switch version {
-	case currentVersion, 0:
+	case iexportVersionGo1_18, iexportVersionPosCol, iexportVersionGo1_11:
 	default:
-		errorf("unknown iexport format version %d", version)
+		if version > iexportVersionGo1_18 {
+			errorf("unstable iexport format version %d, just rebuild compiler and std library", version)
+		} else {
+			errorf("unknown iexport format version %d", version)
+		}
 	}
 
 	sLen := int64(r.uint64())
@@ -115,8 +142,8 @@
 	r.Seek(sLen+dLen, io.SeekCurrent)
 
 	p := iimporter{
-		ipath:   path,
 		version: int(version),
+		ipath:   path,
 
 		stringData:  stringData,
 		stringCache: make(map[uint64]string),
@@ -125,12 +152,16 @@
 		declData: declData,
 		pkgIndex: make(map[*types.Package]map[string]uint64),
 		typCache: make(map[uint64]types.Type),
+		// Separate map for typeparams, keyed by their package and unique
+		// name.
+		tparamIndex: make(map[ident]types.Type),
 
 		fake: fakeFileSet{
 			fset:  fset,
-			files: make(map[string]*token.File),
+			files: make(map[string]*fileInfo),
 		},
 	}
+	defer p.fake.setLines() // set lines for files in fset
 
 	for i, pt := range predeclared() {
 		p.typCache[uint64(i)] = pt
@@ -208,6 +239,15 @@
 		pkg.MarkComplete()
 	}
 
+	// SetConstraint can't be called if the constraint type is not yet complete.
+	// When type params are created in the 'P' case of (*importReader).obj(),
+	// the associated constraint type may not be complete due to recursion.
+	// Therefore, we defer calling SetConstraint there, and call it here instead
+	// after all types are complete.
+	for _, d := range p.later {
+		typeparams.SetTypeParamConstraint(d.t, d.constraint)
+	}
+
 	for _, typ := range p.interfaceList {
 		typ.Complete()
 	}
@@ -215,23 +255,51 @@
 	return pkgs, nil
 }
 
+type setConstraintArgs struct {
+	t          *typeparams.TypeParam
+	constraint types.Type
+}
+
 type iimporter struct {
-	ipath   string
 	version int
+	ipath   string
 
 	stringData  []byte
 	stringCache map[uint64]string
 	pkgCache    map[uint64]*types.Package
 
-	declData []byte
-	pkgIndex map[*types.Package]map[string]uint64
-	typCache map[uint64]types.Type
+	declData    []byte
+	pkgIndex    map[*types.Package]map[string]uint64
+	typCache    map[uint64]types.Type
+	tparamIndex map[ident]types.Type
 
 	fake          fakeFileSet
 	interfaceList []*types.Interface
+
+	// Arguments for calls to SetConstraint that are deferred due to recursive types
+	later []setConstraintArgs
+
+	indent int // for tracing support
+}
+
+func (p *iimporter) trace(format string, args ...interface{}) {
+	if !trace {
+		// Call sites should also be guarded, but having this check here allows
+		// easily enabling/disabling debug trace statements.
+		return
+	}
+	fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
 }
 
 func (p *iimporter) doDecl(pkg *types.Package, name string) {
+	if debug {
+		p.trace("import decl %s", name)
+		p.indent++
+		defer func() {
+			p.indent--
+			p.trace("=> %s", name)
+		}()
+	}
 	// See if we've already imported this declaration.
 	if obj := pkg.Scope().Lookup(name); obj != nil {
 		return
@@ -273,7 +341,7 @@
 }
 
 func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
-	if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
+	if t, ok := p.typCache[off]; ok && canReuse(base, t) {
 		return t
 	}
 
@@ -285,12 +353,30 @@
 	r.declReader.Reset(p.declData[off-predeclReserved:])
 	t := r.doType(base)
 
-	if base == nil || !isInterface(t) {
+	if canReuse(base, t) {
 		p.typCache[off] = t
 	}
 	return t
 }
 
+// canReuse reports whether the type rhs on the RHS of the declaration for def
+// may be re-used.
+//
+// Specifically, if def is non-nil and rhs is an interface type with methods, it
+// may not be re-used because we have a convention of setting the receiver type
+// for interface methods to def.
+func canReuse(def *types.Named, rhs types.Type) bool {
+	if def == nil {
+		return true
+	}
+	iface, _ := rhs.(*types.Interface)
+	if iface == nil {
+		return true
+	}
+	// Don't use iface.Empty() here as iface may not be complete.
+	return iface.NumEmbeddeds() == 0 && iface.NumExplicitMethods() == 0
+}
+
 type importReader struct {
 	p          *iimporter
 	declReader bytes.Reader
@@ -315,17 +401,26 @@
 
 		r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
 
-	case 'F':
-		sig := r.signature(nil)
-
+	case 'F', 'G':
+		var tparams []*typeparams.TypeParam
+		if tag == 'G' {
+			tparams = r.tparamList()
+		}
+		sig := r.signature(nil, nil, tparams)
 		r.declare(types.NewFunc(pos, r.currPkg, name, sig))
 
-	case 'T':
+	case 'T', 'U':
 		// Types can be recursive. We need to setup a stub
 		// declaration before recursing.
 		obj := types.NewTypeName(pos, r.currPkg, name, nil)
 		named := types.NewNamed(obj, nil, nil)
+		// Declare obj before calling r.tparamList, so the new type name is recognized
+		// if used in the constraint of one of its own typeparams (see #48280).
 		r.declare(obj)
+		if tag == 'U' {
+			tparams := r.tparamList()
+			typeparams.SetForNamed(named, tparams)
+		}
 
 		underlying := r.p.typAt(r.uint64(), named).Underlying()
 		named.SetUnderlying(underlying)
@@ -335,12 +430,59 @@
 				mpos := r.pos()
 				mname := r.ident()
 				recv := r.param()
-				msig := r.signature(recv)
+
+				// If the receiver has any targs, set those as the
+				// rparams of the method (since those are the
+				// typeparams being used in the method sig/body).
+				base := baseType(recv.Type())
+				assert(base != nil)
+				targs := typeparams.NamedTypeArgs(base)
+				var rparams []*typeparams.TypeParam
+				if targs.Len() > 0 {
+					rparams = make([]*typeparams.TypeParam, targs.Len())
+					for i := range rparams {
+						rparams[i] = targs.At(i).(*typeparams.TypeParam)
+					}
+				}
+				msig := r.signature(recv, rparams, nil)
 
 				named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
 			}
 		}
 
+	case 'P':
+		// We need to "declare" a typeparam in order to have a name that
+		// can be referenced recursively (if needed) in the type param's
+		// bound.
+		if r.p.version < iexportVersionGenerics {
+			errorf("unexpected type param type")
+		}
+		name0 := tparamName(name)
+		tn := types.NewTypeName(pos, r.currPkg, name0, nil)
+		t := typeparams.NewTypeParam(tn, nil)
+
+		// To handle recursive references to the typeparam within its
+		// bound, save the partial type in tparamIndex before reading the bounds.
+		id := ident{r.currPkg.Name(), name}
+		r.p.tparamIndex[id] = t
+		var implicit bool
+		if r.p.version >= iexportVersionGo1_18 {
+			implicit = r.bool()
+		}
+		constraint := r.typ()
+		if implicit {
+			iface, _ := constraint.(*types.Interface)
+			if iface == nil {
+				errorf("non-interface constraint marked implicit")
+			}
+			typeparams.MarkImplicit(iface)
+		}
+		// The constraint type may not be complete, if we
+		// are in the middle of a type recursion involving type
+		// constraints. So, we defer SetConstraint until we have
+		// completely set up all types in ImportData.
+		r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint})
+
 	case 'V':
 		typ := r.typ()
 
@@ -357,6 +499,10 @@
 
 func (r *importReader) value() (typ types.Type, val constant.Value) {
 	typ = r.typ()
+	if r.p.version >= iexportVersionGo1_18 {
+		// TODO: add support for using the kind.
+		_ = constant.Kind(r.int64())
+	}
 
 	switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
 	case types.IsBoolean:
@@ -499,7 +645,7 @@
 }
 
 func (r *importReader) pos() token.Pos {
-	if r.p.version >= 1 {
+	if r.p.version >= iexportVersionPosCol {
 		r.posv1()
 	} else {
 		r.posv0()
@@ -547,8 +693,17 @@
 func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
 func (r *importReader) string() string      { return r.p.stringAt(r.uint64()) }
 
-func (r *importReader) doType(base *types.Named) types.Type {
-	switch k := r.kind(); k {
+func (r *importReader) doType(base *types.Named) (res types.Type) {
+	k := r.kind()
+	if debug {
+		r.p.trace("importing type %d (base: %s)", k, base)
+		r.p.indent++
+		defer func() {
+			r.p.indent--
+			r.p.trace("=> %s", res)
+		}()
+	}
+	switch k {
 	default:
 		errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
 		return nil
@@ -571,7 +726,7 @@
 		return types.NewMap(r.typ(), r.typ())
 	case signatureType:
 		r.currPkg = r.pkg()
-		return r.signature(nil)
+		return r.signature(nil, nil, nil)
 
 	case structType:
 		r.currPkg = r.pkg()
@@ -611,13 +766,56 @@
 				recv = types.NewVar(token.NoPos, r.currPkg, "", base)
 			}
 
-			msig := r.signature(recv)
+			msig := r.signature(recv, nil, nil)
 			methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
 		}
 
 		typ := newInterface(methods, embeddeds)
 		r.p.interfaceList = append(r.p.interfaceList, typ)
 		return typ
+
+	case typeParamType:
+		if r.p.version < iexportVersionGenerics {
+			errorf("unexpected type param type")
+		}
+		pkg, name := r.qualifiedIdent()
+		id := ident{pkg.Name(), name}
+		if t, ok := r.p.tparamIndex[id]; ok {
+			// We're already in the process of importing this typeparam.
+			return t
+		}
+		// Otherwise, import the definition of the typeparam now.
+		r.p.doDecl(pkg, name)
+		return r.p.tparamIndex[id]
+
+	case instanceType:
+		if r.p.version < iexportVersionGenerics {
+			errorf("unexpected instantiation type")
+		}
+		// pos does not matter for instances: they are positioned on the original
+		// type.
+		_ = r.pos()
+		len := r.uint64()
+		targs := make([]types.Type, len)
+		for i := range targs {
+			targs[i] = r.typ()
+		}
+		baseType := r.typ()
+		// The imported instantiated type doesn't include any methods, so
+		// we must always use the methods of the base (orig) type.
+		// TODO provide a non-nil *Environment
+		t, _ := typeparams.Instantiate(nil, baseType, targs, false)
+		return t
+
+	case unionType:
+		if r.p.version < iexportVersionGenerics {
+			errorf("unexpected union type")
+		}
+		terms := make([]*typeparams.Term, r.uint64())
+		for i := range terms {
+			terms[i] = typeparams.NewTerm(r.bool(), r.typ())
+		}
+		return typeparams.NewUnion(terms)
 	}
 }
 
@@ -625,11 +823,25 @@
 	return itag(r.uint64())
 }
 
-func (r *importReader) signature(recv *types.Var) *types.Signature {
+func (r *importReader) signature(recv *types.Var, rparams []*typeparams.TypeParam, tparams []*typeparams.TypeParam) *types.Signature {
 	params := r.paramList()
 	results := r.paramList()
 	variadic := params.Len() > 0 && r.bool()
-	return types.NewSignature(recv, params, results, variadic)
+	return typeparams.NewSignatureType(recv, rparams, tparams, params, results, variadic)
+}
+
+func (r *importReader) tparamList() []*typeparams.TypeParam {
+	n := r.uint64()
+	if n == 0 {
+		return nil
+	}
+	xs := make([]*typeparams.TypeParam, n)
+	for i := range xs {
+		// Note: the standard library importer is tolerant of nil types here,
+		// though it would panic in SetTypeParams.
+		xs[i] = r.typ().(*typeparams.TypeParam)
+	}
+	return xs
 }
 
 func (r *importReader) paramList() *types.Tuple {
@@ -674,3 +886,13 @@
 	}
 	return x
 }
+
+func baseType(typ types.Type) *types.Named {
+	// pointer receivers are never types.Named types
+	if p, _ := typ.(*types.Pointer); p != nil {
+		typ = p.Elem()
+	}
+	// receiver base types are always (possibly generic) types.Named types
+	n, _ := typ.(*types.Named)
+	return n
+}
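
The importer above must create type parameters before their constraints exist, because a constraint may mention the very type being declared; that is why SetConstraint calls are queued in p.later and applied only after all declarations are read. A standalone sketch of that two-phase pattern using go/types directly (Go 1.18+; the package and method names are illustrative, not taken from the patch):

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.com/p", "p")

	// Phase 1: declare the type parameter with a nil constraint so it can
	// already be referenced while the rest of the declaration is decoded.
	tn := types.NewTypeName(token.NoPos, pkg, "T", nil)
	tparam := types.NewTypeParam(tn, nil)

	// Build the constraint afterwards; in the importer it may mention types
	// that themselves refer back to tparam.
	stringer := types.NewInterfaceType([]*types.Func{
		types.NewFunc(token.NoPos, pkg, "String",
			types.NewSignatureType(nil, nil, nil, nil,
				types.NewTuple(types.NewVar(token.NoPos, pkg, "", types.Typ[types.String])),
				false)),
	}, nil)
	stringer.Complete()

	// Phase 2: attach the completed constraint, as the p.later loop does.
	tparam.SetConstraint(stringer)
	fmt.Println(tparam.Obj().Name(), tparam.Constraint())
}
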
diff --git a/go/internal/gcimporter/support_go117.go b/go/internal/gcimporter/support_go117.go
new file mode 100644
index 0000000..d892273
--- /dev/null
+++ b/go/internal/gcimporter/support_go117.go
@@ -0,0 +1,16 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package gcimporter
+
+import "go/types"
+
+const iexportVersion = iexportVersionGo1_11
+
+func additionalPredeclared() []types.Type {
+	return nil
+}
diff --git a/go/internal/gcimporter/support_go118.go b/go/internal/gcimporter/support_go118.go
new file mode 100644
index 0000000..a993843
--- /dev/null
+++ b/go/internal/gcimporter/support_go118.go
@@ -0,0 +1,23 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package gcimporter
+
+import "go/types"
+
+const iexportVersion = iexportVersionGenerics
+
+// additionalPredeclared returns additional predeclared types in Go 1.18.
+func additionalPredeclared() []types.Type {
+	return []types.Type{
+		// comparable
+		types.Universe.Lookup("comparable").Type(),
+
+		// any
+		types.Universe.Lookup("any").Type(),
+	}
+}
diff --git a/go/loader/loader.go b/go/loader/loader.go
index 508a1fd..3ba91f7 100644
--- a/go/loader/loader.go
+++ b/go/loader/loader.go
@@ -23,6 +23,7 @@
 
 	"golang.org/x/tools/go/ast/astutil"
 	"golang.org/x/tools/go/internal/cgo"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 var ignoreVendor build.ImportMode
@@ -1053,6 +1054,7 @@
 		errorFunc: imp.conf.TypeChecker.Error,
 		dir:       dir,
 	}
+	typeparams.InitInstanceInfo(&info.Info)
 
 	// Copy the types.Config so we can vary it across PackageInfos.
 	tc := imp.conf.TypeChecker
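
typeparams.InitInstanceInfo is an internal shim; on a Go 1.18+ toolchain it essentially allocates the Instances map on types.Info so the checker records every generic instantiation it encounters. A rough illustration of the effect with go/types alone (a sketch, assuming Go 1.18+):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

func id[T any](x T) T { return x }

var _ = id[int](1)
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{Instances: make(map[*ast.Ident]types.Instance)}
	if _, err := new(types.Config).Check("p", fset, []*ast.File{f}, info); err != nil {
		panic(err)
	}
	for id, inst := range info.Instances {
		// Reports the instantiation id[int] with its type argument
		// and the instantiated signature.
		fmt.Println(id.Name, inst.TypeArgs.At(0), inst.Type)
	}
}
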
diff --git a/go/packages/golist.go b/go/packages/golist.go
index 0e1e7f1..7aa97f7 100644
--- a/go/packages/golist.go
+++ b/go/packages/golist.go
@@ -393,6 +393,8 @@
 	CompiledGoFiles   []string
 	IgnoredGoFiles    []string
 	IgnoredOtherFiles []string
+	EmbedPatterns     []string
+	EmbedFiles        []string
 	CFiles            []string
 	CgoFiles          []string
 	CXXFiles          []string
@@ -565,6 +567,8 @@
 			GoFiles:         absJoin(p.Dir, p.GoFiles, p.CgoFiles),
 			CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
 			OtherFiles:      absJoin(p.Dir, otherFiles(p)...),
+			EmbedFiles:      absJoin(p.Dir, p.EmbedFiles),
+			EmbedPatterns:   absJoin(p.Dir, p.EmbedPatterns),
 			IgnoredFiles:    absJoin(p.Dir, p.IgnoredGoFiles, p.IgnoredOtherFiles),
 			forTest:         p.ForTest,
 			depsErrors:      p.DepsErrors,
@@ -815,7 +819,7 @@
 		fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
 		// go list doesn't let you pass -test and -find together,
 		// probably because you'd just get the TestMain.
-		fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0),
+		fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0 && !usesExportData(cfg)),
 	}
 	fullargs = append(fullargs, cfg.BuildFlags...)
 	fullargs = append(fullargs, "--")
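
The two new jsonPackage fields mirror what `go list -json` already reports for packages using //go:embed. A quick standalone way to see the raw values the driver decodes (a sketch, assuming it is run inside a module whose package embeds files):

package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

// Only the fields of interest here; go list -json emits many more.
type listPackage struct {
	ImportPath    string
	EmbedPatterns []string
	EmbedFiles    []string
}

func main() {
	out, err := exec.Command("go", "list", "-json", ".").Output()
	if err != nil {
		panic(err)
	}
	var p listPackage
	if err := json.Unmarshal(out, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.ImportPath, p.EmbedPatterns, p.EmbedFiles)
}
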
diff --git a/go/packages/loadmode_string.go b/go/packages/loadmode_string.go
index 7ea37e7..5c080d2 100644
--- a/go/packages/loadmode_string.go
+++ b/go/packages/loadmode_string.go
@@ -15,7 +15,7 @@
 	NeedCompiledGoFiles,
 	NeedImports,
 	NeedDeps,
-	NeedExportsFile,
+	NeedExportFile,
 	NeedTypes,
 	NeedSyntax,
 	NeedTypesInfo,
@@ -28,7 +28,7 @@
 	"NeedCompiledGoFiles",
 	"NeedImports",
 	"NeedDeps",
-	"NeedExportsFile",
+	"NeedExportFile",
 	"NeedTypes",
 	"NeedSyntax",
 	"NeedTypesInfo",
diff --git a/go/packages/packages.go b/go/packages/packages.go
index 8a1a2d6..2442845 100644
--- a/go/packages/packages.go
+++ b/go/packages/packages.go
@@ -26,6 +26,7 @@
 	"golang.org/x/tools/go/gcexportdata"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/packagesinternal"
+	"golang.org/x/tools/internal/typeparams"
 	"golang.org/x/tools/internal/typesinternal"
 )
 
@@ -38,9 +39,6 @@
 // Load may return more information than requested.
 type LoadMode int
 
-// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to
-// NeedExportFile to make it consistent with the Package field it's adding.
-
 const (
 	// NeedName adds Name and PkgPath.
 	NeedName LoadMode = 1 << iota
@@ -58,8 +56,8 @@
 	// NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
 	NeedDeps
 
-	// NeedExportsFile adds ExportFile.
-	NeedExportsFile
+	// NeedExportFile adds ExportFile.
+	NeedExportFile
 
 	// NeedTypes adds Types, Fset, and IllTyped.
 	NeedTypes
@@ -79,6 +77,12 @@
 
 	// NeedModule adds Module.
 	NeedModule
+
+	// NeedEmbedFiles adds EmbedFiles.
+	NeedEmbedFiles
+
+	// NeedEmbedPatterns adds EmbedPatterns.
+	NeedEmbedPatterns
 )
 
 const (
@@ -101,6 +105,9 @@
 	// Deprecated: LoadAllSyntax exists for historical compatibility
 	// and should not be used. Please directly specify the needed fields using the Need values.
 	LoadAllSyntax = LoadSyntax | NeedDeps
+
+	// Deprecated: NeedExportsFile is a historical misspelling of NeedExportFile.
+	NeedExportsFile = NeedExportFile
 )
 
 // A Config specifies details about how packages should be loaded.
@@ -295,6 +302,14 @@
 	// including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
 	OtherFiles []string
 
+	// EmbedFiles lists the absolute file paths of the package's files
+	// embedded with go:embed.
+	EmbedFiles []string
+
+	// EmbedPatterns lists the absolute file patterns of the package's
+	// files embedded with go:embed.
+	EmbedPatterns []string
+
 	// IgnoredFiles lists source files that are not part of the package
 	// using the current build configuration but that might be part of
 	// the package using other build configurations.
@@ -327,6 +342,9 @@
 	// The NeedSyntax LoadMode bit populates this field for packages matching the patterns.
 	// If NeedDeps and NeedImports are also set, this field will also be populated
 	// for dependencies.
+	//
+	// Syntax is kept in the same order as CompiledGoFiles, with the caveat that nils are
+	// removed.  If parsing returned nil, Syntax may be shorter than CompiledGoFiles.
 	Syntax []*ast.File
 
 	// TypesInfo provides type information about the package's syntax trees.
@@ -427,6 +445,8 @@
 	GoFiles         []string          `json:",omitempty"`
 	CompiledGoFiles []string          `json:",omitempty"`
 	OtherFiles      []string          `json:",omitempty"`
+	EmbedFiles      []string          `json:",omitempty"`
+	EmbedPatterns   []string          `json:",omitempty"`
 	IgnoredFiles    []string          `json:",omitempty"`
 	ExportFile      string            `json:",omitempty"`
 	Imports         map[string]string `json:",omitempty"`
@@ -450,6 +470,8 @@
 		GoFiles:         p.GoFiles,
 		CompiledGoFiles: p.CompiledGoFiles,
 		OtherFiles:      p.OtherFiles,
+		EmbedFiles:      p.EmbedFiles,
+		EmbedPatterns:   p.EmbedPatterns,
 		IgnoredFiles:    p.IgnoredFiles,
 		ExportFile:      p.ExportFile,
 	}
@@ -477,6 +499,8 @@
 		GoFiles:         flat.GoFiles,
 		CompiledGoFiles: flat.CompiledGoFiles,
 		OtherFiles:      flat.OtherFiles,
+		EmbedFiles:      flat.EmbedFiles,
+		EmbedPatterns:   flat.EmbedPatterns,
 		ExportFile:      flat.ExportFile,
 	}
 	if len(flat.Imports) > 0 {
@@ -610,7 +634,7 @@
 		needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
 			// ... or if we need types and the exportData is invalid. We fall back to (incompletely)
 			// typechecking packages from source if they fail to compile.
-			(ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
+			(ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
 		lpkg := &loaderPackage{
 			Package:   pkg,
 			needtypes: needtypes,
@@ -748,13 +772,19 @@
 			ld.pkgs[i].OtherFiles = nil
 			ld.pkgs[i].IgnoredFiles = nil
 		}
+		if ld.requestedMode&NeedEmbedFiles == 0 {
+			ld.pkgs[i].EmbedFiles = nil
+		}
+		if ld.requestedMode&NeedEmbedPatterns == 0 {
+			ld.pkgs[i].EmbedPatterns = nil
+		}
 		if ld.requestedMode&NeedCompiledGoFiles == 0 {
 			ld.pkgs[i].CompiledGoFiles = nil
 		}
 		if ld.requestedMode&NeedImports == 0 {
 			ld.pkgs[i].Imports = nil
 		}
-		if ld.requestedMode&NeedExportsFile == 0 {
+		if ld.requestedMode&NeedExportFile == 0 {
 			ld.pkgs[i].ExportFile = ""
 		}
 		if ld.requestedMode&NeedTypes == 0 {
@@ -910,6 +940,7 @@
 		Scopes:     make(map[ast.Node]*types.Scope),
 		Selections: make(map[*ast.SelectorExpr]*types.Selection),
 	}
+	typeparams.InitInstanceInfo(lpkg.TypesInfo)
 	lpkg.TypesSizes = ld.sizes
 
 	importer := importerFunc(func(path string) (*types.Package, error) {
@@ -1206,7 +1237,7 @@
 		return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
 	}
 	if viewLen != len(view) {
-		log.Fatalf("Unexpected package creation during export data loading")
+		log.Panicf("golang.org/x/tools/go/packages: unexpected new packages during load of %s", lpkg.PkgPath)
 	}
 
 	lpkg.Types = tpkg
@@ -1217,17 +1248,8 @@
 
 // impliedLoadMode returns loadMode with its dependencies.
 func impliedLoadMode(loadMode LoadMode) LoadMode {
-	if loadMode&NeedTypesInfo != 0 && loadMode&NeedImports == 0 {
-		// If NeedTypesInfo, go/packages needs to do typechecking itself so it can
-		// associate type info with the AST. To do so, we need the export data
-		// for dependencies, which means we need to ask for the direct dependencies.
-		// NeedImports is used to ask for the direct dependencies.
-		loadMode |= NeedImports
-	}
-
-	if loadMode&NeedDeps != 0 && loadMode&NeedImports == 0 {
-		// With NeedDeps we need to load at least direct dependencies.
-		// NeedImports is used to ask for the direct dependencies.
+	if loadMode&(NeedDeps|NeedTypes|NeedTypesInfo) != 0 {
+		// All these things require knowing the import graph.
 		loadMode |= NeedImports
 	}
 
@@ -1235,5 +1257,5 @@
 }
 
 func usesExportData(cfg *Config) bool {
-	return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
+	return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
 }
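
A usage sketch for the LoadMode bits introduced above: NeedEmbedFiles and NeedEmbedPatterns populate the corresponding Package fields, while the deprecated NeedExportsFile alias keeps old callers compiling. Illustrative only; it assumes the module being loaded uses //go:embed somewhere.

package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedFiles |
			packages.NeedEmbedFiles | packages.NeedEmbedPatterns,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		panic(err)
	}
	for _, p := range pkgs {
		fmt.Println(p.PkgPath, p.EmbedPatterns, p.EmbedFiles)
	}
}
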
diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go
index 6549fd6..796edb6 100644
--- a/go/packages/packages_test.go
+++ b/go/packages/packages_test.go
@@ -1077,6 +1077,10 @@
 			"e/e2.go":         `package main; import _ "golang.org/fake/c"`,
 			"f/f.go":          `package f`,
 			"f/f.s":           ``,
+			"g/g.go":          `package g; import _ "embed";` + "\n//go:embed g2.txt\n" + `var s string`,
+			"g/g2.txt":        "hello",
+			"h/h.go":          `package g; import _ "embed";` + "\n//go:embed a*.txt\n" + `var s string`,
+			"h/aa.txt":        "hello",
 		}}})
 	defer exported.Cleanup()
 	exported.Config.Dir = filepath.Dir(filepath.Dir(exported.File("golang.org/fake", "a/a.go")))
@@ -1103,6 +1107,8 @@
 		{"golang.org/fake/subdir/e", "d.go"},
 		{"golang.org/fake/e", "e.go e2.go"},
 		{"golang.org/fake/f", "f.go f.s"},
+		{"golang.org/fake/g", "g.go g2.txt"},
+		{"golang.org/fake/h", "h.go aa.txt"},
 		// Relative paths
 		{"./a", "a.go"},
 		{"./b/vendor/a", "a.go"},
@@ -1112,8 +1118,10 @@
 		{"./subdir/e", "d.go"},
 		{"./e", "e.go e2.go"},
 		{"./f", "f.go f.s"},
+		{"./g", "g.go g2.txt"},
+		{"./h", "h.go aa.txt"},
 	} {
-		exported.Config.Mode = packages.LoadFiles
+		exported.Config.Mode = packages.LoadFiles | packages.NeedEmbedFiles
 		pkgs, err := packages.Load(exported.Config, test.pattern)
 		if err != nil {
 			t.Errorf("pattern %s: %v", test.pattern, err)
@@ -1133,6 +1141,9 @@
 			for _, filename := range pkg.OtherFiles {
 				checkFile(filename)
 			}
+			for _, filename := range pkg.EmbedFiles {
+				checkFile(filename)
+			}
 			for _, filename := range pkg.IgnoredFiles {
 				checkFile(filename)
 			}
@@ -2233,8 +2244,8 @@
 			"LoadMode(NeedDeps)",
 		},
 		{
-			packages.NeedExportsFile,
-			"LoadMode(NeedExportsFile)",
+			packages.NeedExportFile,
+			"LoadMode(NeedExportFile)",
 		},
 		{
 			packages.NeedTypes,
@@ -2253,12 +2264,12 @@
 			"LoadMode(NeedTypesSizes)",
 		},
 		{
-			packages.NeedName | packages.NeedExportsFile,
-			"LoadMode(NeedName|NeedExportsFile)",
+			packages.NeedName | packages.NeedExportFile,
+			"LoadMode(NeedName|NeedExportFile)",
 		},
 		{
-			packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedImports | packages.NeedDeps | packages.NeedExportsFile | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo | packages.NeedTypesSizes,
-			"LoadMode(NeedName|NeedFiles|NeedCompiledGoFiles|NeedImports|NeedDeps|NeedExportsFile|NeedTypes|NeedSyntax|NeedTypesInfo|NeedTypesSizes)",
+			packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedImports | packages.NeedDeps | packages.NeedExportFile | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo | packages.NeedTypesSizes,
+			"LoadMode(NeedName|NeedFiles|NeedCompiledGoFiles|NeedImports|NeedDeps|NeedExportFile|NeedTypes|NeedSyntax|NeedTypesInfo|NeedTypesSizes)",
 		},
 		{
 			packages.NeedName | 8192,
@@ -2707,7 +2718,7 @@
 }
 
 func srcs(p *packages.Package) []string {
-	return cleanPaths(append(p.GoFiles[:len(p.GoFiles):len(p.GoFiles)], p.OtherFiles...))
+	return cleanPaths(append(append(p.GoFiles[:len(p.GoFiles):len(p.GoFiles)], p.OtherFiles...), p.EmbedFiles...))
 }
 
 // cleanPaths attempts to reduce path names to stable forms
@@ -2832,3 +2843,11 @@
 		return nil
 	})
 }
+
+func TestExportFile(t *testing.T) {
+	// This used to trigger the log.Fatal in loadFromExportData.
+	// See go.dev/issue/45584.
+	cfg := new(packages.Config)
+	cfg.Mode = packages.NeedTypes
+	packages.Load(cfg, "fmt")
+}
diff --git a/go/packages/packagestest/export.go b/go/packages/packagestest/export.go
index 5dea613..d792c3c 100644
--- a/go/packages/packagestest/export.go
+++ b/go/packages/packagestest/export.go
@@ -582,7 +582,7 @@
 
 // MustCopyFileTree returns a file set for a module based on a real directory tree.
 // It scans the directory tree anchored at root and adds a Copy writer to the
-// map for every file found.
+// map for every file found. It skips copying files in nested modules.
 // This is to enable the common case in tests where you have a full copy of the
 // package in your testdata.
 // This will panic if there is any kind of error trying to walk the file tree.
@@ -593,6 +593,12 @@
 			return err
 		}
 		if info.IsDir() {
+			// skip nested modules.
+			if path != root {
+				if fi, err := os.Stat(filepath.Join(path, "go.mod")); err == nil && !fi.IsDir() {
+					return filepath.SkipDir
+				}
+			}
 			return nil
 		}
 		fragment, err := filepath.Rel(root, path)
diff --git a/go/packages/packagestest/export_test.go b/go/packages/packagestest/export_test.go
index 356dd4b..1172f7c 100644
--- a/go/packages/packagestest/export_test.go
+++ b/go/packages/packagestest/export_test.go
@@ -5,8 +5,11 @@
 package packagestest_test
 
 import (
+	"io/ioutil"
 	"os"
 	"path/filepath"
+	"reflect"
+	"sort"
 	"testing"
 
 	"golang.org/x/tools/go/packages/packagestest"
@@ -180,3 +183,53 @@
 		})
 	}
 }
+
+func TestMustCopyFiles(t *testing.T) {
+	// Create the following test directory structure in a temporary directory.
+	src := map[string]string{
+		// copies all files under the specified directory.
+		"go.mod": "module example.com",
+		"m.go":   "package m",
+		"a/a.go": "package a",
+		// contents from a nested module shouldn't be copied.
+		"nested/go.mod": "module example.com/nested",
+		"nested/m.go":   "package nested",
+		"nested/b/b.go": "package b",
+	}
+
+	tmpDir, err := ioutil.TempDir("", t.Name())
+	if err != nil {
+		t.Fatalf("failed to create a temporary directory: %v", err)
+	}
+	defer os.RemoveAll(tmpDir)
+
+	for fragment, contents := range src {
+		fullpath := filepath.Join(tmpDir, filepath.FromSlash(fragment))
+		if err := os.MkdirAll(filepath.Dir(fullpath), 0755); err != nil {
+			t.Fatal(err)
+		}
+		if err := ioutil.WriteFile(fullpath, []byte(contents), 0644); err != nil {
+			t.Fatal(err)
+		}
+	}
+
+	copied := packagestest.MustCopyFileTree(tmpDir)
+	var got []string
+	for fragment := range copied {
+		got = append(got, filepath.ToSlash(fragment))
+	}
+	want := []string{"go.mod", "m.go", "a/a.go"}
+
+	sort.Strings(got)
+	sort.Strings(want)
+	if !reflect.DeepEqual(got, want) {
+		t.Errorf("packagestest.MustCopyFileTree = %v, want %v", got, want)
+	}
+
+	// packagestest.Export is happy.
+	exported := packagestest.Export(t, packagestest.Modules, []packagestest.Module{{
+		Name:  "example.com",
+		Files: packagestest.MustCopyFileTree(tmpDir),
+	}})
+	defer exported.Cleanup()
+}
diff --git a/go/pointer/pointer_go117_test.go b/go/pointer/pointer_go117_test.go
index 7546a06..50eb213 100644
--- a/go/pointer/pointer_go117_test.go
+++ b/go/pointer/pointer_go117_test.go
@@ -13,6 +13,7 @@
 	"fmt"
 	"io/ioutil"
 	"os"
+	"path/filepath"
 	"testing"
 )
 
@@ -34,8 +35,12 @@
 	if err != nil {
 		t.Fatalf("couldn't read file '%s': %s", filename, err)
 	}
+	fpath, err := filepath.Abs(filename)
+	if err != nil {
+		t.Errorf("couldn't get absolute path for '%s': %s", filename, err)
+	}
 
-	if !doOneInput(string(content), filename) {
+	if !doOneInput(t, string(content), fpath) {
 		t.Fail()
 	}
 }
diff --git a/go/pointer/pointer_test.go b/go/pointer/pointer_test.go
index 2f6e069..1ac5b6c 100644
--- a/go/pointer/pointer_test.go
+++ b/go/pointer/pointer_test.go
@@ -21,13 +21,15 @@
 	"go/types"
 	"io/ioutil"
 	"os"
+	"path/filepath"
 	"regexp"
 	"strconv"
 	"strings"
 	"testing"
+	"unsafe"
 
 	"golang.org/x/tools/go/callgraph"
-	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/go/pointer"
 	"golang.org/x/tools/go/ssa"
 	"golang.org/x/tools/go/ssa/ssautil"
@@ -123,7 +125,7 @@
 //
 type expectation struct {
 	kind     string // "pointsto" | "pointstoquery" | "types" | "calls" | "warning"
-	filename string
+	filepath string
 	linenum  int // source line number, 1-based
 	args     []string
 	query    string           // extended query
@@ -136,7 +138,7 @@
 }
 
 func (e *expectation) errorf(format string, args ...interface{}) {
-	fmt.Printf("%s:%d: ", e.filename, e.linenum)
+	fmt.Printf("%s:%d: ", e.filepath, e.linenum)
 	fmt.Printf(format, args...)
 	fmt.Println()
 }
@@ -149,44 +151,50 @@
 func findProbe(prog *ssa.Program, probes map[*ssa.CallCommon]bool, queries map[ssa.Value]pointer.Pointer, e *expectation) (site *ssa.CallCommon, pts pointer.PointsToSet) {
 	for call := range probes {
 		pos := prog.Fset.Position(call.Pos())
-		if pos.Line == e.linenum && pos.Filename == e.filename {
+		if pos.Line == e.linenum && pos.Filename == e.filepath {
 			// TODO(adonovan): send this to test log (display only on failure).
 			// fmt.Printf("%s:%d: info: found probe for %s: %s\n",
-			// 	e.filename, e.linenum, e, p.arg0) // debugging
+			// 	e.filepath, e.linenum, e, p.arg0) // debugging
 			return call, queries[call.Args[0]].PointsTo()
 		}
 	}
 	return // e.g. analysis didn't reach this call
 }
 
-func doOneInput(input, filename string) bool {
-	var conf loader.Config
-
-	// Parsing.
-	f, err := conf.ParseFile(filename, input)
+func doOneInput(t *testing.T, input, fpath string) bool {
+	cfg := &packages.Config{
+		Mode:  packages.LoadAllSyntax,
+		Tests: true,
+	}
+	pkgs, err := packages.Load(cfg, fpath)
 	if err != nil {
 		fmt.Println(err)
 		return false
 	}
-
-	// Create single-file main package and import its dependencies.
-	conf.CreateFromFiles("main", f)
-	iprog, err := conf.Load()
-	if err != nil {
-		fmt.Println(err)
+	if packages.PrintErrors(pkgs) > 0 {
+		fmt.Println("loaded packages have errors")
 		return false
 	}
-	mainPkgInfo := iprog.Created[0].Pkg
 
 	// SSA creation + building.
-	prog := ssautil.CreateProgram(iprog, ssa.SanityCheckFunctions)
+	prog, ssaPkgs := ssautil.AllPackages(pkgs, ssa.SanityCheckFunctions)
 	prog.Build()
 
-	mainpkg := prog.Package(mainPkgInfo)
+	// The main package's underlying packages.Package and its SSA package.
+	mainPpkg := pkgs[0]
+	mainpkg := ssaPkgs[0]
 	ptrmain := mainpkg // main package for the pointer analysis
 	if mainpkg.Func("main") == nil {
-		// No main function; assume it's a test.
-		ptrmain = prog.CreateTestMainPackage(mainpkg)
+		// For test programs without main, such as testdata/a_test.go,
+		// the package with the original code is "main [main.test]" and
+		// the package with the main is "main.test".
+		for i, pkg := range pkgs {
+			if pkg.ID == mainPpkg.ID+".test" {
+				ptrmain = ssaPkgs[i]
+			} else if pkg.ID == fmt.Sprintf("%s [%s.test]", mainPpkg.ID, mainPpkg.ID) {
+				mainpkg = ssaPkgs[i]
+			}
+		}
 	}
 
 	// Find all calls to the built-in print(x).  Analytically,
@@ -221,14 +229,14 @@
 		if matches := re.FindAllStringSubmatch(line, -1); matches != nil {
 			match := matches[0]
 			kind, rest := match[1], match[2]
-			e := &expectation{kind: kind, filename: filename, linenum: linenum}
+			e := &expectation{kind: kind, filepath: fpath, linenum: linenum}
 
 			if kind == "line" {
 				if rest == "" {
 					ok = false
 					e.errorf("@%s expectation requires identifier", kind)
 				} else {
-					lineMapping[fmt.Sprintf("%s:%d", filename, linenum)] = rest
+					lineMapping[fmt.Sprintf("%s:%d", fpath, linenum)] = rest
 				}
 				continue
 			}
@@ -251,7 +259,7 @@
 				for _, typstr := range split(rest, "|") {
 					var t types.Type = types.Typ[types.Invalid] // means "..."
 					if typstr != "..." {
-						tv, err := types.Eval(prog.Fset, mainpkg.Pkg, f.Pos(), typstr)
+						tv, err := types.Eval(prog.Fset, mainpkg.Pkg, mainPpkg.Syntax[0].Pos(), typstr)
 						if err != nil {
 							ok = false
 							// Don't print err since its location is bad.
@@ -294,7 +302,7 @@
 	}
 
 	var log bytes.Buffer
-	fmt.Fprintf(&log, "Input: %s\n", filename)
+	fmt.Fprintf(&log, "Input: %s\n", fpath)
 
 	// Run the analysis.
 	config := &pointer.Config{
@@ -308,7 +316,7 @@
 		v := probe.Args[0]
 		pos := prog.Fset.Position(probe.Pos())
 		for _, e := range exps {
-			if e.linenum == pos.Line && e.filename == pos.Filename && e.kind == "pointstoquery" {
+			if e.linenum == pos.Line && e.filepath == pos.Filename && e.kind == "pointstoquery" {
 				var err error
 				e.extended, err = config.AddExtendedQuery(v, e.query)
 				if err != nil {
@@ -546,6 +554,9 @@
 	if testing.Short() {
 		t.Skip("skipping in short mode; this test requires tons of memory; https://golang.org/issue/14113")
 	}
+	if unsafe.Sizeof(unsafe.Pointer(nil)) <= 4 {
+		t.Skip("skipping memory-intensive test on platform with small address space; https://golang.org/issue/14113")
+	}
 	ok := true
 
 	wd, err := os.Getwd()
@@ -567,7 +578,12 @@
 			continue
 		}
 
-		if !doOneInput(string(content), filename) {
+		fpath, err := filepath.Abs(filename)
+		if err != nil {
+			t.Errorf("couldn't get absolute path for '%s': %s", filename, err)
+		}
+
+		if !doOneInput(t, string(content), fpath) {
 			ok = false
 		}
 	}
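
The rewritten doOneInput relies on the package IDs go/packages synthesizes when Tests is enabled: the compiled-for-test variant of a package is reported as "pkg [pkg.test]" and the generated test main as "pkg.test". A small sketch that prints those IDs (run inside any module directory containing tests; output varies by package):

package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{Mode: packages.NeedName, Tests: true}
	pkgs, err := packages.Load(cfg, ".")
	if err != nil {
		panic(err)
	}
	for _, p := range pkgs {
		// Typically prints the package itself, "pkg [pkg.test]",
		// "pkg.test", and an external "pkg_test [pkg.test]" if present.
		fmt.Println(p.ID)
	}
}
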
diff --git a/go/pointer/stdlib_test.go b/go/pointer/stdlib_test.go
index 2d5097f..3ba42a1 100644
--- a/go/pointer/stdlib_test.go
+++ b/go/pointer/stdlib_test.go
@@ -19,13 +19,11 @@
 
 import (
 	"flag"
-	"go/build"
 	"go/token"
 	"testing"
 	"time"
 
-	"golang.org/x/tools/go/buildutil"
-	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/go/ssa"
 	"golang.org/x/tools/go/ssa/ssautil"
 )
@@ -37,22 +35,18 @@
 		t.Skip("skipping (slow) stdlib test (use --stdlib)")
 	}
 
-	// Load, parse and type-check the program.
-	ctxt := build.Default // copy
-	ctxt.GOPATH = ""      // disable GOPATH
-	conf := loader.Config{Build: &ctxt}
-	if _, err := conf.FromArgs(buildutil.AllPackages(conf.Build), true); err != nil {
-		t.Errorf("FromArgs failed: %v", err)
-		return
+	cfg := &packages.Config{
+		Mode: packages.LoadAllSyntax,
+		// Create test main packages with a main function.
+		Tests: true,
 	}
-
-	iprog, err := conf.Load()
-	if err != nil {
+	pkgs, err := packages.Load(cfg, "std")
+	if err != nil || packages.PrintErrors(pkgs) > 0 {
 		t.Fatalf("Load failed: %v", err)
 	}
 
 	// Create SSA packages.
-	prog := ssautil.CreateProgram(iprog, 0)
+	prog, _ := ssautil.AllPackages(pkgs, 0)
 	prog.Build()
 
 	numPkgs := len(prog.AllPackages())
@@ -62,10 +56,9 @@
 
 	// Determine the set of packages/tests to analyze.
 	var mains []*ssa.Package
-	for _, info := range iprog.InitialPackages() {
-		ssapkg := prog.Package(info.Pkg)
-		if main := prog.CreateTestMainPackage(ssapkg); main != nil {
-			mains = append(mains, main)
+	for _, ssapkg := range prog.AllPackages() {
+		if ssapkg.Pkg.Name() == "main" && ssapkg.Func("main") != nil {
+			mains = append(mains, ssapkg)
 		}
 	}
 	if mains == nil {
diff --git a/go/pointer/testdata/a_test.go b/go/pointer/testdata/a_test.go
index 3baa9ac..c6058a0 100644
--- a/go/pointer/testdata/a_test.go
+++ b/go/pointer/testdata/a_test.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package a
@@ -11,7 +12,7 @@
 
 func log(f func(*testing.T)) {
 	// The PTS of f is the set of called tests.  TestingQuux is not present.
-	print(f) // @pointsto main.Test | main.TestFoo
+	print(f) // @pointsto command-line-arguments.Test | command-line-arguments.TestFoo
 }
 
 func Test(t *testing.T) {
@@ -33,10 +34,11 @@
 }
 
 func ExampleBar() {
+	// Output:
 }
 
 // Excludes TestingQuux.
-// @calls testing.tRunner -> main.Test
-// @calls testing.tRunner -> main.TestFoo
-// @calls testing.runExample -> main.ExampleBar
-// @calls (*testing.B).runN -> main.BenchmarkFoo
+// @calls testing.tRunner -> command-line-arguments.Test
+// @calls testing.tRunner -> command-line-arguments.TestFoo
+// @calls (*testing.B).runN -> command-line-arguments.BenchmarkFoo
+// @calls testing.runExample -> command-line-arguments.ExampleBar
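
The expectation renames in these testdata files follow from the loader change above: doOneInput now passes packages.Load a lone file path, and the go command names such an ad-hoc package "command-line-arguments" rather than "main". A minimal sketch of that naming (the file path here is hypothetical):

package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{Mode: packages.NeedName}
	// Any lone .go file argument is loaded as an ad-hoc package.
	pkgs, err := packages.Load(cfg, "/tmp/example/main.go")
	if err != nil {
		panic(err)
	}
	fmt.Println(pkgs[0].PkgPath) // command-line-arguments
}
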
diff --git a/go/pointer/testdata/another.go b/go/pointer/testdata/another.go
index 12ed690..75b92c5 100644
--- a/go/pointer/testdata/another.go
+++ b/go/pointer/testdata/another.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -29,8 +30,8 @@
 
 	// NB, an interface may never directly alias any global
 	// labels, even though it may contain pointers that do.
-	print(i)                 // @pointsto makeinterface:func(x int) int | makeinterface:func(x int, y int) | makeinterface:func(int, int) | makeinterface:int | makeinterface:main.S
-	print(i.(func(int) int)) // @pointsto main.incr
+	print(i)                 // @pointsto makeinterface:func(x int) int | makeinterface:func(x int, y int) | makeinterface:func(int, int) | makeinterface:int | makeinterface:command-line-arguments.S
+	print(i.(func(int) int)) // @pointsto command-line-arguments.incr
 
 	print() // regression test for crash
 }
diff --git a/go/pointer/testdata/arrayreflect.go b/go/pointer/testdata/arrayreflect.go
index 2b23674..18c8707 100644
--- a/go/pointer/testdata/arrayreflect.go
+++ b/go/pointer/testdata/arrayreflect.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -17,7 +18,7 @@
 	rvsl := reflect.ValueOf(slice).Slice(0, 0)
 	print(rvsl.Interface())              // @types []*int
 	print(rvsl.Interface().([]*int))     // @pointsto makeslice@slice:15
-	print(rvsl.Interface().([]*int)[42]) // @pointsto main.a
+	print(rvsl.Interface().([]*int)[42]) // @pointsto command-line-arguments.a
 
 	// reflect.Value contains an array (non-addressable).
 	array := [10]*int{&a} // @line array
@@ -30,7 +31,7 @@
 	rvparray := reflect.ValueOf(&array).Slice(0, 0)
 	print(rvparray.Interface())              // @types []*int
 	print(rvparray.Interface().([]*int))     // @pointsto array@array:2
-	print(rvparray.Interface().([]*int)[42]) // @pointsto main.a
+	print(rvparray.Interface().([]*int)[42]) // @pointsto command-line-arguments.a
 
 	// reflect.Value contains a string.
 	rvstring := reflect.ValueOf("hi").Slice(0, 0)
@@ -75,12 +76,12 @@
 	slice := []*int{&a} // @line ar6slice
 	rv1 := reflect.ValueOf(slice)
 	print(rv1.Index(42).Interface())        // @types *int
-	print(rv1.Index(42).Interface().(*int)) // @pointsto main.a
+	print(rv1.Index(42).Interface().(*int)) // @pointsto command-line-arguments.a
 
 	array := [10]*int{&a}
 	rv2 := reflect.ValueOf(array)
 	print(rv2.Index(42).Interface())        // @types *int
-	print(rv2.Index(42).Interface().(*int)) // @pointsto main.a
+	print(rv2.Index(42).Interface().(*int)) // @pointsto command-line-arguments.a
 
 	rv3 := reflect.ValueOf("string")
 	print(rv3.Index(42).Interface()) // @types rune
@@ -97,9 +98,9 @@
 	var iface interface{} = &a
 	rv1 := reflect.ValueOf(&iface).Elem()
 	print(rv1.Interface())               // @types *int
-	print(rv1.Interface().(*int))        // @pointsto main.a
+	print(rv1.Interface().(*int))        // @pointsto command-line-arguments.a
 	print(rv1.Elem().Interface())        // @types *int
-	print(rv1.Elem().Interface().(*int)) // @pointsto main.a
+	print(rv1.Elem().Interface().(*int)) // @pointsto command-line-arguments.a
 
 	print(reflect.ValueOf(new(interface{})).Elem().Elem()) // @types
 
@@ -107,7 +108,7 @@
 	ptr := &a
 	rv2 := reflect.ValueOf(&ptr)
 	print(rv2.Elem().Interface())        // @types *int
-	print(rv2.Elem().Interface().(*int)) // @pointsto main.a
+	print(rv2.Elem().Interface().(*int)) // @pointsto command-line-arguments.a
 
 	// No other type works with (rV).Elem, not even those that
 	// work with (rT).Elem: slice, array, map, chan.
diff --git a/go/pointer/testdata/arrays.go b/go/pointer/testdata/arrays.go
index e57a15b..96498f5 100644
--- a/go/pointer/testdata/arrays.go
+++ b/go/pointer/testdata/arrays.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -14,10 +15,10 @@
 	sliceB = append(sliceB, &b) // @line a1append
 
 	print(sliceA)    // @pointsto makeslice@a1make:16
-	print(sliceA[0]) // @pointsto main.a
+	print(sliceA[0]) // @pointsto command-line-arguments.a
 
 	print(sliceB)      // @pointsto append@a1append:17
-	print(sliceB[100]) // @pointsto main.b
+	print(sliceB[100]) // @pointsto command-line-arguments.b
 }
 
 func array2() {
@@ -27,10 +28,10 @@
 	sliceB := sliceA[:]
 
 	print(sliceA)    // @pointsto makeslice@a2make:16
-	print(sliceA[0]) // @pointsto main.a
+	print(sliceA[0]) // @pointsto command-line-arguments.a
 
 	print(sliceB)    // @pointsto makeslice@a2make:16
-	print(sliceB[0]) // @pointsto main.a
+	print(sliceB[0]) // @pointsto command-line-arguments.a
 }
 
 func array3() {
@@ -65,18 +66,18 @@
 	sl4a := append(sl4)  // @line a4L4
 	print(sl4a)          // @pointsto slicelit@a4L3:18 | append@a4L4:16
 	print(&sl4a[0])      // @pointsto slicelit[*]@a4L3:18 | append[*]@a4L4:16
-	print(sl4a[0])       // @pointsto main.a
+	print(sl4a[0])       // @pointsto command-line-arguments.a
 
 	var sl5 = []*int{&b} // @line a4L5
 	copy(sl5, sl4)
 	print(sl5)     // @pointsto slicelit@a4L5:18
 	print(&sl5[0]) // @pointsto slicelit[*]@a4L5:18
-	print(sl5[0])  // @pointsto main.b | main.a
+	print(sl5[0])  // @pointsto command-line-arguments.b | command-line-arguments.a
 
 	var sl6 = sl5[:0]
 	print(sl6)     // @pointsto slicelit@a4L5:18
 	print(&sl6[0]) // @pointsto slicelit[*]@a4L5:18
-	print(sl6[0])  // @pointsto main.b | main.a
+	print(sl6[0])  // @pointsto command-line-arguments.b | command-line-arguments.a
 }
 
 func array5() {
@@ -85,7 +86,7 @@
 	arr[1] = &b
 
 	var n int
-	print(arr[n]) // @pointsto main.a | main.b
+	print(arr[n]) // @pointsto command-line-arguments.a | command-line-arguments.b
 }
 
 func main() {
diff --git a/go/pointer/testdata/arrays_go117.go b/go/pointer/testdata/arrays_go117.go
index 7a66f67..7ad9f5f 100644
--- a/go/pointer/testdata/arrays_go117.go
+++ b/go/pointer/testdata/arrays_go117.go
@@ -18,10 +18,10 @@
 	sliceB = append(sliceB, &b) // @line a1append
 
 	print(sliceA)    // @pointsto makeslice@a1make:16
-	print(sliceA[0]) // @pointsto main.a
+	print(sliceA[0]) // @pointsto command-line-arguments.a
 
 	print(sliceB)      // @pointsto append@a1append:17
-	print(sliceB[100]) // @pointsto main.b
+	print(sliceB[100]) // @pointsto command-line-arguments.b
 }
 
 func array2() {
@@ -31,10 +31,10 @@
 	sliceB := sliceA[:]
 
 	print(sliceA)    // @pointsto makeslice@a2make:16
-	print(sliceA[0]) // @pointsto main.a
+	print(sliceA[0]) // @pointsto command-line-arguments.a
 
 	print(sliceB)    // @pointsto makeslice@a2make:16
-	print(sliceB[0]) // @pointsto main.a
+	print(sliceB[0]) // @pointsto command-line-arguments.a
 }
 
 func array3() {
@@ -69,18 +69,18 @@
 	sl4a := append(sl4)  // @line a4L4
 	print(sl4a)          // @pointsto slicelit@a4L3:18 | append@a4L4:16
 	print(&sl4a[0])      // @pointsto slicelit[*]@a4L3:18 | append[*]@a4L4:16
-	print(sl4a[0])       // @pointsto main.a
+	print(sl4a[0])       // @pointsto command-line-arguments.a
 
 	var sl5 = []*int{&b} // @line a4L5
 	copy(sl5, sl4)
 	print(sl5)     // @pointsto slicelit@a4L5:18
 	print(&sl5[0]) // @pointsto slicelit[*]@a4L5:18
-	print(sl5[0])  // @pointsto main.b | main.a
+	print(sl5[0])  // @pointsto command-line-arguments.b | command-line-arguments.a
 
 	var sl6 = sl5[:0]
 	print(sl6)     // @pointsto slicelit@a4L5:18
 	print(&sl6[0]) // @pointsto slicelit[*]@a4L5:18
-	print(sl6[0])  // @pointsto main.b | main.a
+	print(sl6[0])  // @pointsto command-line-arguments.b | command-line-arguments.a
 }
 
 func array5() {
@@ -89,7 +89,7 @@
 	arr[1] = &b
 
 	var n int
-	print(arr[n]) // @pointsto main.a | main.b
+	print(arr[n]) // @pointsto command-line-arguments.a | command-line-arguments.b
 }
 
 func array6() {
@@ -99,35 +99,35 @@
 	ap0 := (*[1]*int)(sl0)
 	ar0 := *ap0
 
-	print(ap0[n]) // @pointsto main.a
-	print(sl0[n]) // @pointsto main.a
-	print(ar0[n]) // @pointsto main.a
+	print(ap0[n]) // @pointsto command-line-arguments.a
+	print(sl0[n]) // @pointsto command-line-arguments.a
+	print(ar0[n]) // @pointsto command-line-arguments.a
 
 	sl1 := []*int{&a}
 	ap1 := (*[1]*int)(sl1)
 	ar1 := *ap1
 
 	ar1[0] = &b
-	print(ap1[n]) // @pointsto main.a
-	print(sl1[n]) // @pointsto main.a
-	print(ar1[n]) // @pointsto main.a | main.b
+	print(ap1[n]) // @pointsto command-line-arguments.a
+	print(sl1[n]) // @pointsto command-line-arguments.a
+	print(ar1[n]) // @pointsto command-line-arguments.a | command-line-arguments.b
 
 	sl2 := []*int{&a}
 	ap2 := (*[1]*int)(sl2)
 	ar2 := *ap2
 
 	ap2[0] = &b
-	print(ap2[n]) // @pointsto main.a | main.b
-	print(sl2[n]) // @pointsto main.a | main.b
-	print(ar2[n]) // @pointsto main.a | main.b
+	print(ap2[n]) // @pointsto command-line-arguments.a | command-line-arguments.b
+	print(sl2[n]) // @pointsto command-line-arguments.a | command-line-arguments.b
+	print(ar2[n]) // @pointsto command-line-arguments.a | command-line-arguments.b
 
 	sl3 := []*int{&b, nil}
 	ap3 := (*[1]*int)(sl3)
 	ar3 := *ap3
 
-	print(sl3[n]) // @pointsto main.b
-	print(ap3[n]) // @pointsto main.b
-	print(ar3[n]) // @pointsto main.b
+	print(sl3[n]) // @pointsto command-line-arguments.b
+	print(ap3[n]) // @pointsto command-line-arguments.b
+	print(ar3[n]) // @pointsto command-line-arguments.b
 }
 
 func array7() {
@@ -139,9 +139,9 @@
 
 	ap1[0] = &a
 
-	print(sl0[n]) // @pointsto main.a
-	print(ap0[n]) // @pointsto main.a
-	print(ap1[n]) // @pointsto main.a
+	print(sl0[n]) // @pointsto command-line-arguments.a
+	print(ap0[n]) // @pointsto command-line-arguments.a
+	print(ap1[n]) // @pointsto command-line-arguments.a
 }
 
 func array8() {
@@ -153,12 +153,12 @@
 	pa2 := (*[1]*int)(sl2)
 	sl1[0] = &a
 	sl2[0] = &b
-	print(pa1[n]) // @pointsto main.a
-	print(pa2[n]) // @pointsto main.b
+	print(pa1[n]) // @pointsto command-line-arguments.a
+	print(pa2[n]) // @pointsto command-line-arguments.b
 
 	pa2 = pa1
-	print(pa1[n]) // @pointsto main.a
-	print(pa2[n]) // @pointsto main.a
+	print(pa1[n]) // @pointsto command-line-arguments.a
+	print(pa2[n]) // @pointsto command-line-arguments.a
 }
 
 func main() {
diff --git a/go/pointer/testdata/channels.go b/go/pointer/testdata/channels.go
index 377b68a..c4f5150 100644
--- a/go/pointer/testdata/channels.go
+++ b/go/pointer/testdata/channels.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -16,10 +17,10 @@
 	chB <- func(int) int { return 1 }
 
 	print(chA)   // @pointsto makechan@c1makeA:13
-	print(<-chA) // @pointsto main.incr
+	print(<-chA) // @pointsto command-line-arguments.incr
 
 	print(chB)   // @pointsto makechan@c1makeB:13
-	print(<-chB) // @pointsto main.decr | main.chan1$1
+	print(<-chB) // @pointsto command-line-arguments.decr | command-line-arguments.chan1$1
 }
 
 func chan2() {
@@ -37,18 +38,18 @@
 	}
 
 	print(chA)   // @pointsto makechan@c2makeA:13
-	print(<-chA) // @pointsto main.incr
+	print(<-chA) // @pointsto command-line-arguments.incr
 
 	print(chB)   // @pointsto makechan@c2makeB:13
-	print(<-chB) // @pointsto main.decr | main.chan2$1
+	print(<-chB) // @pointsto command-line-arguments.decr | command-line-arguments.chan2$1
 
 	print(chAB)   // @pointsto makechan@c2makeA:13 | makechan@c2makeB:13
-	print(<-chAB) // @pointsto main.incr | main.decr | main.chan2$1
+	print(<-chAB) // @pointsto command-line-arguments.incr | command-line-arguments.decr | command-line-arguments.chan2$1
 
 	(<-chA)(3)
 }
 
-// @calls main.chan2 -> main.incr
+// @calls command-line-arguments.chan2 -> command-line-arguments.incr
 
 func chan3() {
 	chA := make(chan func(int) int, 0) // @line c3makeA
@@ -57,14 +58,14 @@
 	chB <- decr
 	chB <- func(int) int { return 1 }
 	print(chA)   // @pointsto makechan@c3makeA:13
-	print(<-chA) // @pointsto main.incr
+	print(<-chA) // @pointsto command-line-arguments.incr
 	print(chB)   // @pointsto makechan@c3makeB:13
-	print(<-chB) // @pointsto main.decr | main.chan3$1
+	print(<-chB) // @pointsto command-line-arguments.decr | command-line-arguments.chan3$1
 
 	(<-chA)(3)
 }
 
-// @calls main.chan3 -> main.incr
+// @calls command-line-arguments.chan3 -> command-line-arguments.incr
 
 func chan4() {
 	chA := make(chan func(int) int, 0) // @line c4makeA
@@ -74,16 +75,16 @@
 	case chA <- incr:
 	case chB <- decr:
 	case a := <-chA:
-		print(a) // @pointsto main.incr
+		print(a) // @pointsto command-line-arguments.incr
 	case b := <-chB:
-		print(b) // @pointsto main.decr
+		print(b) // @pointsto command-line-arguments.decr
 	default:
 		print(chA) // @pointsto makechan@c4makeA:13
 		print(chB) // @pointsto makechan@c4makeB:13
 	}
 
 	for k := range chA {
-		print(k) // @pointsto main.incr
+		print(k) // @pointsto command-line-arguments.incr
 	}
 	// Exercise constraint generation (regtest for a crash).
 	for range chA {
diff --git a/go/pointer/testdata/chanreflect.go b/go/pointer/testdata/chanreflect.go
index 7d22efe..21f78b6 100644
--- a/go/pointer/testdata/chanreflect.go
+++ b/go/pointer/testdata/chanreflect.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -14,7 +15,7 @@
 	crv.Send(reflect.ValueOf(&a))
 	print(crv.Interface())             // @types chan *int
 	print(crv.Interface().(chan *int)) // @pointsto makechan@cr1make:12
-	print(<-ch)                        // @pointsto main.a
+	print(<-ch)                        // @pointsto command-line-arguments.a
 }
 
 func chanreflect1i() {
@@ -24,7 +25,7 @@
 	reflect.ValueOf(ch).Send(reflect.ValueOf(&a))
 	v := <-ch
 	print(v)        // @types *int
-	print(v.(*int)) // @pointsto main.a
+	print(v.(*int)) // @pointsto command-line-arguments.a
 }
 
 func chanreflect2() {
@@ -33,7 +34,7 @@
 	crv := reflect.ValueOf(ch)
 	r, _ := crv.Recv()
 	print(r.Interface())        // @types *int
-	print(r.Interface().(*int)) // @pointsto main.b
+	print(r.Interface().(*int)) // @pointsto command-line-arguments.b
 }
 
 func chanOfRecv() {
@@ -60,8 +61,8 @@
 	ch.Send(reflect.ValueOf(&b))
 	ch.Interface().(chan *int) <- &a
 	r, _ := ch.Recv()
-	print(r.Interface().(*int))         // @pointsto main.a | main.b
-	print(<-ch.Interface().(chan *int)) // @pointsto main.a | main.b
+	print(r.Interface().(*int))         // @pointsto command-line-arguments.a | command-line-arguments.b
+	print(<-ch.Interface().(chan *int)) // @pointsto command-line-arguments.a | command-line-arguments.b
 }
 
 var unknownDir reflect.ChanDir // not a constant
diff --git a/go/pointer/testdata/context.go b/go/pointer/testdata/context.go
index ed616e7..b76c200 100644
--- a/go/pointer/testdata/context.go
+++ b/go/pointer/testdata/context.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -16,29 +17,29 @@
 	var t1, t2 T
 	t1.SetX(&a)
 	t2.SetX(&b)
-	print(t1.GetX()) // @pointsto main.a
-	print(t2.GetX()) // @pointsto main.b
+	print(t1.GetX()) // @pointsto command-line-arguments.a
+	print(t2.GetX()) // @pointsto command-line-arguments.b
 }
 
 func context2() {
 	id := func(x *int) *int {
-		print(x) // @pointsto main.a | main.b
+		print(x) // @pointsto command-line-arguments.a | command-line-arguments.b
 		return x
 	}
-	print(id(&a)) // @pointsto main.a
-	print(id(&b)) // @pointsto main.b
+	print(id(&a)) // @pointsto command-line-arguments.a
+	print(id(&b)) // @pointsto command-line-arguments.b
 
 	// Same again, but anon func has free vars.
 	var c int // @line context2c
 	id2 := func(x *int) (*int, *int) {
-		print(x) // @pointsto main.a | main.b
+		print(x) // @pointsto command-line-arguments.a | command-line-arguments.b
 		return x, &c
 	}
 	p, q := id2(&a)
-	print(p) // @pointsto main.a
+	print(p) // @pointsto command-line-arguments.a
 	print(q) // @pointsto c@context2c:6
 	r, s := id2(&b)
-	print(r) // @pointsto main.b
+	print(r) // @pointsto command-line-arguments.b
 	print(s) // @pointsto c@context2c:6
 }
 
diff --git a/go/pointer/testdata/conv.go b/go/pointer/testdata/conv.go
index 692f0ce..5ef1fdf 100644
--- a/go/pointer/testdata/conv.go
+++ b/go/pointer/testdata/conv.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -26,23 +27,23 @@
 	// Conversion of same underlying types.
 	type PI *int
 	pi := PI(&a)
-	print(pi) // @pointsto main.a
+	print(pi) // @pointsto command-line-arguments.a
 
 	pint := (*int)(pi)
-	print(pint) // @pointsto main.a
+	print(pint) // @pointsto command-line-arguments.a
 
 	// Conversions between pointers to identical base types.
 	var y *PI = &pi
 	var x **int = (**int)(y)
-	print(*x) // @pointsto main.a
-	print(*y) // @pointsto main.a
+	print(*x) // @pointsto command-line-arguments.a
+	print(*y) // @pointsto command-line-arguments.a
 	y = (*PI)(x)
-	print(*y) // @pointsto main.a
+	print(*y) // @pointsto command-line-arguments.a
 }
 
 func conv4() {
 	// Handling of unsafe.Pointer conversion is unsound:
-	// we lose the alias to main.a and get something like new(int) instead.
+	// we lose the alias to command-line-arguments.a and get something like new(int) instead.
 	p := (*int)(unsafe.Pointer(&a)) // @line c2p
 	print(p)                        // @pointsto convert@c2p:13
 }
diff --git a/go/pointer/testdata/extended.go b/go/pointer/testdata/extended.go
index b3dd203..a95449c 100644
--- a/go/pointer/testdata/extended.go
+++ b/go/pointer/testdata/extended.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -17,5 +18,5 @@
 
 func main() {
 	x := fn()
-	print(x) // @pointstoquery <-(*x[i].a)[key] main.a
+	print(x) // @pointstoquery <-(*x[i].a)[key] command-line-arguments.a
 }
diff --git a/go/pointer/testdata/finalizer.go b/go/pointer/testdata/finalizer.go
index 97f25c9..7ee03da 100644
--- a/go/pointer/testdata/finalizer.go
+++ b/go/pointer/testdata/finalizer.go
@@ -17,8 +17,8 @@
 	runtime.SetFinalizer(x, final1b) // param type mismatch: no effect
 }
 
-// @calls main.runtimeSetFinalizer1 -> main.final1a
-// @calls main.runtimeSetFinalizer1 -> main.final1b
+// @calls command-line-arguments.runtimeSetFinalizer1 -> command-line-arguments.final1a
+// @calls command-line-arguments.runtimeSetFinalizer1 -> command-line-arguments.final1b
 
 func final2a(x *bool) {
 	print(x) // @pointsto new@newbool1:10 | new@newbool2:10
@@ -38,8 +38,8 @@
 	runtime.SetFinalizer(x, f)
 }
 
-// @calls main.runtimeSetFinalizer2 -> main.final2a
-// @calls main.runtimeSetFinalizer2 -> main.final2b
+// @calls command-line-arguments.runtimeSetFinalizer2 -> command-line-arguments.final2a
+// @calls command-line-arguments.runtimeSetFinalizer2 -> command-line-arguments.final2b
 
 type T int
 
@@ -52,7 +52,7 @@
 	runtime.SetFinalizer(x, (*T).finalize)
 }
 
-// @calls main.runtimeSetFinalizer3 -> (*main.T).finalize$thunk
+// @calls command-line-arguments.runtimeSetFinalizer3 -> (*command-line-arguments.T).finalize$thunk
 
 // I hope I never live to see this code in the wild.
 var setFinalizer = runtime.SetFinalizer
@@ -75,8 +75,8 @@
 	runtime.SetFinalizer((*T).finalize, nil) // f is a non-pointer
 }
 
-// @calls main.runtimeSetFinalizerIndirect -> runtime.SetFinalizer
-// @calls runtime.SetFinalizer -> main.final4
+// @calls command-line-arguments.runtimeSetFinalizerIndirect -> runtime.SetFinalizer
+// @calls runtime.SetFinalizer -> command-line-arguments.final4
 
 func main() {
 	runtimeSetFinalizer1()
diff --git a/go/pointer/testdata/flow.go b/go/pointer/testdata/flow.go
index 6fb599e..9e8ce93 100644
--- a/go/pointer/testdata/flow.go
+++ b/go/pointer/testdata/flow.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -18,10 +19,10 @@
 	if somepred {
 		r = s
 	}
-	print(s) // @pointsto main.f1
-	print(p) // @pointsto main.f2
-	print(q) // @pointsto main.f2
-	print(r) // @pointsto main.f1 | main.f2
+	print(s) // @pointsto command-line-arguments.f1
+	print(p) // @pointsto command-line-arguments.f2
+	print(q) // @pointsto command-line-arguments.f2
+	print(r) // @pointsto command-line-arguments.f1 | command-line-arguments.f2
 }
 
 // Tracking concrete types in interfaces.
@@ -50,10 +51,10 @@
 	if somepred {
 		r = s
 	}
-	print(s) // @pointsto main.g1
-	print(p) // @pointsto main.g2
-	print(q) // @pointsto main.g2
-	print(r) // @pointsto main.g2 | main.g1
+	print(s) // @pointsto command-line-arguments.g1
+	print(p) // @pointsto command-line-arguments.g2
+	print(q) // @pointsto command-line-arguments.g2
+	print(r) // @pointsto command-line-arguments.g2 | command-line-arguments.g1
 }
 
 func main() {
diff --git a/go/pointer/testdata/fmtexcerpt.go b/go/pointer/testdata/fmtexcerpt.go
index ee2a0e7..422e31d 100644
--- a/go/pointer/testdata/fmtexcerpt.go
+++ b/go/pointer/testdata/fmtexcerpt.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 // This is a slice of the fmt package.
@@ -39,4 +40,4 @@
 	Println("Hello, World!", S(0))
 }
 
-// @calls (*main.pp).doPrint -> (main.S).String
+// @calls (*command-line-arguments.pp).doPrint -> (command-line-arguments.S).String
diff --git a/go/pointer/testdata/func.go b/go/pointer/testdata/func.go
index 2155f8e..11a7138 100644
--- a/go/pointer/testdata/func.go
+++ b/go/pointer/testdata/func.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -23,14 +24,14 @@
 		return f(x)
 	}
 
-	print(g(&a)) // @pointsto main.a | main.b | h@f1h:6
-	print(f(&a)) // @pointsto main.a | main.b
-	print(&a)    // @pointsto main.a
+	print(g(&a)) // @pointsto command-line-arguments.a | command-line-arguments.b | h@f1h:6
+	print(f(&a)) // @pointsto command-line-arguments.a | command-line-arguments.b
+	print(&a)    // @pointsto command-line-arguments.a
 }
 
-// @calls main.func1 -> main.func1$2
-// @calls main.func1 -> main.func1$1
-// @calls main.func1$2 ->  main.func1$1
+// @calls command-line-arguments.func1 -> command-line-arguments.func1$2
+// @calls command-line-arguments.func1 -> command-line-arguments.func1$1
+// @calls command-line-arguments.func1$2 ->  command-line-arguments.func1$1
 
 func func2() {
 	var x, y *int
@@ -40,8 +41,8 @@
 	go func() {
 		y = &b
 	}()
-	print(x) // @pointsto main.a
-	print(y) // @pointsto main.b
+	print(x) // @pointsto command-line-arguments.a
+	print(y) // @pointsto command-line-arguments.b
 }
 
 func func3() {
@@ -53,8 +54,8 @@
 		}
 		return
 	}()
-	print(x) // @pointsto main.a
-	print(y) // @pointsto main.b | main.c
+	print(x) // @pointsto command-line-arguments.a
+	print(y) // @pointsto command-line-arguments.b | command-line-arguments.c
 }
 
 func swap(x, y *int) (*int, *int) { // @line swap
@@ -74,26 +75,26 @@
 	print(q) // @pointsto makeslice[*]@func4make:11
 
 	f := &b
-	print(f) // @pointsto main.b
+	print(f) // @pointsto command-line-arguments.b
 }
 
 type T int
 
 func (t *T) f(x *int) *int {
-	print(t) // @pointsto main.a
-	print(x) // @pointsto main.c
+	print(t) // @pointsto command-line-arguments.a
+	print(x) // @pointsto command-line-arguments.c
 	return &b
 }
 
 func (t *T) g(x *int) *int {
-	print(t) // @pointsto main.a
-	print(x) // @pointsto main.b
+	print(t) // @pointsto command-line-arguments.a
+	print(x) // @pointsto command-line-arguments.b
 	return &c
 }
 
 func (t *T) h(x *int) *int {
-	print(t) // @pointsto main.a
-	print(x) // @pointsto main.b
+	print(t) // @pointsto command-line-arguments.a
+	print(x) // @pointsto command-line-arguments.b
 	return &c
 }
 
@@ -102,29 +103,29 @@
 func func5() {
 	// Static call of method.
 	t := (*T)(&a)
-	print(t.f(&c)) // @pointsto main.b
+	print(t.f(&c)) // @pointsto command-line-arguments.b
 
 	// Static call of method as function
-	print((*T).g(t, &b)) // @pointsto main.c
+	print((*T).g(t, &b)) // @pointsto command-line-arguments.c
 
 	// Dynamic call (not invoke) of method.
 	h = (*T).h
-	print(h(t, &b)) // @pointsto main.c
+	print(h(t, &b)) // @pointsto command-line-arguments.c
 }
 
-// @calls main.func5 -> (*main.T).f
-// @calls main.func5 -> (*main.T).g$thunk
-// @calls main.func5 -> (*main.T).h$thunk
+// @calls command-line-arguments.func5 -> (*command-line-arguments.T).f
+// @calls command-line-arguments.func5 -> (*command-line-arguments.T).g$thunk
+// @calls command-line-arguments.func5 -> (*command-line-arguments.T).h$thunk
 
 func func6() {
 	A := &a
 	f := func() *int {
 		return A // (free variable)
 	}
-	print(f()) // @pointsto main.a
+	print(f()) // @pointsto command-line-arguments.a
 }
 
-// @calls main.func6 -> main.func6$1
+// @calls command-line-arguments.func6 -> command-line-arguments.func6$1
 
 type I interface {
 	f()
@@ -138,18 +139,18 @@
 	var i I = D{}
 	imethodClosure := i.f
 	imethodClosure()
-	// @calls main.func7 -> (main.I).f$bound
-	// @calls (main.I).f$bound -> (main.D).f
+	// @calls command-line-arguments.func7 -> (command-line-arguments.I).f$bound
+	// @calls (command-line-arguments.I).f$bound -> (command-line-arguments.D).f
 
 	var d D
 	cmethodClosure := d.f
 	cmethodClosure()
-	// @calls main.func7 -> (main.D).f$bound
-	// @calls (main.D).f$bound ->(main.D).f
+	// @calls command-line-arguments.func7 -> (command-line-arguments.D).f$bound
+	// @calls (command-line-arguments.D).f$bound ->(command-line-arguments.D).f
 
 	methodExpr := D.f
 	methodExpr(d)
-	// @calls main.func7 -> (main.D).f$thunk
+	// @calls command-line-arguments.func7 -> (command-line-arguments.D).f$thunk
 }
 
 func func8(x ...int) {
@@ -182,11 +183,11 @@
 		i.f() // must not crash the solver
 	}(new(D))
 
-	print(e.x1) // @pointsto main.a
-	print(e.x2) // @pointsto main.a
-	print(e.x3) // @pointsto main.a
-	print(e.x4) // @pointsto main.a
-	print(e.x5) // @pointsto main.a
+	print(e.x1) // @pointsto command-line-arguments.a
+	print(e.x2) // @pointsto command-line-arguments.a
+	print(e.x3) // @pointsto command-line-arguments.a
+	print(e.x4) // @pointsto command-line-arguments.a
+	print(e.x5) // @pointsto command-line-arguments.a
 }
 
 func main() {
@@ -201,5 +202,5 @@
 	func9()
 }
 
-// @calls <root> -> main.main
-// @calls <root> -> main.init
+// @calls <root> -> command-line-arguments.main
+// @calls <root> -> command-line-arguments.init
diff --git a/go/pointer/testdata/funcreflect.go b/go/pointer/testdata/funcreflect.go
index a0a9a5f..2b4315b 100644
--- a/go/pointer/testdata/funcreflect.go
+++ b/go/pointer/testdata/funcreflect.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -8,7 +9,7 @@
 var false2 bool
 
 func f(p *int, q hasF) *int {
-	print(p)      // @pointsto main.a
+	print(p)      // @pointsto command-line-arguments.a
 	print(q)      // @types *T
 	print(q.(*T)) // @pointsto new@newT1:22
 	return &b
@@ -26,10 +27,10 @@
 		reflect.ValueOf(&a),
 	})
 	print(res[0].Interface())        // @types *int
-	print(res[0].Interface().(*int)) // @pointsto main.b
+	print(res[0].Interface().(*int)) // @pointsto command-line-arguments.b
 }
 
-// @calls main.reflectValueCall -> main.f
+// @calls command-line-arguments.reflectValueCall -> command-line-arguments.f
 
 func reflectValueCallIndirect() {
 	rvf := reflect.ValueOf(g)
@@ -45,14 +46,14 @@
 	})
 	res0 := res[0].Interface()
 	print(res0)         // @types *int | *bool | *T
-	print(res0.(*int))  // @pointsto main.b
-	print(res0.(*bool)) // @pointsto main.false2
+	print(res0.(*int))  // @pointsto command-line-arguments.b
+	print(res0.(*bool)) // @pointsto command-line-arguments.false2
 	print(res0.(hasF))  // @types *T
 	print(res0.(*T))    // @pointsto new@newT2:19
 }
 
-// @calls main.reflectValueCallIndirect -> (reflect.Value).Call$bound
-// @calls (reflect.Value).Call$bound -> main.g
+// @calls command-line-arguments.reflectValueCallIndirect -> (reflect.Value).Call$bound
+// @calls (reflect.Value).Call$bound -> command-line-arguments.g
 
 func reflectTypeInOut() {
 	var f func(float64, bool) (string, int)
@@ -94,17 +95,17 @@
 
 	F, _ := TU.MethodByName("F")
 	print(reflect.Zero(F.Type)) // @types func(T) | func(U, int)
-	print(F.Func)               // @pointsto (main.T).F | (main.U).F
+	print(F.Func)               // @pointsto (command-line-arguments.T).F | (command-line-arguments.U).F
 
 	g, _ := TU.MethodByName("g")
 	print(reflect.Zero(g.Type)) // @types func(T, int) | func(U, string)
-	print(g.Func)               // @pointsto (main.T).g | (main.U).g
+	print(g.Func)               // @pointsto (command-line-arguments.T).g | (command-line-arguments.U).g
 
 	// Non-literal method names are treated less precisely.
 	U := reflect.TypeOf(U{})
 	X, _ := U.MethodByName(nonconst)
 	print(reflect.Zero(X.Type)) // @types func(U, int) | func(U, string)
-	print(X.Func)               // @pointsto (main.U).F | (main.U).g
+	print(X.Func)               // @pointsto (command-line-arguments.U).F | (command-line-arguments.U).g
 
 	// Interface methods.
 	rThasF := reflect.TypeOf(new(hasF)).Elem()
@@ -118,7 +119,7 @@
 func reflectTypeMethod() {
 	m := reflect.TypeOf(T{}).Method(0)
 	print(reflect.Zero(m.Type)) // @types func(T) | func(T, int)
-	print(m.Func)               // @pointsto (main.T).F | (main.T).g
+	print(m.Func)               // @pointsto (command-line-arguments.T).F | (command-line-arguments.T).g
 }
 
 func main() {
diff --git a/go/pointer/testdata/hello.go b/go/pointer/testdata/hello.go
index b81784b..3967cbe 100644
--- a/go/pointer/testdata/hello.go
+++ b/go/pointer/testdata/hello.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -12,7 +13,7 @@
 var theS S
 
 func (s *S) String() string {
-	print(s) // @pointsto main.theS
+	print(s) // @pointsto command-line-arguments.theS
 	return ""
 }
 
@@ -23,5 +24,5 @@
 	fmt.Println("Hello, World!", &theS)
 }
 
-// @calls main.main               -> fmt.Println
-// @calls (*fmt.pp).handleMethods -> (*main.S).String
+// @calls command-line-arguments.main               -> fmt.Println
+// @calls (*fmt.pp).handleMethods -> (*command-line-arguments.S).String
diff --git a/go/pointer/testdata/interfaces.go b/go/pointer/testdata/interfaces.go
index 91c0fa9..2312e13 100644
--- a/go/pointer/testdata/interfaces.go
+++ b/go/pointer/testdata/interfaces.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -34,13 +35,13 @@
 	print(j) // @types D
 	print(k) // @types *int | D
 
-	print(i.(*int)) // @pointsto main.a
+	print(i.(*int)) // @pointsto command-line-arguments.a
 	print(j.(*int)) // @pointsto
-	print(k.(*int)) // @pointsto main.a
+	print(k.(*int)) // @pointsto command-line-arguments.a
 
 	print(i.(D).ptr) // @pointsto
-	print(j.(D).ptr) // @pointsto main.b
-	print(k.(D).ptr) // @pointsto main.b
+	print(j.(D).ptr) // @pointsto command-line-arguments.b
+	print(k.(D).ptr) // @pointsto command-line-arguments.b
 }
 
 func interface2() {
@@ -54,21 +55,21 @@
 	print(i) // @types *C
 	print(j) // @types D
 	print(k) // @types *C | D
-	print(k) // @pointsto makeinterface:main.D | makeinterface:*main.C
+	print(k) // @pointsto makeinterface:command-line-arguments.D | makeinterface:*command-line-arguments.C
 
 	k.f()
-	// @calls main.interface2 -> (*main.C).f
-	// @calls main.interface2 -> (main.D).f
+	// @calls command-line-arguments.interface2 -> (*command-line-arguments.C).f
+	// @calls command-line-arguments.interface2 -> (command-line-arguments.D).f
 
-	print(i.(*C))    // @pointsto main.a
-	print(j.(D).ptr) // @pointsto main.a
-	print(k.(*C))    // @pointsto main.a
+	print(i.(*C))    // @pointsto command-line-arguments.a
+	print(j.(D).ptr) // @pointsto command-line-arguments.a
+	print(k.(*C))    // @pointsto command-line-arguments.a
 
 	switch x := k.(type) {
 	case *C:
-		print(x) // @pointsto main.a
+		print(x) // @pointsto command-line-arguments.a
 	case D:
-		print(x.ptr) // @pointsto main.a
+		print(x.ptr) // @pointsto command-line-arguments.a
 	case *E:
 		print(x) // @pointsto
 	}
@@ -94,15 +95,15 @@
 
 	j := i.(I)       // interface narrowing type-assertion
 	print(j)         // @types D
-	print(j.(D).ptr) // @pointsto main.a
+	print(j.(D).ptr) // @pointsto command-line-arguments.a
 
 	var l interface{} = j // interface widening assignment.
 	print(l)              // @types D
-	print(l.(D).ptr)      // @pointsto main.a
+	print(l.(D).ptr)      // @pointsto command-line-arguments.a
 
 	m := j.(interface{}) // interface widening type-assertion.
 	print(m)             // @types D
-	print(m.(D).ptr)     // @pointsto main.a
+	print(m.(D).ptr)     // @pointsto command-line-arguments.a
 }
 
 // Interface method calls and value flow:
@@ -128,19 +129,19 @@
 	print(j.f(&i)) // @pointsto p.x@i5p:6
 	print(&i)      // @pointsto i@i5i:6
 
-	print(j) // @pointsto makeinterface:*main.P
+	print(j) // @pointsto makeinterface:*command-line-arguments.P
 }
 
-// @calls main.interface5 -> (*main.P).f
+// @calls command-line-arguments.interface5 -> (*command-line-arguments.P).f
 
 func interface6() {
 	f := I.f
-	print(f) // @pointsto (main.I).f$thunk
+	print(f) // @pointsto (command-line-arguments.I).f$thunk
 	f(new(struct{ D }))
 }
 
-// @calls main.interface6 -> (main.I).f$thunk
-// @calls (main.I).f$thunk -> (*struct{main.D}).f
+// @calls command-line-arguments.interface6 -> (command-line-arguments.I).f$thunk
+// @calls (command-line-arguments.I).f$thunk -> (*struct{command-line-arguments.D}).f
 
 func main() {
 	interface1()
diff --git a/go/pointer/testdata/mapreflect.go b/go/pointer/testdata/mapreflect.go
index bc5e7e6..d8c1d5a 100644
--- a/go/pointer/testdata/mapreflect.go
+++ b/go/pointer/testdata/mapreflect.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -25,11 +26,11 @@
 		print(k)                    // @pointsto <alloc in (reflect.Value).MapKeys>
 		print(k)                    // @types *int
 		print(k.Interface())        // @types *int
-		print(k.Interface().(*int)) // @pointsto main.a
+		print(k.Interface().(*int)) // @pointsto command-line-arguments.a
 
 		v := mrv.MapIndex(k)
 		print(v.Interface())         // @types *bool
-		print(v.Interface().(*bool)) // @pointsto main.b
+		print(v.Interface().(*bool)) // @pointsto command-line-arguments.b
 	}
 }
 
@@ -38,11 +39,11 @@
 	mrv := reflect.ValueOf(m)
 	mrv.SetMapIndex(reflect.ValueOf(&a), reflect.ValueOf(&b))
 
-	print(m[nil]) // @pointsto main.b
+	print(m[nil]) // @pointsto command-line-arguments.b
 
 	for _, k := range mrv.MapKeys() {
 		print(k.Interface())        // @types *int
-		print(k.Interface().(*int)) // @pointsto main.a
+		print(k.Interface().(*int)) // @pointsto command-line-arguments.a
 	}
 
 	tmap := reflect.TypeOf(m)
@@ -71,9 +72,9 @@
 	reflect.ValueOf(m).SetMapIndex(reflect.ValueOf(&a), reflect.ValueOf(&b))
 	for k, v := range m {
 		print(k)         // @types *int
-		print(k.(*int))  // @pointsto main.a
+		print(k.(*int))  // @pointsto command-line-arguments.a
 		print(v)         // @types *bool
-		print(v.(*bool)) // @pointsto main.b
+		print(v.(*bool)) // @pointsto command-line-arguments.b
 	}
 }
 
diff --git a/go/pointer/testdata/maps.go b/go/pointer/testdata/maps.go
index f73a6ea..cce4a10 100644
--- a/go/pointer/testdata/maps.go
+++ b/go/pointer/testdata/maps.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -11,8 +12,8 @@
 	m2 := make(map[*int]*int)   // @line m1m2
 	m2[&b] = &a
 
-	print(m1[nil]) // @pointsto main.b | main.c
-	print(m2[nil]) // @pointsto main.a
+	print(m1[nil]) // @pointsto command-line-arguments.b | command-line-arguments.c
+	print(m2[nil]) // @pointsto command-line-arguments.a
 
 	print(m1) // @pointsto makemap@m1m1:21
 	print(m2) // @pointsto makemap@m1m2:12
@@ -20,19 +21,19 @@
 	m1[&b] = &c
 
 	for k, v := range m1 {
-		print(k) // @pointsto main.a | main.b
-		print(v) // @pointsto main.b | main.c
+		print(k) // @pointsto command-line-arguments.a | command-line-arguments.b
+		print(v) // @pointsto command-line-arguments.b | command-line-arguments.c
 	}
 
 	for k, v := range m2 {
-		print(k) // @pointsto main.b
-		print(v) // @pointsto main.a
+		print(k) // @pointsto command-line-arguments.b
+		print(v) // @pointsto command-line-arguments.a
 	}
 
 	// Lookup doesn't create any aliases.
-	print(m2[&c]) // @pointsto main.a
+	print(m2[&c]) // @pointsto command-line-arguments.a
 	if _, ok := m2[&a]; ok {
-		print(m2[&c]) // @pointsto main.a
+		print(m2[&c]) // @pointsto command-line-arguments.a
 	}
 }
 
@@ -41,8 +42,8 @@
 	m2 := map[*int]*int{&b: &c}
 	_ = []map[*int]*int{m1, m2} // (no spurious merging of m1, m2)
 
-	print(m1[nil]) // @pointsto main.b
-	print(m2[nil]) // @pointsto main.c
+	print(m1[nil]) // @pointsto command-line-arguments.b
+	print(m2[nil]) // @pointsto command-line-arguments.c
 }
 
 var g int
@@ -54,7 +55,7 @@
 	// v components, so copying the map key or value may cause
 	// miswiring if the key has >1 components.  In the worst case,
 	// this causes a crash.  The test below used to report that
-	// pts(v) includes not just main.g but new(float64) too, which
+	// pts(v) includes not just command-line-arguments.g but new(float64) too, which
 	// is ill-typed.
 
 	// sizeof(K) > 1, abstractly
@@ -63,7 +64,7 @@
 	m := map[K]*int{k: &g}
 
 	for _, v := range m {
-		print(v) // @pointsto main.g
+		print(v) // @pointsto command-line-arguments.g
 	}
 }
 
@@ -78,17 +79,17 @@
 	m := map[K]*int{k: &g}
 
 	for x, y := range m {
-		print(x.a) // @pointsto main.v
-		print(y)   // @pointsto main.g
+		print(x.a) // @pointsto command-line-arguments.v
+		print(y)   // @pointsto command-line-arguments.g
 	}
 	var i struct{ a *float64 }
 	for i, _ = range m {
-		print(i.a) // @pointsto main.v
+		print(i.a) // @pointsto command-line-arguments.v
 	}
 	var j interface{}
 	for _, j = range m {
 		// TODO support the statement `print(j.(*int))`
-		print(j) // @pointsto main.g
+		print(j) // @pointsto command-line-arguments.g
 	}
 	for _, _ = range m {
 	}
@@ -96,7 +97,7 @@
 	// effects of indexing
 	for _, j = range m {
 		// TODO support the statement `print(j.(*int))`
-		print(j) // @pointsto main.g
+		print(j) // @pointsto command-line-arguments.g
 	}
 }
 
diff --git a/go/pointer/testdata/panic.go b/go/pointer/testdata/panic.go
index ee8a766..3377d83 100644
--- a/go/pointer/testdata/panic.go
+++ b/go/pointer/testdata/panic.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -31,6 +32,6 @@
 	}
 	ex := recover()
 	print(ex)                 // @types myPanic | string | func(int) | func() string
-	print(ex.(func(int)))     // @pointsto main.f
-	print(ex.(func() string)) // @pointsto main.g
+	print(ex.(func(int)))     // @pointsto command-line-arguments.f
+	print(ex.(func() string)) // @pointsto command-line-arguments.g
 }
diff --git a/go/pointer/testdata/recur.go b/go/pointer/testdata/recur.go
index 4c7229d..0656763 100644
--- a/go/pointer/testdata/recur.go
+++ b/go/pointer/testdata/recur.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -8,4 +9,4 @@
 	main()
 }
 
-// @calls main.main -> main.main
+// @calls command-line-arguments.main -> command-line-arguments.main
diff --git a/go/pointer/testdata/reflect.go b/go/pointer/testdata/reflect.go
index 6b8d0f2..cf3195a 100644
--- a/go/pointer/testdata/reflect.go
+++ b/go/pointer/testdata/reflect.go
@@ -1,9 +1,12 @@
+//go:build ignore
 // +build ignore
 
 package main
 
-import "reflect"
-import "unsafe"
+import (
+	"reflect"
+	"unsafe"
+)
 
 var a, b int
 var unknown bool
@@ -11,9 +14,9 @@
 func reflectIndirect() {
 	ptr := &a
 	// Pointer:
-	print(reflect.Indirect(reflect.ValueOf(&ptr)).Interface().(*int)) // @pointsto main.a
+	print(reflect.Indirect(reflect.ValueOf(&ptr)).Interface().(*int)) // @pointsto command-line-arguments.a
 	// Non-pointer:
-	print(reflect.Indirect(reflect.ValueOf([]*int{ptr})).Interface().([]*int)[0]) // @pointsto main.a
+	print(reflect.Indirect(reflect.ValueOf([]*int{ptr})).Interface().([]*int)[0]) // @pointsto command-line-arguments.a
 }
 
 func reflectNewAt() {
@@ -21,7 +24,7 @@
 	print(reflect.NewAt(reflect.TypeOf(3), unsafe.Pointer(&x)).Interface()) // @types *int
 }
 
-// @warning "unsound: main.reflectNewAt contains a reflect.NewAt.. call"
+// @warning "unsound: command-line-arguments.reflectNewAt contains a reflect.NewAt.. call"
 
 func reflectTypeOf() {
 	t := reflect.TypeOf(3)
@@ -61,7 +64,7 @@
 	print(v1a)                             // @types reflect.Value
 	v0a := v1a.Interface().(reflect.Value) // unbox
 	print(v0a)                             // @types *int
-	print(v0a.Interface().(*int))          // @pointsto main.a
+	print(v0a.Interface().(*int))          // @pointsto command-line-arguments.a
 
 	// "box" an interface{} lvalue twice, unbox it twice.
 	var iface interface{} = 3
diff --git a/go/pointer/testdata/rtti.go b/go/pointer/testdata/rtti.go
index 88e1798..05b4a88 100644
--- a/go/pointer/testdata/rtti.go
+++ b/go/pointer/testdata/rtti.go
@@ -25,5 +25,5 @@
 
 func main() {
 	type Y struct{ X }
-	print(reflect.Indirect(reflect.ValueOf(new(Y))).Interface().(I).F()) // @pointsto main.a
+	print(reflect.Indirect(reflect.ValueOf(new(Y))).Interface().(I).F()) // @pointsto command-line-arguments.a
 }
diff --git a/go/pointer/testdata/structs.go b/go/pointer/testdata/structs.go
index 9036d60..085439e 100644
--- a/go/pointer/testdata/structs.go
+++ b/go/pointer/testdata/structs.go
@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 package main
@@ -12,12 +13,12 @@
 }
 
 func (a A) m1() {
-	print(a.f) // @pointsto main.p
+	print(a.f) // @pointsto command-line-arguments.p
 }
 
 func (a *A) m2() {
 	print(a)   // @pointsto complit.A@struct1s:9
-	print(a.f) // @pointsto main.p
+	print(a.f) // @pointsto command-line-arguments.p
 }
 
 type B struct {
@@ -32,21 +33,21 @@
 	b.f = &p
 	b.g = b
 
-	print(b.h) // @pointsto main.q
-	print(b.f) // @pointsto main.p
+	print(b.h) // @pointsto command-line-arguments.q
+	print(b.f) // @pointsto command-line-arguments.p
 	print(b.g) // @types *B
 
 	ptr := &b.f
-	print(*ptr) // @pointsto main.p
+	print(*ptr) // @pointsto command-line-arguments.p
 
 	b.m1()
 	b.m2()
 }
 
-// @calls main.structs1 -> (main.A).m1
-// @calls main.structs1 -> (*main.A).m2
-// @calls (*main.B).m1 -> (main.A).m1
-// @calls (*main.B).m2 -> (*main.A).m2
+// @calls command-line-arguments.structs1 -> (command-line-arguments.A).m1
+// @calls command-line-arguments.structs1 -> (*command-line-arguments.A).m2
+// @calls (*command-line-arguments.B).m1 -> (command-line-arguments.A).m1
+// @calls (*command-line-arguments.B).m2 -> (*command-line-arguments.A).m2
 
 type T struct {
 	x int
diff --git a/go/ssa/block.go b/go/ssa/block.go
new file mode 100644
index 0000000..35f3173
--- /dev/null
+++ b/go/ssa/block.go
@@ -0,0 +1,118 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import "fmt"
+
+// This file implements the BasicBlock type.
+
+// addEdge adds a control-flow graph edge from from to to.
+func addEdge(from, to *BasicBlock) {
+	from.Succs = append(from.Succs, to)
+	to.Preds = append(to.Preds, from)
+}
+
+// Parent returns the function that contains block b.
+func (b *BasicBlock) Parent() *Function { return b.parent }
+
+// String returns a human-readable label of this block.
+// It is not guaranteed unique within the function.
+//
+func (b *BasicBlock) String() string {
+	return fmt.Sprintf("%d", b.Index)
+}
+
+// emit appends an instruction to the current basic block.
+// If the instruction defines a Value, it is returned.
+//
+func (b *BasicBlock) emit(i Instruction) Value {
+	i.setBlock(b)
+	b.Instrs = append(b.Instrs, i)
+	v, _ := i.(Value)
+	return v
+}
+
+// predIndex returns the i such that b.Preds[i] == c or panics if
+// there is none.
+func (b *BasicBlock) predIndex(c *BasicBlock) int {
+	for i, pred := range b.Preds {
+		if pred == c {
+			return i
+		}
+	}
+	panic(fmt.Sprintf("no edge %s -> %s", c, b))
+}
+
+// hasPhi returns true if b.Instrs contains φ-nodes.
+func (b *BasicBlock) hasPhi() bool {
+	_, ok := b.Instrs[0].(*Phi)
+	return ok
+}
+
+// phis returns the prefix of b.Instrs containing all the block's φ-nodes.
+func (b *BasicBlock) phis() []Instruction {
+	for i, instr := range b.Instrs {
+		if _, ok := instr.(*Phi); !ok {
+			return b.Instrs[:i]
+		}
+	}
+	return nil // unreachable in well-formed blocks
+}
+
+// replacePred replaces all occurrences of p in b's predecessor list with q.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) replacePred(p, q *BasicBlock) {
+	for i, pred := range b.Preds {
+		if pred == p {
+			b.Preds[i] = q
+		}
+	}
+}
+
+// replaceSucc replaces all occurrences of p in b's successor list with q.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
+	for i, succ := range b.Succs {
+		if succ == p {
+			b.Succs[i] = q
+		}
+	}
+}
+
+// removePred removes all occurrences of p in b's
+// predecessor list and φ-nodes.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) removePred(p *BasicBlock) {
+	phis := b.phis()
+
+	// We must preserve edge order for φ-nodes.
+	j := 0
+	for i, pred := range b.Preds {
+		if pred != p {
+			b.Preds[j] = b.Preds[i]
+			// Strike out φ-edge too.
+			for _, instr := range phis {
+				phi := instr.(*Phi)
+				phi.Edges[j] = phi.Edges[i]
+			}
+			j++
+		}
+	}
+	// Nil out b.Preds[j:] and φ-edges[j:] to aid GC.
+	for i := j; i < len(b.Preds); i++ {
+		b.Preds[i] = nil
+		for _, instr := range phis {
+			instr.(*Phi).Edges[i] = nil
+		}
+	}
+	b.Preds = b.Preds[:j]
+	for _, instr := range phis {
+		phi := instr.(*Phi)
+		phi.Edges = phi.Edges[:j]
+	}
+}
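
The φ-node bookkeeping above is the delicate part of the new file: Phi.Edges is kept parallel to BasicBlock.Preds, so Edges[i] is the value flowing in from Preds[i], and removePred must shrink both slices in lockstep. A minimal sketch (not part of the patch) of checking that invariant through the exported go/ssa API:

package ssacheck

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// checkPhiEdges reports a violation of the invariant that removePred
// preserves: each φ-node carries exactly one incoming edge per
// predecessor block, with Edges[i] corresponding to Preds[i].
func checkPhiEdges(b *ssa.BasicBlock) error {
	for _, instr := range b.Instrs {
		phi, ok := instr.(*ssa.Phi)
		if !ok {
			break // φ-nodes always form a prefix of b.Instrs
		}
		if len(phi.Edges) != len(b.Preds) {
			return fmt.Errorf("block %s: φ-node %s has %d edges for %d preds",
				b, phi.Name(), len(phi.Edges), len(b.Preds))
		}
	}
	return nil
}
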
diff --git a/go/ssa/builder.go b/go/ssa/builder.go
index e1540db..ac85541 100644
--- a/go/ssa/builder.go
+++ b/go/ssa/builder.go
@@ -125,7 +125,7 @@
 	// T(e) = T(e.X) = T(e.Y) after untyped constants have been
 	// eliminated.
 	// TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
-	t := fn.Pkg.typeOf(e)
+	t := fn.typeOf(e)
 
 	var short Value // value of the short-circuit path
 	switch e.Op {
@@ -180,7 +180,7 @@
 // is token.ARROW).
 //
 func (b *builder) exprN(fn *Function, e ast.Expr) Value {
-	typ := fn.Pkg.typeOf(e).(*types.Tuple)
+	typ := fn.typeOf(e).(*types.Tuple)
 	switch e := e.(type) {
 	case *ast.ParenExpr:
 		return b.exprN(fn, e.X)
@@ -195,7 +195,7 @@
 		return fn.emit(&c)
 
 	case *ast.IndexExpr:
-		mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
+		mapt := fn.typeOf(e.X).Underlying().(*types.Map)
 		lookup := &Lookup{
 			X:       b.expr(fn, e.X),
 			Index:   emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
@@ -293,7 +293,7 @@
 		// We must still evaluate the value, though.  (If it
 		// was side-effect free, the whole call would have
 		// been constant-folded.)
-		t := deref(fn.Pkg.typeOf(args[0])).Underlying()
+		t := deref(fn.typeOf(args[0])).Underlying()
 		if at, ok := t.(*types.Array); ok {
 			b.expr(fn, args[0]) // for effects only
 			return intConst(at.Len())
@@ -340,15 +340,17 @@
 		if isBlankIdent(e) {
 			return blank{}
 		}
-		obj := fn.Pkg.objectOf(e)
-		v := fn.Prog.packageLevelValue(obj) // var (address)
-		if v == nil {
+		obj := fn.objectOf(e)
+		var v Value
+		if g := fn.Prog.packageLevelMember(obj); g != nil {
+			v = g.(*Global) // var (address)
+		} else {
 			v = fn.lookup(obj, escaping)
 		}
 		return &address{addr: v, pos: e.Pos(), expr: e}
 
 	case *ast.CompositeLit:
-		t := deref(fn.Pkg.typeOf(e))
+		t := deref(fn.typeOf(e))
 		var v *Alloc
 		if escaping {
 			v = emitNew(fn, t, e.Lbrace)
@@ -365,7 +367,7 @@
 		return b.addr(fn, e.X, escaping)
 
 	case *ast.SelectorExpr:
-		sel, ok := fn.Pkg.info.Selections[e]
+		sel, ok := fn.info.Selections[e]
 		if !ok {
 			// qualified identifier
 			return b.addr(fn, e.Sel, escaping)
@@ -385,7 +387,7 @@
 	case *ast.IndexExpr:
 		var x Value
 		var et types.Type
-		switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
+		switch t := fn.typeOf(e.X).Underlying().(type) {
 		case *types.Array:
 			x = b.addr(fn, e.X, escaping).address(fn)
 			et = types.NewPointer(t.Elem())
@@ -513,7 +515,7 @@
 func (b *builder) expr(fn *Function, e ast.Expr) Value {
 	e = unparen(e)
 
-	tv := fn.Pkg.info.Types[e]
+	tv := fn.info.Types[e]
 
 	// Is expression a constant?
 	if tv.Value != nil {
@@ -543,12 +545,13 @@
 	case *ast.FuncLit:
 		fn2 := &Function{
 			name:      fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
-			Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
+			Signature: fn.typeOf(e.Type).Underlying().(*types.Signature),
 			pos:       e.Type.Func,
 			parent:    fn,
 			Pkg:       fn.Pkg,
 			Prog:      fn.Prog,
 			syntax:    e,
+			info:      fn.info,
 		}
 		fn.AnonFuncs = append(fn.AnonFuncs, fn2)
 		b.buildFunction(fn2)
@@ -567,7 +570,7 @@
 		return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen)
 
 	case *ast.CallExpr:
-		if fn.Pkg.info.Types[e.Fun].IsType() {
+		if fn.info.Types[e.Fun].IsType() {
 			// Explicit type conversion, e.g. string(x) or big.Int(x)
 			x := b.expr(fn, e.Args[0])
 			y := emitConv(fn, x, tv.Type)
@@ -587,7 +590,7 @@
 		}
 		// Call to "intrinsic" built-ins, e.g. new, make, panic.
 		if id, ok := unparen(e.Fun).(*ast.Ident); ok {
-			if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok {
+			if obj, ok := fn.info.Uses[id].(*types.Builtin); ok {
 				if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil {
 					return v
 				}
@@ -645,7 +648,7 @@
 	case *ast.SliceExpr:
 		var low, high, max Value
 		var x Value
-		switch fn.Pkg.typeOf(e.X).Underlying().(type) {
+		switch fn.typeOf(e.X).Underlying().(type) {
 		case *types.Array:
 			// Potentially escaping.
 			x = b.addr(fn, e.X, true).address(fn)
@@ -674,7 +677,7 @@
 		return fn.emit(v)
 
 	case *ast.Ident:
-		obj := fn.Pkg.info.Uses[e]
+		obj := fn.info.Uses[e]
 		// Universal built-in or nil?
 		switch obj := obj.(type) {
 		case *types.Builtin:
@@ -683,20 +686,20 @@
 			return nilConst(tv.Type)
 		}
 		// Package-level func or var?
-		if v := fn.Prog.packageLevelValue(obj); v != nil {
-			if _, ok := obj.(*types.Var); ok {
-				return emitLoad(fn, v) // var (address)
+		if v := fn.Prog.packageLevelMember(obj); v != nil {
+			if g, ok := v.(*Global); ok {
+				return emitLoad(fn, g) // var (address)
 			}
-			return v // (func)
+			return v.(*Function) // (func)
 		}
 		// Local var.
 		return emitLoad(fn, fn.lookup(obj, false)) // var (address)
 
 	case *ast.SelectorExpr:
-		sel, ok := fn.Pkg.info.Selections[e]
+		sel, ok := fn.info.Selections[e]
 		if !ok {
 			// builtin unsafe.{Add,Slice}
-			if obj, ok := fn.Pkg.info.Uses[e.Sel].(*types.Builtin); ok {
+			if obj, ok := fn.info.Uses[e.Sel].(*types.Builtin); ok {
 				return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
 			}
 			// qualified identifier
@@ -742,7 +745,7 @@
 		panic("unexpected expression-relative selector")
 
 	case *ast.IndexExpr:
-		switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
+		switch t := fn.typeOf(e.X).Underlying().(type) {
 		case *types.Array:
 			// Non-addressable array (in a register).
 			v := &Index{
@@ -755,7 +758,7 @@
 
 		case *types.Map:
 			// Maps are not addressable.
-			mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
+			mapt := fn.typeOf(e.X).Underlying().(*types.Map)
 			v := &Lookup{
 				X:     b.expr(fn, e.X),
 				Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
@@ -810,7 +813,7 @@
 //
 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value {
 	var v Value
-	if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
+	if wantAddr && !sel.Indirect() && !isPointer(fn.typeOf(e)) {
 		v = b.addr(fn, e, escaping).address(fn)
 	} else {
 		v = b.expr(fn, e)
@@ -833,7 +836,7 @@
 
 	// Is this a method call?
 	if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
-		sel, ok := fn.Pkg.info.Selections[selector]
+		sel, ok := fn.info.Selections[selector]
 		if ok && sel.Kind() == types.MethodVal {
 			obj := sel.Obj().(*types.Func)
 			recv := recvType(obj)
@@ -968,7 +971,7 @@
 	b.setCallFunc(fn, e, c)
 
 	// Then append the other actual parameters.
-	sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature)
+	sig, _ := fn.typeOf(e.Fun).Underlying().(*types.Signature)
 	if sig == nil {
 		panic(fmt.Sprintf("no signature for call of %s", e.Fun))
 	}
@@ -1035,7 +1038,7 @@
 		var lval lvalue = blank{}
 		if !isBlankIdent(lhs) {
 			if isDef {
-				if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil {
+				if obj := fn.info.Defs[lhs.(*ast.Ident)]; obj != nil {
 					fn.addNamedLocal(obj)
 					isZero[i] = true
 				}
@@ -1103,7 +1106,7 @@
 // In that case, addr must hold a T, not a *T.
 //
 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
-	typ := deref(fn.Pkg.typeOf(e))
+	typ := deref(fn.typeOf(e))
 	switch t := typ.Underlying().(type) {
 	case *types.Struct:
 		if !isZero && len(e.Elts) != t.NumFields() {
@@ -1294,7 +1297,7 @@
 			// instead of BinOp(EQL, tag, b.expr(cond))
 			// followed by If.  Don't forget conversions
 			// though.
-			cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos)
+			cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos())
 			emitIf(fn, cond, body, nextCond)
 			fn.currentBlock = nextCond
 		}
@@ -1372,7 +1375,6 @@
 	// 	...SD...
 	// 	goto done
 	// .done:
-
 	if s.Init != nil {
 		b.stmt(fn, s.Init)
 	}
@@ -1402,10 +1404,10 @@
 		var ti Value // ti, ok := typeassert,ok x <Ti>
 		for _, cond := range cc.List {
 			next = fn.newBasicBlock("typeswitch.next")
-			casetype = fn.Pkg.typeOf(cond)
+			casetype = fn.typeOf(cond)
 			var condv Value
 			if casetype == tUntypedNil {
-				condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos)
+				condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), cond.Pos())
 				ti = x
 			} else {
 				yok := emitTypeTest(fn, x, casetype, cc.Case)
@@ -1431,7 +1433,7 @@
 }
 
 func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) {
-	if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
+	if obj := fn.info.Implicits[cc]; obj != nil {
 		// In a switch y := x.(type), each case clause
 		// implicitly declares a distinct object y.
 		// In a single-type case, y has that type.
@@ -1891,10 +1893,10 @@
 func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
 	var tk, tv types.Type
 	if s.Key != nil && !isBlankIdent(s.Key) {
-		tk = fn.Pkg.typeOf(s.Key)
+		tk = fn.typeOf(s.Key)
 	}
 	if s.Value != nil && !isBlankIdent(s.Value) {
-		tv = fn.Pkg.typeOf(s.Value)
+		tv = fn.typeOf(s.Value)
 	}
 
 	// If iteration variables are defined (:=), this
@@ -1997,7 +1999,7 @@
 		fn.emit(&Send{
 			Chan: b.expr(fn, s.Chan),
 			X: emitConv(fn, b.expr(fn, s.Value),
-				fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
+				fn.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
 			pos: s.Arrow,
 		})
 
@@ -2224,7 +2226,7 @@
 	if isBlankIdent(id) {
 		return // discard
 	}
-	fn := pkg.values[pkg.info.Defs[id]].(*Function)
+	fn := pkg.objects[pkg.info.Defs[id]].(*Function)
 	if decl.Recv == nil && id.Name == "init" {
 		var v Call
 		v.Call.Value = fn
@@ -2324,7 +2326,7 @@
 			// 1:1 initialization: var x, y = a(), b()
 			var lval lvalue
 			if v := varinit.Lhs[0]; v.Name() != "_" {
-				lval = &address{addr: p.values[v].(*Global), pos: v.Pos()}
+				lval = &address{addr: p.objects[v].(*Global), pos: v.Pos()}
 			} else {
 				lval = blank{}
 			}
@@ -2336,7 +2338,7 @@
 				if v.Name() == "_" {
 					continue
 				}
-				emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos())
+				emitStore(init, p.objects[v].(*Global), emitExtract(init, tuple, i), v.Pos())
 			}
 		}
 	}
@@ -2366,23 +2368,3 @@
 		sanityCheckPackage(p)
 	}
 }
-
-// Like ObjectOf, but panics instead of returning nil.
-// Only valid during p's create and build phases.
-func (p *Package) objectOf(id *ast.Ident) types.Object {
-	if o := p.info.ObjectOf(id); o != nil {
-		return o
-	}
-	panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
-		id.Name, p.Prog.Fset.Position(id.Pos())))
-}
-
-// Like TypeOf, but panics instead of returning nil.
-// Only valid during p's create and build phases.
-func (p *Package) typeOf(e ast.Expr) types.Type {
-	if T := p.info.TypeOf(e); T != nil {
-		return T
-	}
-	panic(fmt.Sprintf("no type for %T @ %s",
-		e, p.Prog.Fset.Position(e.Pos())))
-}
diff --git a/go/ssa/create.go b/go/ssa/create.go
index 85163a0..69cd937 100644
--- a/go/ssa/create.go
+++ b/go/ssa/create.go
@@ -34,7 +34,6 @@
 
 	h := typeutil.MakeHasher() // protected by methodsMu, in effect
 	prog.methodSets.SetHasher(h)
-	prog.canon.SetHasher(h)
 
 	return prog
 }
@@ -66,7 +65,7 @@
 			Value:  NewConst(obj.Val(), obj.Type()),
 			pkg:    pkg,
 		}
-		pkg.values[obj] = c.Value
+		pkg.objects[obj] = c
 		pkg.Members[name] = c
 
 	case *types.Var:
@@ -77,7 +76,7 @@
 			typ:    types.NewPointer(obj.Type()), // address
 			pos:    obj.Pos(),
 		}
-		pkg.values[obj] = g
+		pkg.objects[obj] = g
 		pkg.Members[name] = g
 
 	case *types.Func:
@@ -94,12 +93,13 @@
 			pos:       obj.Pos(),
 			Pkg:       pkg,
 			Prog:      pkg.Prog,
+			info:      pkg.info,
 		}
 		if syntax == nil {
 			fn.Synthetic = "loaded from gc object file"
 		}
 
-		pkg.values[obj] = fn
+		pkg.objects[obj] = fn
 		if sig.Recv() == nil {
 			pkg.Members[name] = fn // package-level function
 		}
@@ -166,7 +166,7 @@
 	p := &Package{
 		Prog:    prog,
 		Members: make(map[string]Member),
-		values:  make(map[types.Object]Value),
+		objects: make(map[types.Object]Member),
 		Pkg:     pkg,
 		info:    info,  // transient (CREATE and BUILD phases)
 		files:   files, // transient (CREATE and BUILD phases)
@@ -179,6 +179,7 @@
 		Synthetic: "package initializer",
 		Pkg:       p,
 		Prog:      prog,
+		info:      p.info,
 	}
 	p.Members[p.init.name] = p.init
 
diff --git a/go/ssa/doc.go b/go/ssa/doc.go
index fe0099b..6885bed 100644
--- a/go/ssa/doc.go
+++ b/go/ssa/doc.go
@@ -25,8 +25,8 @@
 //
 // The simplest way to create the SSA representation of a package is
 // to load typed syntax trees using golang.org/x/tools/go/packages, then
-// invoke the ssautil.Packages helper function. See ExampleLoadPackages
-// and ExampleWholeProgram for examples.
+// invoke the ssautil.Packages helper function. See Example_loadPackages
+// and Example_loadWholeProgram for examples.
 // The resulting ssa.Program contains all the packages and their
 // members, but SSA code is not created for function bodies until a
 // subsequent call to (*Package).Build or (*Program).Build.
@@ -59,7 +59,6 @@
 //   *ChangeType           ✔               ✔
 //   *Const                ✔
 //   *Convert              ✔               ✔
-//   *SliceToArrayPointer  ✔               ✔
 //   *DebugRef                             ✔
 //   *Defer                                ✔
 //   *Extract              ✔               ✔
@@ -91,6 +90,7 @@
 //   *Select               ✔               ✔
 //   *Send                                 ✔
 //   *Slice                ✔               ✔
+//   *SliceToArrayPointer  ✔               ✔
 //   *Store                                ✔
 //   *Type                                                 ✔ (type)
 //   *TypeAssert           ✔               ✔
diff --git a/go/ssa/emit.go b/go/ssa/emit.go
index 02d0e4b..576e024 100644
--- a/go/ssa/emit.go
+++ b/go/ssa/emit.go
@@ -50,7 +50,7 @@
 		if isBlankIdent(id) {
 			return
 		}
-		obj = f.Pkg.objectOf(id)
+		obj = f.objectOf(id)
 		switch obj.(type) {
 		case *types.Nil, *types.Const, *types.Builtin:
 			return
@@ -74,9 +74,16 @@
 	case token.SHL, token.SHR:
 		x = emitConv(f, x, t)
 		// y may be signed or an 'untyped' constant.
-		// TODO(adonovan): whence signed values?
-		if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 {
-			y = emitConv(f, y, types.Typ[types.Uint64])
+
+		// There is a runtime panic if y is signed and negative. Instead of inserting a check
+		// for y < 0 and converting y to an unsigned value (as the compiler does), leave y as is.
+
+		if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {
+			// Untyped conversion:
+			// Spec https://go.dev/ref/spec#Operators:
+			// The right operand in a shift expression must have integer type or be an untyped constant
+			// representable by a value of type uint.
+			y = emitConv(f, y, types.Typ[types.Uint])
 		}
 
 	case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
@@ -231,7 +238,7 @@
 	// Conversion from slice to array pointer?
 	if slice, ok := ut_src.(*types.Slice); ok {
 		if ptr, ok := ut_dst.(*types.Pointer); ok {
-			if arr, ok := ptr.Elem().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) {
+			if arr, ok := ptr.Elem().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) {
 				c := &SliceToArrayPointer{X: val}
 				c.setType(ut_dst)
 				return f.emit(c)
diff --git a/go/ssa/example_test.go b/go/ssa/example_test.go
index de5ed5e..2ab9e99 100644
--- a/go/ssa/example_test.go
+++ b/go/ssa/example_test.go
@@ -50,6 +50,9 @@
 // golang.org/x/tools/cmd/ssadump.
 //
 func Example_buildPackage() {
+	// Replace interface{} with any for this test.
+	ssa.SetNormalizeAnyForTesting(true)
+	defer ssa.SetNormalizeAnyForTesting(false)
 	// Parse the source files.
 	fset := token.NewFileSet()
 	f, err := parser.ParseFile(fset, "hello.go", hello, parser.ParseComments)
@@ -105,11 +108,11 @@
 	// # Location: hello.go:8:6
 	// func main():
 	// 0:                                                                entry P:0 S:0
-	// 	t0 = new [1]interface{} (varargs)                       *[1]interface{}
-	// 	t1 = &t0[0:int]                                            *interface{}
-	// 	t2 = make interface{} <- string ("Hello, World!":string)    interface{}
+	// 	t0 = new [1]any (varargs)                                       *[1]any
+	// 	t1 = &t0[0:int]                                                    *any
+	// 	t2 = make any <- string ("Hello, World!":string)                    any
 	// 	*t1 = t2
-	// 	t3 = slice t0[:]                                          []interface{}
+	// 	t3 = slice t0[:]                                                  []any
 	// 	t4 = fmt.Println(t3...)                              (n int, err error)
 	// 	return
 }
diff --git a/go/ssa/func.go b/go/ssa/func.go
index 0b99bc9..8fc089e 100644
--- a/go/ssa/func.go
+++ b/go/ssa/func.go
@@ -4,7 +4,7 @@
 
 package ssa
 
-// This file implements the Function and BasicBlock types.
+// This file implements the Function type.
 
 import (
 	"bytes"
@@ -17,113 +17,23 @@
 	"strings"
 )
 
-// addEdge adds a control-flow graph edge from from to to.
-func addEdge(from, to *BasicBlock) {
-	from.Succs = append(from.Succs, to)
-	to.Preds = append(to.Preds, from)
-}
-
-// Parent returns the function that contains block b.
-func (b *BasicBlock) Parent() *Function { return b.parent }
-
-// String returns a human-readable label of this block.
-// It is not guaranteed unique within the function.
-//
-func (b *BasicBlock) String() string {
-	return fmt.Sprintf("%d", b.Index)
-}
-
-// emit appends an instruction to the current basic block.
-// If the instruction defines a Value, it is returned.
-//
-func (b *BasicBlock) emit(i Instruction) Value {
-	i.setBlock(b)
-	b.Instrs = append(b.Instrs, i)
-	v, _ := i.(Value)
-	return v
-}
-
-// predIndex returns the i such that b.Preds[i] == c or panics if
-// there is none.
-func (b *BasicBlock) predIndex(c *BasicBlock) int {
-	for i, pred := range b.Preds {
-		if pred == c {
-			return i
-		}
+// Like ObjectOf, but panics instead of returning nil.
+// Only valid during f's create and build phases.
+func (f *Function) objectOf(id *ast.Ident) types.Object {
+	if o := f.info.ObjectOf(id); o != nil {
+		return o
 	}
-	panic(fmt.Sprintf("no edge %s -> %s", c, b))
+	panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
+		id.Name, f.Prog.Fset.Position(id.Pos())))
 }
 
-// hasPhi returns true if b.Instrs contains φ-nodes.
-func (b *BasicBlock) hasPhi() bool {
-	_, ok := b.Instrs[0].(*Phi)
-	return ok
-}
-
-// phis returns the prefix of b.Instrs containing all the block's φ-nodes.
-func (b *BasicBlock) phis() []Instruction {
-	for i, instr := range b.Instrs {
-		if _, ok := instr.(*Phi); !ok {
-			return b.Instrs[:i]
-		}
+// Like TypeOf, but panics instead of returning nil.
+// Only valid during f's create and build phases.
+func (f *Function) typeOf(e ast.Expr) types.Type {
+	if T := f.info.TypeOf(e); T != nil {
+		return T
 	}
-	return nil // unreachable in well-formed blocks
-}
-
-// replacePred replaces all occurrences of p in b's predecessor list with q.
-// Ordinarily there should be at most one.
-//
-func (b *BasicBlock) replacePred(p, q *BasicBlock) {
-	for i, pred := range b.Preds {
-		if pred == p {
-			b.Preds[i] = q
-		}
-	}
-}
-
-// replaceSucc replaces all occurrences of p in b's successor list with q.
-// Ordinarily there should be at most one.
-//
-func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
-	for i, succ := range b.Succs {
-		if succ == p {
-			b.Succs[i] = q
-		}
-	}
-}
-
-// removePred removes all occurrences of p in b's
-// predecessor list and φ-nodes.
-// Ordinarily there should be at most one.
-//
-func (b *BasicBlock) removePred(p *BasicBlock) {
-	phis := b.phis()
-
-	// We must preserve edge order for φ-nodes.
-	j := 0
-	for i, pred := range b.Preds {
-		if pred != p {
-			b.Preds[j] = b.Preds[i]
-			// Strike out φ-edge too.
-			for _, instr := range phis {
-				phi := instr.(*Phi)
-				phi.Edges[j] = phi.Edges[i]
-			}
-			j++
-		}
-	}
-	// Nil out b.Preds[j:] and φ-edges[j:] to aid GC.
-	for i := j; i < len(b.Preds); i++ {
-		b.Preds[i] = nil
-		for _, instr := range phis {
-			instr.(*Phi).Edges[i] = nil
-		}
-	}
-	b.Preds = b.Preds[:j]
-	for _, instr := range phis {
-		phi := instr.(*Phi)
-		phi.Edges = phi.Edges[:j]
-	}
+	panic(fmt.Sprintf("no type for %T @ %s", e, f.Prog.Fset.Position(e.Pos())))
 }
 
 // Destinations associated with unlabelled for/switch/select stmts.
@@ -214,7 +124,7 @@
 // syntax.  In addition it populates the f.objects mapping.
 //
 // Preconditions:
-// f.startBody() was called.
+// f.startBody() was called. f.info != nil.
 // Postcondition:
 // len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0)
 //
@@ -223,7 +133,7 @@
 	if recv != nil {
 		for _, field := range recv.List {
 			for _, n := range field.Names {
-				f.addSpilledParam(f.Pkg.info.Defs[n])
+				f.addSpilledParam(f.info.Defs[n])
 			}
 			// Anonymous receiver?  No need to spill.
 			if field.Names == nil {
@@ -237,7 +147,7 @@
 		n := len(f.Params) // 1 if has recv, 0 otherwise
 		for _, field := range functype.Params.List {
 			for _, n := range field.Names {
-				f.addSpilledParam(f.Pkg.info.Defs[n])
+				f.addSpilledParam(f.info.Defs[n])
 			}
 			// Anonymous parameter?  No need to spill.
 			if field.Names == nil {
@@ -335,7 +245,9 @@
 		lift(f)
 	}
 
+	// clear remaining stateful variables
 	f.namedResults = nil // (used by lifting)
+	f.info = nil
 
 	numberRegisters(f)
 
@@ -396,7 +308,7 @@
 }
 
 func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc {
-	return f.addNamedLocal(f.Pkg.info.Defs[id])
+	return f.addNamedLocal(f.info.Defs[id])
 }
 
 // addLocal creates an anonymous local variable of type typ, adds it
@@ -502,7 +414,7 @@
 
 	// Package-level function?
 	// Prefix with package name for cross-package references only.
-	if p := f.pkg(); p != nil && p != from {
+	if p := f.relPkg(); p != nil && p != from {
 		return fmt.Sprintf("%s.%s", p.Path(), f.name)
 	}
 
@@ -530,9 +442,26 @@
 	types.WriteSignature(buf, sig, types.RelativeTo(from))
 }
 
-func (f *Function) pkg() *types.Package {
-	if f.Pkg != nil {
-		return f.Pkg.Pkg
+// declaredPackage returns the package fn is declared in or nil if the
+// function is not declared in a package.
+func (fn *Function) declaredPackage() *Package {
+	switch {
+	case fn.Pkg != nil:
+		return fn.Pkg // non-generic function
+		// generics:
+	// case fn.Origin != nil:
+	// 	return fn.Origin.pkg // instance of a named generic function
+	case fn.parent != nil:
+		return fn.parent.declaredPackage() // instance of an anonymous [generic] function
+	default:
+		return nil // function is not declared in a package, e.g. a wrapper.
+	}
+}
+
+// relPkg returns types.Package fn is printed in relationship to.
+func (fn *Function) relPkg() *types.Package {
+	if p := fn.declaredPackage(); p != nil {
+		return p.Pkg
 	}
 	return nil
 }
@@ -567,7 +496,7 @@
 		fmt.Fprintf(buf, "# Recover: %s\n", f.Recover)
 	}
 
-	from := f.pkg()
+	from := f.relPkg()
 
 	if f.FreeVars != nil {
 		buf.WriteString("# Free variables:\n")
diff --git a/go/ssa/interp/external.go b/go/ssa/interp/external.go
index 68ddee3..51b3be0 100644
--- a/go/ssa/interp/external.go
+++ b/go/ssa/interp/external.go
@@ -12,6 +12,8 @@
 	"math"
 	"os"
 	"runtime"
+	"sort"
+	"strconv"
 	"strings"
 	"time"
 	"unicode/utf8"
@@ -79,6 +81,7 @@
 		"math.Log":                        ext۰math۰Log,
 		"math.Min":                        ext۰math۰Min,
 		"math.NaN":                        ext۰math۰NaN,
+		"math.Sqrt":                       ext۰math۰Sqrt,
 		"os.Exit":                         ext۰os۰Exit,
 		"os.Getenv":                       ext۰os۰Getenv,
 		"reflect.New":                     ext۰reflect۰New,
@@ -93,10 +96,18 @@
 		"runtime.Goexit":                  ext۰runtime۰Goexit,
 		"runtime.Gosched":                 ext۰runtime۰Gosched,
 		"runtime.NumCPU":                  ext۰runtime۰NumCPU,
+		"sort.Float64s":                   ext۰sort۰Float64s,
+		"sort.Ints":                       ext۰sort۰Ints,
+		"sort.Strings":                    ext۰sort۰Strings,
+		"strconv.Atoi":                    ext۰strconv۰Atoi,
+		"strconv.Itoa":                    ext۰strconv۰Itoa,
+		"strconv.FormatFloat":             ext۰strconv۰FormatFloat,
 		"strings.Count":                   ext۰strings۰Count,
+		"strings.EqualFold":               ext۰strings۰EqualFold,
 		"strings.Index":                   ext۰strings۰Index,
 		"strings.IndexByte":               ext۰strings۰IndexByte,
 		"strings.Replace":                 ext۰strings۰Replace,
+		"strings.ToLower":                 ext۰strings۰ToLower,
 		"time.Sleep":                      ext۰time۰Sleep,
 		"unicode/utf8.DecodeRuneInString": ext۰unicode۰utf8۰DecodeRuneInString,
 	} {
@@ -179,15 +190,58 @@
 	return math.Log(args[0].(float64))
 }
 
+func ext۰math۰Sqrt(fr *frame, args []value) value {
+	return math.Sqrt(args[0].(float64))
+}
+
 func ext۰runtime۰Breakpoint(fr *frame, args []value) value {
 	runtime.Breakpoint()
 	return nil
 }
 
+func ext۰sort۰Ints(fr *frame, args []value) value {
+	x := args[0].([]value)
+	sort.Slice(x, func(i, j int) bool {
+		return x[i].(int) < x[j].(int)
+	})
+	return nil
+}
+func ext۰sort۰Strings(fr *frame, args []value) value {
+	x := args[0].([]value)
+	sort.Slice(x, func(i, j int) bool {
+		return x[i].(string) < x[j].(string)
+	})
+	return nil
+}
+func ext۰sort۰Float64s(fr *frame, args []value) value {
+	x := args[0].([]value)
+	sort.Slice(x, func(i, j int) bool {
+		return x[i].(float64) < x[j].(float64)
+	})
+	return nil
+}
+
+func ext۰strconv۰Atoi(fr *frame, args []value) value {
+	i, e := strconv.Atoi(args[0].(string))
+	if e != nil {
+		return tuple{i, iface{fr.i.runtimeErrorString, e.Error()}}
+	}
+	return tuple{i, iface{}}
+}
+func ext۰strconv۰Itoa(fr *frame, args []value) value {
+	return strconv.Itoa(args[0].(int))
+}
+func ext۰strconv۰FormatFloat(fr *frame, args []value) value {
+	return strconv.FormatFloat(args[0].(float64), args[1].(byte), args[2].(int), args[3].(int))
+}
+
 func ext۰strings۰Count(fr *frame, args []value) value {
 	return strings.Count(args[0].(string), args[1].(string))
 }
 
+func ext۰strings۰EqualFold(fr *frame, args []value) value {
+	return strings.EqualFold(args[0].(string), args[1].(string))
+}
 func ext۰strings۰IndexByte(fr *frame, args []value) value {
 	return strings.IndexByte(args[0].(string), args[1].(byte))
 }
@@ -205,6 +259,10 @@
 	return strings.Replace(s, old, new, n)
 }
 
+func ext۰strings۰ToLower(fr *frame, args []value) value {
+	return strings.ToLower(args[0].(string))
+}
+
 func ext۰runtime۰GOMAXPROCS(fr *frame, args []value) value {
 	// Ignore args[0]; don't let the interpreted program
 	// set the interpreter's GOMAXPROCS!
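
All of the new externals follow the same bridging pattern: the interpreter represents a Go slice as a []value and a string or numeric operand as the corresponding Go value, so each external asserts the representation it expects and delegates to the real standard-library function. A further entry written in the same style would look like the sketch below (ext۰strings۰ToUpper is hypothetical, not part of the patch; it would also need a "strings.ToUpper" line in the map above and a declaration in the fake testdata strings package):

func ext۰strings۰ToUpper(fr *frame, args []value) value {
	return strings.ToUpper(args[0].(string))
}
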
diff --git a/go/ssa/interp/interp_test.go b/go/ssa/interp/interp_test.go
index 28ebf5f..1b43742 100644
--- a/go/ssa/interp/interp_test.go
+++ b/go/ssa/interp/interp_test.go
@@ -109,6 +109,7 @@
 var testdataTests = []string{
 	"boundmeth.go",
 	"complit.go",
+	"convert.go",
 	"coverage.go",
 	"defer.go",
 	"fieldprom.go",
diff --git a/go/ssa/interp/ops.go b/go/ssa/interp/ops.go
index 6af7847..3bc6a4e 100644
--- a/go/ssa/interp/ops.go
+++ b/go/ssa/interp/ops.go
@@ -137,6 +137,26 @@
 	panic(fmt.Sprintf("cannot convert %T to uint64", x))
 }
 
+// asUnsigned returns the value of x, which must be an integer type, as its equivalent unsigned type,
+// and returns true if x is non-negative.
+func asUnsigned(x value) (value, bool) {
+	switch x := x.(type) {
+	case int:
+		return uint(x), x >= 0
+	case int8:
+		return uint8(x), x >= 0
+	case int16:
+		return uint16(x), x >= 0
+	case int32:
+		return uint32(x), x >= 0
+	case int64:
+		return uint64(x), x >= 0
+	case uint, uint8, uint16, uint32, uint64, uintptr:
+		return x, true
+	}
+	panic(fmt.Sprintf("cannot convert %T to unsigned", x))
+}
+
 // zero returns a new "zero" value of the specified type.
 func zero(t types.Type) value {
 	switch t := t.(type) {
@@ -576,7 +596,11 @@
 		}
 
 	case token.SHL:
-		y := asUint64(y)
+		u, ok := asUnsigned(y)
+		if !ok {
+			panic("negative shift amount")
+		}
+		y := asUint64(u)
 		switch x.(type) {
 		case int:
 			return x.(int) << y
@@ -603,7 +627,11 @@
 		}
 
 	case token.SHR:
-		y := asUint64(y)
+		u, ok := asUnsigned(y)
+		if !ok {
+			panic("negative shift amount")
+		}
+		y := asUint64(u)
 		switch x.(type) {
 		case int:
 			return x.(int) >> y
diff --git a/go/ssa/interp/testdata/convert.go b/go/ssa/interp/testdata/convert.go
new file mode 100644
index 0000000..0dcf13b
--- /dev/null
+++ b/go/ssa/interp/testdata/convert.go
@@ -0,0 +1,38 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Test conversion operations.
+
+package main
+
+func left(x int)  { _ = 1 << x }
+func right(x int) { _ = 1 >> x }
+
+func main() {
+	wantPanic(
+		func() {
+			left(-1)
+		},
+		"runtime error: negative shift amount",
+	)
+	wantPanic(
+		func() {
+			right(-1)
+		},
+		"runtime error: negative shift amount",
+	)
+}
+
+func wantPanic(fn func(), s string) {
+	defer func() {
+		err := recover()
+		if err == nil {
+			panic("expected panic")
+		}
+		if got := err.(error).Error(); got != s {
+			panic("expected panic " + s + " got " + got)
+		}
+	}()
+	fn()
+}
diff --git a/go/ssa/interp/testdata/slice2arrayptr.go b/go/ssa/interp/testdata/slice2arrayptr.go
index ad37a18..ff2d9b5 100644
--- a/go/ssa/interp/testdata/slice2arrayptr.go
+++ b/go/ssa/interp/testdata/slice2arrayptr.go
@@ -34,6 +34,13 @@
 	)
 }
 
+type arr [2]int
+
+func f() {
+	s := []int{1, 2, 3, 4}
+	_ = *(*arr)(s)
+}
+
 func wantPanic(fn func(), s string) {
 	defer func() {
 		err := recover()
diff --git a/go/ssa/interp/testdata/src/fmt/fmt.go b/go/ssa/interp/testdata/src/fmt/fmt.go
index 2185eb7..af30402 100644
--- a/go/ssa/interp/testdata/src/fmt/fmt.go
+++ b/go/ssa/interp/testdata/src/fmt/fmt.go
@@ -1,14 +1,28 @@
 package fmt
 
+import (
+	"errors"
+	"strings"
+)
+
 func Sprint(args ...interface{}) string
 
-func Print(args ...interface{}) {
+func Sprintln(args ...interface{}) string {
+	return Sprint(args...) + "\n"
+}
+
+func Print(args ...interface{}) (int, error) {
+	var n int
 	for i, arg := range args {
 		if i > 0 {
 			print(" ")
+			n++
 		}
-		print(Sprint(arg))
+		msg := Sprint(arg)
+		n += len(msg)
+		print(msg)
 	}
+	return n, nil
 }
 
 func Println(args ...interface{}) {
@@ -17,10 +31,30 @@
 }
 
 // formatting is too complex to fake
+// handle the bare minimum needed for tests
 
-func Printf(args ...interface{}) string {
-	panic("Printf is not supported")
+func Printf(format string, args ...interface{}) (int, error) {
+	msg := Sprintf(format, args...)
+	print(msg)
+	return len(msg), nil
 }
+
 func Sprintf(format string, args ...interface{}) string {
-	panic("Sprintf is not supported")
+	// handle extremely simple cases that appear in tests.
+	if len(format) == 0 {
+		return ""
+	}
+	switch {
+	case strings.HasPrefix("%v", format) || strings.HasPrefix("%s", format):
+		return Sprint(args[0]) + Sprintf(format[2:], args[1:]...)
+	case !strings.HasPrefix("%", format):
+		return format[:1] + Sprintf(format[1:], args...)
+	default:
+		panic("unsupported format string for testing Sprintf")
+	}
+}
+
+func Errorf(format string, args ...interface{}) error {
+	msg := Sprintf(format, args...)
+	return errors.New(msg)
 }
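The faked Sprintf above handles only the %v and %s verbs plus literal characters, consuming one verb (and one argument) or one byte per recursive call. A hedged usage sketch, assuming the interpreter's intrinsic Sprint formats values like the real fmt.Sprint:

// exampleSprintf is illustrative only; it exercises the two supported cases.
func exampleSprintf() {
	_ = Sprintf("x=%v y=%s", 1, "two") // "x=1 y=two"; each verb consumes one argument
	_ = Sprintf("no verbs here")       // copied through unchanged, one byte at a time
}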
diff --git a/go/ssa/interp/testdata/src/io/io.go b/go/ssa/interp/testdata/src/io/io.go
new file mode 100644
index 0000000..8cde430
--- /dev/null
+++ b/go/ssa/interp/testdata/src/io/io.go
@@ -0,0 +1,5 @@
+package io
+
+import "errors"
+
+var EOF = errors.New("EOF")
diff --git a/go/ssa/interp/testdata/src/log/log.go b/go/ssa/interp/testdata/src/log/log.go
new file mode 100644
index 0000000..8897c1d
--- /dev/null
+++ b/go/ssa/interp/testdata/src/log/log.go
@@ -0,0 +1,15 @@
+package log
+
+import (
+	"fmt"
+	"os"
+)
+
+func Println(v ...interface{}) {
+	fmt.Println(v...)
+}
+
+func Fatalln(v ...interface{}) {
+	Println(v...)
+	os.Exit(1)
+}
diff --git a/go/ssa/interp/testdata/src/math/math.go b/go/ssa/interp/testdata/src/math/math.go
index f51e5f5..64fe60c 100644
--- a/go/ssa/interp/testdata/src/math/math.go
+++ b/go/ssa/interp/testdata/src/math/math.go
@@ -11,3 +11,5 @@
 func Signbit(x float64) bool {
 	return Float64bits(x)&(1<<63) != 0
 }
+
+func Sqrt(x float64) float64
diff --git a/go/ssa/interp/testdata/src/reflect/reflect.go b/go/ssa/interp/testdata/src/reflect/reflect.go
index f6c4e27..8a23d27 100644
--- a/go/ssa/interp/testdata/src/reflect/reflect.go
+++ b/go/ssa/interp/testdata/src/reflect/reflect.go
@@ -2,6 +2,8 @@
 
 type Type interface {
 	String() string
+	Kind() Kind
+	Elem() Type
 }
 
 type Value struct {
@@ -9,8 +11,47 @@
 
 func (Value) String() string
 
+func (Value) Elem() string
+func (Value) Kind() Kind
+func (Value) Int() int64
+
 func SliceOf(Type) Type
 
 func TypeOf(interface{}) Type
 
 func ValueOf(interface{}) Value
+
+type Kind uint
+
+// Constants need to be kept in sync with the actual definitions for comparisons in tests.
+const (
+	Invalid Kind = iota
+	Bool
+	Int
+	Int8
+	Int16
+	Int32
+	Int64
+	Uint
+	Uint8
+	Uint16
+	Uint32
+	Uint64
+	Uintptr
+	Float32
+	Float64
+	Complex64
+	Complex128
+	Array
+	Chan
+	Func
+	Interface
+	Map
+	Pointer
+	Slice
+	String
+	Struct
+	UnsafePointer
+)
+
+const Ptr = Pointer
diff --git a/go/ssa/interp/testdata/src/sort/sort.go b/go/ssa/interp/testdata/src/sort/sort.go
new file mode 100644
index 0000000..d94d6da
--- /dev/null
+++ b/go/ssa/interp/testdata/src/sort/sort.go
@@ -0,0 +1,5 @@
+package sort
+
+func Strings(x []string)
+func Ints(x []int)
+func Float64s(x []float64)
diff --git a/go/ssa/interp/testdata/src/strconv/strconv.go b/go/ssa/interp/testdata/src/strconv/strconv.go
new file mode 100644
index 0000000..3f6f877
--- /dev/null
+++ b/go/ssa/interp/testdata/src/strconv/strconv.go
@@ -0,0 +1,6 @@
+package strconv
+
+func Itoa(i int) string
+func Atoi(s string) (int, error)
+
+func FormatFloat(float64, byte, int, int) string
diff --git a/go/ssa/interp/testdata/src/strings/strings.go b/go/ssa/interp/testdata/src/strings/strings.go
index dd86dcf..4c74f1b 100644
--- a/go/ssa/interp/testdata/src/strings/strings.go
+++ b/go/ssa/interp/testdata/src/strings/strings.go
@@ -7,3 +7,20 @@
 func Contains(haystack, needle string) bool {
 	return Index(haystack, needle) >= 0
 }
+
+func HasPrefix(s, prefix string) bool {
+	return len(s) >= len(prefix) && s[0:len(prefix)] == prefix
+}
+
+func EqualFold(s, t string) bool
+func ToLower(s string) string
+
+type Builder struct {
+	s string
+}
+
+func (b *Builder) WriteString(s string) (int, error) {
+	b.s += s
+	return len(s), nil
+}
+func (b *Builder) String() string { return b.s }
diff --git a/go/ssa/interp/testdata/src/sync/sync.go b/go/ssa/interp/testdata/src/sync/sync.go
new file mode 100644
index 0000000..457a670
--- /dev/null
+++ b/go/ssa/interp/testdata/src/sync/sync.go
@@ -0,0 +1,36 @@
+package sync
+
+// Rudimentary implementation of a mutex for interp tests.
+type Mutex struct {
+	c chan int // Mutex is held when c != nil and c is empty. Access to c is guarded by g.
+}
+
+func (m *Mutex) Lock() {
+	c := ch(m)
+	<-c
+}
+
+func (m *Mutex) Unlock() {
+	c := ch(m)
+	c <- 1
+}
+
+// g serializes access to Mutex.c.
+var g = make(chan int, 1)
+
+func init() {
+	g <- 1
+}
+
+// ch initializes the m.c field if needed and returns it.
+func ch(m *Mutex) chan int {
+	<-g
+	defer func() {
+		g <- 1
+	}()
+	if m.c == nil {
+		m.c = make(chan int, 1)
+		m.c <- 1
+	}
+	return m.c
+}
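The fake sync.Mutex above models the lock as a one-element buffered channel holding a token: Lock receives the token, Unlock puts it back, and the package-level channel g serializes lazy initialization of m.c. A minimal usage sketch (the helper name is made up for illustration):

// withLock runs f while holding m; Lock blocks until the token is available.
func withLock(m *Mutex, f func()) {
	m.Lock()
	defer m.Unlock()
	f()
}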
diff --git a/go/ssa/methods.go b/go/ssa/methods.go
index 9cf3839..22e1f3f 100644
--- a/go/ssa/methods.go
+++ b/go/ssa/methods.go
@@ -118,7 +118,7 @@
 // Panic ensues if there is none.
 //
 func (prog *Program) declaredFunc(obj *types.Func) *Function {
-	if v := prog.packageLevelValue(obj); v != nil {
+	if v := prog.packageLevelMember(obj); v != nil {
 		return v.(*Function)
 	}
 	panic("no concrete method: " + obj.String())
diff --git a/go/ssa/parameterized.go b/go/ssa/parameterized.go
new file mode 100644
index 0000000..956718c
--- /dev/null
+++ b/go/ssa/parameterized.go
@@ -0,0 +1,113 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+	"go/types"
+
+	"golang.org/x/tools/internal/typeparams"
+)
+
+// tpWalker walks over types looking for parameterized types.
+//
+// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy.
+type tpWalker struct {
+	seen map[types.Type]bool
+}
+
+// isParameterized returns true when typ contains any type parameters.
+func (w *tpWalker) isParameterized(typ types.Type) (res bool) {
+	// NOTE: Adapted from go/types/infer.go. Try to keep in sync.
+
+	// detect cycles
+	if x, ok := w.seen[typ]; ok {
+		return x
+	}
+	w.seen[typ] = false
+	defer func() {
+		w.seen[typ] = res
+	}()
+
+	switch t := typ.(type) {
+	case nil, *types.Basic: // TODO(gri) should nil be handled here?
+		break
+
+	case *types.Array:
+		return w.isParameterized(t.Elem())
+
+	case *types.Slice:
+		return w.isParameterized(t.Elem())
+
+	case *types.Struct:
+		for i, n := 0, t.NumFields(); i < n; i++ {
+			if w.isParameterized(t.Field(i).Type()) {
+				return true
+			}
+		}
+
+	case *types.Pointer:
+		return w.isParameterized(t.Elem())
+
+	case *types.Tuple:
+		n := t.Len()
+		for i := 0; i < n; i++ {
+			if w.isParameterized(t.At(i).Type()) {
+				return true
+			}
+		}
+
+	case *types.Signature:
+		// t.tparams may not be nil if we are looking at a signature
+		// of a generic function type (or an interface method) that is
+		// part of the type we're testing. We don't care about these type
+		// parameters.
+	// Similarly, the receiver of a method may declare (rather than
+		// use) type parameters, we don't care about those either.
+		// Thus, we only need to look at the input and result parameters.
+		return w.isParameterized(t.Params()) || w.isParameterized(t.Results())
+
+	case *types.Interface:
+		for i, n := 0, t.NumMethods(); i < n; i++ {
+			if w.isParameterized(t.Method(i).Type()) {
+				return true
+			}
+		}
+		terms, err := typeparams.InterfaceTermSet(t)
+		if err != nil {
+			panic(err)
+		}
+		for _, term := range terms {
+			if w.isParameterized(term.Type()) {
+				return true
+			}
+		}
+
+	case *types.Map:
+		return w.isParameterized(t.Key()) || w.isParameterized(t.Elem())
+
+	case *types.Chan:
+		return w.isParameterized(t.Elem())
+
+	case *types.Named:
+		args := typeparams.NamedTypeArgs(t)
+		// TODO(taking): this does not match go/types/infer.go. Check with rfindley.
+		if params := typeparams.ForNamed(t); params.Len() > args.Len() {
+			return true
+		}
+		for i, n := 0, args.Len(); i < n; i++ {
+			if w.isParameterized(args.At(i)) {
+				return true
+			}
+		}
+
+	case *typeparams.TypeParam:
+		return true
+
+	default:
+		panic(t) // unreachable
+	}
+
+	return false
+}
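A tpWalker caches results per instance, so each independent query should seed its own seen map. A hedged sketch of a one-off query (the wrapper function is illustrative, not part of the patch):

// isParameterizedType reports whether t mentions any type parameters,
// using a fresh walker so cached results are not shared across queries.
func isParameterizedType(t types.Type) bool {
	w := tpWalker{seen: make(map[types.Type]bool)}
	return w.isParameterized(t)
}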
diff --git a/go/ssa/parameterized_test.go b/go/ssa/parameterized_test.go
new file mode 100644
index 0000000..64c9125
--- /dev/null
+++ b/go/ssa/parameterized_test.go
@@ -0,0 +1,80 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"testing"
+
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func TestIsParameterized(t *testing.T) {
+	if !typeparams.Enabled {
+		return
+	}
+
+	const source = `
+package P
+type A int
+func (A) f()
+func (*A) g()
+
+type fer interface { f() }
+
+func Apply[T fer](x T) T {
+	x.f()
+	return x
+}
+
+type V[T any] []T
+func (v *V[T]) Push(x T) { *v = append(*v, x) }
+`
+
+	fset := token.NewFileSet()
+	f, err := parser.ParseFile(fset, "hello.go", source, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	var conf types.Config
+	pkg, err := conf.Check("P", fset, []*ast.File{f}, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	for _, test := range []struct {
+		expr string // type expression
+		want bool   // expected isParameterized value
+	}{
+		{"A", false},
+		{"*A", false},
+		{"error", false},
+		{"*error", false},
+		{"struct{A}", false},
+		{"*struct{A}", false},
+		{"fer", false},
+		{"Apply", true},
+		{"Apply[A]", false},
+		{"V", true},
+		{"V[A]", false},
+		{"*V[A]", false},
+		{"(*V[A]).Push", false},
+	} {
+		tv, err := types.Eval(fset, pkg, 0, test.expr)
+		if err != nil {
+			t.Errorf("Eval(%s) failed: %v", test.expr, err)
+		}
+
+		param := tpWalker{seen: make(map[types.Type]bool)}
+		if got := param.isParameterized(tv.Type); got != test.want {
+			t.Logf("Eval(%s) returned the type %s", test.expr, tv.Type)
+			t.Errorf("isParameterized(%s) = %v, want %v", test.expr, got, test.want)
+		}
+	}
+}
diff --git a/go/ssa/print.go b/go/ssa/print.go
index c1b6d22..d0f3bbf 100644
--- a/go/ssa/print.go
+++ b/go/ssa/print.go
@@ -14,6 +14,7 @@
 	"io"
 	"reflect"
 	"sort"
+	"strings"
 
 	"golang.org/x/tools/go/types/typeutil"
 )
@@ -27,7 +28,7 @@
 func relName(v Value, i Instruction) string {
 	var from *types.Package
 	if i != nil {
-		from = i.Parent().pkg()
+		from = i.Parent().relPkg()
 	}
 	switch v := v.(type) {
 	case Member: // *Function or *Global
@@ -38,8 +39,16 @@
 	return v.Name()
 }
 
+// normalizeAnyForTesting controls whether we replace occurrences of
+// interface{} with any. It is only used for normalizing test output.
+var normalizeAnyForTesting bool
+
 func relType(t types.Type, from *types.Package) string {
-	return types.TypeString(t, types.RelativeTo(from))
+	s := types.TypeString(t, types.RelativeTo(from))
+	if normalizeAnyForTesting {
+		s = strings.ReplaceAll(s, "interface{}", "any")
+	}
+	return s
 }
 
 func relString(m Member, from *types.Package) string {
@@ -57,12 +66,12 @@
 // It never appears in disassembly, which uses Value.Name().
 
 func (v *Parameter) String() string {
-	from := v.Parent().pkg()
+	from := v.Parent().relPkg()
 	return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from))
 }
 
 func (v *FreeVar) String() string {
-	from := v.Parent().pkg()
+	from := v.Parent().relPkg()
 	return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from))
 }
 
@@ -77,7 +86,7 @@
 	if v.Heap {
 		op = "new"
 	}
-	from := v.Parent().pkg()
+	from := v.Parent().relPkg()
 	return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment)
 }
 
@@ -151,7 +160,7 @@
 }
 
 func printConv(prefix string, v, x Value) string {
-	from := v.Parent().pkg()
+	from := v.Parent().relPkg()
 	return fmt.Sprintf("%s %s <- %s (%s)",
 		prefix,
 		relType(v.Type(), from),
@@ -182,7 +191,7 @@
 }
 
 func (v *MakeSlice) String() string {
-	from := v.Parent().pkg()
+	from := v.Parent().relPkg()
 	return fmt.Sprintf("make %s %s %s",
 		relType(v.Type(), from),
 		relName(v.Len, v),
@@ -214,12 +223,12 @@
 	if v.Reserve != nil {
 		res = relName(v.Reserve, v)
 	}
-	from := v.Parent().pkg()
+	from := v.Parent().relPkg()
 	return fmt.Sprintf("make %s %s", relType(v.Type(), from), res)
 }
 
 func (v *MakeChan) String() string {
-	from := v.Parent().pkg()
+	from := v.Parent().relPkg()
 	return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v))
 }
 
@@ -264,7 +273,7 @@
 }
 
 func (v *TypeAssert) String() string {
-	from := v.Parent().pkg()
+	from := v.Parent().relPkg()
 	return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from))
 }
 
diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go
index 1d4e20f..6e65d76 100644
--- a/go/ssa/sanity.go
+++ b/go/ssa/sanity.go
@@ -409,8 +409,8 @@
 		s.errorf("nil Prog")
 	}
 
-	_ = fn.String()            // must not crash
-	_ = fn.RelString(fn.pkg()) // must not crash
+	_ = fn.String()               // must not crash
+	_ = fn.RelString(fn.relPkg()) // must not crash
 
 	// All functions have a package, except delegates (which are
 	// shared across packages, or duplicated as weak symbols in a
diff --git a/go/ssa/source.go b/go/ssa/source.go
index 8d9cca1..7e2a369 100644
--- a/go/ssa/source.go
+++ b/go/ssa/source.go
@@ -123,7 +123,7 @@
 				// Don't call Program.Method: avoid creating wrappers.
 				obj := mset.At(i).Obj().(*types.Func)
 				if obj.Pos() == pos {
-					return pkg.values[obj].(*Function)
+					return pkg.objects[obj].(*Function)
 				}
 			}
 		}
@@ -180,14 +180,14 @@
 	return prog.packages[obj]
 }
 
-// packageLevelValue returns the package-level value corresponding to
+// packageLevelMember returns the package-level member corresponding to
 // the specified named object, which may be a package-level const
-// (*Const), var (*Global) or func (*Function) of some package in
+// (*NamedConst), var (*Global) or func (*Function) of some package in
 // prog.  It returns nil if the object is not found.
 //
-func (prog *Program) packageLevelValue(obj types.Object) Value {
+func (prog *Program) packageLevelMember(obj types.Object) Member {
 	if pkg, ok := prog.packages[obj.Pkg()]; ok {
-		return pkg.values[obj]
+		return pkg.objects[obj]
 	}
 	return nil
 }
@@ -199,7 +199,7 @@
 // result's Signature, both in the params/results and in the receiver.
 //
 func (prog *Program) FuncValue(obj *types.Func) *Function {
-	fn, _ := prog.packageLevelValue(obj).(*Function)
+	fn, _ := prog.packageLevelMember(obj).(*Function)
 	return fn
 }
 
@@ -215,8 +215,8 @@
 		return NewConst(obj.Val(), obj.Type())
 	}
 	// Package-level named constant?
-	if v := prog.packageLevelValue(obj); v != nil {
-		return v.(*Const)
+	if v := prog.packageLevelMember(obj); v != nil {
+		return v.(*NamedConst).Value
 	}
 	return NewConst(obj.Val(), obj.Type())
 }
@@ -285,7 +285,7 @@
 	}
 
 	// Defining ident of package-level var?
-	if v := prog.packageLevelValue(obj); v != nil {
+	if v := prog.packageLevelMember(obj); v != nil {
 		return v.(*Global), true
 	}
 
diff --git a/go/ssa/ssa.go b/go/ssa/ssa.go
index 8358681..ea5b68e 100644
--- a/go/ssa/ssa.go
+++ b/go/ssa/ssa.go
@@ -26,10 +26,11 @@
 	mode       BuilderMode                 // set of mode bits for SSA construction
 	MethodSets typeutil.MethodSetCache     // cache of type-checker's method-sets
 
+	canon canonizer // type canonicalization map
+
 	methodsMu    sync.Mutex                 // guards the following maps:
 	methodSets   typeutil.Map               // maps type to its concrete methodSet
 	runtimeTypes typeutil.Map               // types for which rtypes are needed
-	canon        typeutil.Map               // type canonicalization map
 	bounds       map[*types.Func]*Function  // bounds for curried x.Method closures
 	thunks       map[selectionKey]*Function // thunks for T.Method expressions
 }
@@ -44,12 +45,12 @@
 // and unspecified other things too.
 //
 type Package struct {
-	Prog    *Program               // the owning program
-	Pkg     *types.Package         // the corresponding go/types.Package
-	Members map[string]Member      // all package members keyed by name (incl. init and init#%d)
-	values  map[types.Object]Value // package members (incl. types and methods), keyed by object
-	init    *Function              // Func("init"); the package's init function
-	debug   bool                   // include full debug info in this package
+	Prog    *Program                // the owning program
+	Pkg     *types.Package          // the corresponding go/types.Package
+	Members map[string]Member       // all package members keyed by name (incl. init and init#%d)
+	objects map[types.Object]Member // mapping of package objects to members (incl. methods). Contains *NamedConst, *Global, *Function.
+	init    *Function               // Func("init"); the package's init function
+	debug   bool                    // include full debug info in this package
 
 	// The following fields are set transiently, then cleared
 	// after building.
@@ -320,6 +321,7 @@
 	namedResults []*Alloc                // tuple of named results
 	targets      *targets                // linked stack of branch targets
 	lblocks      map[*ast.Object]*lblock // labelled blocks
+	info         *types.Info             // *types.Info to build from. nil for wrappers.
 }
 
 // BasicBlock represents an SSA basic block.
diff --git a/go/ssa/ssautil/load.go b/go/ssa/ssautil/load.go
index eab12dc..88d7c8f 100644
--- a/go/ssa/ssautil/load.go
+++ b/go/ssa/ssautil/load.go
@@ -14,6 +14,7 @@
 	"golang.org/x/tools/go/loader"
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/go/ssa"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // Packages creates an SSA program for a set of packages.
@@ -102,7 +103,7 @@
 // The mode parameter controls diagnostics and checking during SSA construction.
 //
 // Deprecated: Use golang.org/x/tools/go/packages and the Packages
-// function instead; see ssa.ExampleLoadPackages.
+// function instead; see ssa.Example_loadPackages.
 //
 func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program {
 	prog := ssa.NewProgram(lprog.Fset, mode)
@@ -147,6 +148,7 @@
 		Scopes:     make(map[ast.Node]*types.Scope),
 		Selections: make(map[*ast.SelectorExpr]*types.Selection),
 	}
+	typeparams.InitInstanceInfo(info)
 	if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
 		return nil, nil, err
 	}
diff --git a/go/ssa/stdlib_test.go b/go/ssa/stdlib_test.go
index 1c358b0..aaa1580 100644
--- a/go/ssa/stdlib_test.go
+++ b/go/ssa/stdlib_test.go
@@ -16,17 +16,17 @@
 
 import (
 	"go/ast"
-	"go/build"
 	"go/token"
 	"runtime"
 	"testing"
 	"time"
 
-	"golang.org/x/tools/go/buildutil"
-	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/go/ssa"
 	"golang.org/x/tools/go/ssa/ssautil"
 	"golang.org/x/tools/internal/testenv"
+	"golang.org/x/tools/internal/typeparams/genericfeatures"
 )
 
 func bytesAllocated() uint64 {
@@ -46,18 +46,27 @@
 	t0 := time.Now()
 	alloc0 := bytesAllocated()
 
-	// Load, parse and type-check the program.
-	ctxt := build.Default // copy
-	ctxt.GOPATH = ""      // disable GOPATH
-	conf := loader.Config{Build: &ctxt}
-	for _, path := range buildutil.AllPackages(conf.Build) {
-		conf.ImportWithTests(path)
-	}
-
-	iprog, err := conf.Load()
+	cfg := &packages.Config{Mode: packages.LoadSyntax}
+	pkgs, err := packages.Load(cfg, "std", "cmd")
 	if err != nil {
-		t.Fatalf("Load failed: %v", err)
+		t.Fatal(err)
 	}
+	var nonGeneric int
+	for i := 0; i < len(pkgs); i++ {
+		pkg := pkgs[i]
+		inspect := inspector.New(pkg.Syntax)
+		features := genericfeatures.ForPackage(inspect, pkg.TypesInfo)
+		// Skip standard library packages that use generics. This won't be
+		// sufficient if any standard library packages start _importing_ packages
+		// that use generics.
+		if features != 0 {
+			t.Logf("skipping package %q which uses generics", pkg.PkgPath)
+			continue
+		}
+		pkgs[nonGeneric] = pkg
+		nonGeneric++
+	}
+	pkgs = pkgs[:nonGeneric]
 
 	t1 := time.Now()
 	alloc1 := bytesAllocated()
@@ -67,7 +76,7 @@
 	// Comment out these lines during benchmarking.  Approx SSA build costs are noted.
 	mode |= ssa.SanityCheckFunctions // + 2% space, + 4% time
 	mode |= ssa.GlobalDebug          // +30% space, +18% time
-	prog := ssautil.CreateProgram(iprog, mode)
+	prog, _ := ssautil.Packages(pkgs, mode)
 
 	t2 := time.Now()
 
@@ -82,8 +91,8 @@
 		t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
 	}
 
-	// Keep iprog reachable until after we've measured memory usage.
-	if len(iprog.AllPackages) == 0 {
+	// Keep pkgs reachable until after we've measured memory usage.
+	if len(pkgs) == 0 {
 		panic("unreachable")
 	}
 
diff --git a/go/ssa/subst.go b/go/ssa/subst.go
new file mode 100644
index 0000000..0e9263f
--- /dev/null
+++ b/go/ssa/subst.go
@@ -0,0 +1,432 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+package ssa
+
+import (
+	"fmt"
+	"go/types"
+
+	"golang.org/x/tools/internal/typeparams"
+)
+
+// Type substituter for a fixed set of replacement types.
+//
+// A nil *subster is a valid, empty substitution map. It always acts as
+// the identity function. This allows for treating parameterized and
+// non-parameterized functions identically while compiling to ssa.
+//
+// Not concurrency-safe.
+type subster struct {
+	replacements map[*typeparams.TypeParam]types.Type // values should contain no type params
+	cache        map[types.Type]types.Type            // cache of subst results
+	ctxt         *typeparams.Context
+	debug        bool // perform extra debugging checks
+	// TODO(taking): consider adding Pos
+}
+
+// makeSubster returns a subster that replaces tparams[i] with targs[i]. It uses ctxt as a cache.
+// targs should not contain any types in tparams.
+func makeSubster(ctxt *typeparams.Context, tparams []*typeparams.TypeParam, targs []types.Type, debug bool) *subster {
+	assert(len(tparams) == len(targs), "makeSubster argument count must match")
+
+	subst := &subster{
+		replacements: make(map[*typeparams.TypeParam]types.Type, len(tparams)),
+		cache:        make(map[types.Type]types.Type),
+		ctxt:         ctxt,
+		debug:        debug,
+	}
+	for i, tpar := range tparams {
+		subst.replacements[tpar] = targs[i]
+	}
+	if subst.debug {
+		if err := subst.wellFormed(); err != nil {
+			panic(err)
+		}
+	}
+	return subst
+}
+
+// wellFormed returns an error if subst was not properly initialized.
+func (subst *subster) wellFormed() error {
+	if subst == nil || len(subst.replacements) == 0 {
+		return nil
+	}
+	// Check that all of the type params do not appear in the arguments.
+	s := make(map[types.Type]bool, len(subst.replacements))
+	for tparam := range subst.replacements {
+		s[tparam] = true
+	}
+	for _, r := range subst.replacements {
+		if reaches(r, s) {
+			return fmt.Errorf("\nr %s s %v replacements %v\n", r, s, subst.replacements)
+		}
+	}
+	return nil
+}
+
+// typ returns the type of t with the type parameter tparams[i] substituted
+// for the type targs[i] where subst was created using tparams and targs.
+func (subst *subster) typ(t types.Type) (res types.Type) {
+	if subst == nil {
+		return t // A nil subst is type preserving.
+	}
+	if r, ok := subst.cache[t]; ok {
+		return r
+	}
+	defer func() {
+		subst.cache[t] = res
+	}()
+
+	// fall through if result r will be identical to t, types.Identical(r, t).
+	switch t := t.(type) {
+	case *typeparams.TypeParam:
+		r := subst.replacements[t]
+		assert(r != nil, "type param without replacement encountered")
+		return r
+
+	case *types.Basic:
+		return t
+
+	case *types.Array:
+		if r := subst.typ(t.Elem()); r != t.Elem() {
+			return types.NewArray(r, t.Len())
+		}
+		return t
+
+	case *types.Slice:
+		if r := subst.typ(t.Elem()); r != t.Elem() {
+			return types.NewSlice(r)
+		}
+		return t
+
+	case *types.Pointer:
+		if r := subst.typ(t.Elem()); r != t.Elem() {
+			return types.NewPointer(r)
+		}
+		return t
+
+	case *types.Tuple:
+		return subst.tuple(t)
+
+	case *types.Struct:
+		return subst.struct_(t)
+
+	case *types.Map:
+		key := subst.typ(t.Key())
+		elem := subst.typ(t.Elem())
+		if key != t.Key() || elem != t.Elem() {
+			return types.NewMap(key, elem)
+		}
+		return t
+
+	case *types.Chan:
+		if elem := subst.typ(t.Elem()); elem != t.Elem() {
+			return types.NewChan(t.Dir(), elem)
+		}
+		return t
+
+	case *types.Signature:
+		return subst.signature(t)
+
+	case *typeparams.Union:
+		return subst.union(t)
+
+	case *types.Interface:
+		return subst.interface_(t)
+
+	case *types.Named:
+		return subst.named(t)
+
+	default:
+		panic("unreachable")
+	}
+}
+
+func (subst *subster) tuple(t *types.Tuple) *types.Tuple {
+	if t != nil {
+		if vars := subst.varlist(t); vars != nil {
+			return types.NewTuple(vars...)
+		}
+	}
+	return t
+}
+
+type varlist interface {
+	At(i int) *types.Var
+	Len() int
+}
+
+// fieldlist is an adapter for structs for the varlist interface.
+type fieldlist struct {
+	str *types.Struct
+}
+
+func (fl fieldlist) At(i int) *types.Var { return fl.str.Field(i) }
+func (fl fieldlist) Len() int            { return fl.str.NumFields() }
+
+func (subst *subster) struct_(t *types.Struct) *types.Struct {
+	if t != nil {
+		if fields := subst.varlist(fieldlist{t}); fields != nil {
+			tags := make([]string, t.NumFields())
+			for i, n := 0, t.NumFields(); i < n; i++ {
+				tags[i] = t.Tag(i)
+			}
+			return types.NewStruct(fields, tags)
+		}
+	}
+	return t
+}
+
+// varlist returns the slice of subst(in[i]), or nil if subst(in[i]) == in[i] for all i.
+func (subst *subster) varlist(in varlist) []*types.Var {
+	var out []*types.Var // nil => no updates
+	for i, n := 0, in.Len(); i < n; i++ {
+		v := in.At(i)
+		w := subst.var_(v)
+		if v != w && out == nil {
+			out = make([]*types.Var, n)
+			for j := 0; j < i; j++ {
+				out[j] = in.At(j)
+			}
+		}
+		if out != nil {
+			out[i] = w
+		}
+	}
+	return out
+}
+
+func (subst *subster) var_(v *types.Var) *types.Var {
+	if v != nil {
+		if typ := subst.typ(v.Type()); typ != v.Type() {
+			if v.IsField() {
+				return types.NewField(v.Pos(), v.Pkg(), v.Name(), typ, v.Embedded())
+			}
+			return types.NewVar(v.Pos(), v.Pkg(), v.Name(), typ)
+		}
+	}
+	return v
+}
+
+func (subst *subster) union(u *typeparams.Union) *typeparams.Union {
+	var out []*typeparams.Term // nil => no updates
+
+	for i, n := 0, u.Len(); i < n; i++ {
+		t := u.Term(i)
+		r := subst.typ(t.Type())
+		if r != t.Type() && out == nil {
+			out = make([]*typeparams.Term, n)
+			for j := 0; j < i; j++ {
+				out[j] = u.Term(j)
+			}
+		}
+		if out != nil {
+			out[i] = typeparams.NewTerm(t.Tilde(), r)
+		}
+	}
+
+	if out != nil {
+		return typeparams.NewUnion(out)
+	}
+	return u
+}
+
+func (subst *subster) interface_(iface *types.Interface) *types.Interface {
+	if iface == nil {
+		return nil
+	}
+
+	// methods for the new interface; nil until a change is known to be needed.
+	// Method signatures are built with a nil recv; NewInterfaceType fills in the receivers.
+	var methods []*types.Func
+	initMethods := func(n int) { // copy first n explicit methods
+		methods = make([]*types.Func, iface.NumExplicitMethods())
+		for i := 0; i < n; i++ {
+			f := iface.ExplicitMethod(i)
+			norecv := changeRecv(f.Type().(*types.Signature), nil)
+			methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), norecv)
+		}
+	}
+	for i := 0; i < iface.NumExplicitMethods(); i++ {
+		f := iface.ExplicitMethod(i)
+		// For interfaces we need to break cycles: an anonymous interface type can be
+		// in a cycle with its method signatures, whose receivers refer back to the
+		// interface without going through a Named type.
+		norecv := changeRecv(f.Type().(*types.Signature), nil)
+		sig := subst.typ(norecv)
+		if sig != norecv && methods == nil {
+			initMethods(i)
+		}
+		if methods != nil {
+			methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), sig.(*types.Signature))
+		}
+	}
+
+	var embeds []types.Type
+	initEmbeds := func(n int) { // copy first n embedded types
+		embeds = make([]types.Type, iface.NumEmbeddeds())
+		for i := 0; i < n; i++ {
+			embeds[i] = iface.EmbeddedType(i)
+		}
+	}
+	for i := 0; i < iface.NumEmbeddeds(); i++ {
+		e := iface.EmbeddedType(i)
+		r := subst.typ(e)
+		if e != r && embeds == nil {
+			initEmbeds(i)
+		}
+		if embeds != nil {
+			embeds[i] = r
+		}
+	}
+
+	if methods == nil && embeds == nil {
+		return iface
+	}
+	if methods == nil {
+		initMethods(iface.NumExplicitMethods())
+	}
+	if embeds == nil {
+		initEmbeds(iface.NumEmbeddeds())
+	}
+	return types.NewInterfaceType(methods, embeds).Complete()
+}
+
+func (subst *subster) named(t *types.Named) types.Type {
+	// A named type may be:
+	// (1) ordinary (no type parameters, no type arguments),
+	// (2) generic (type parameters but no type arguments), or
+	// (3) instantiated (type parameters and type arguments).
+	tparams := typeparams.ForNamed(t)
+	if tparams.Len() == 0 {
+		// case (1) ordinary
+
+		// Note: If Go allows for local type declarations in generic
+		// functions we may need to descend into underlying as well.
+		return t
+	}
+	targs := typeparams.NamedTypeArgs(t)
+
+	// insts are arguments to instantiate using.
+	insts := make([]types.Type, tparams.Len())
+
+	// case (2) generic ==> targs.Len() == 0
+	// Instantiating a generic with no type arguments should be unreachable.
+	// Please report a bug if you encounter this.
+	assert(targs.Len() != 0, "substitution into a generic Named type is currently unsupported")
+
+	// case (3) instantiated.
+	// Substitute into the type arguments and instantiate the replacements.
+	// Example:
+	//    type N[A any] func() A
+	//    func Foo[T any](g N[T]) {}
+	//  To instantiate Foo[string], one goes through {T->string}. To get the type of g
+	//  one substitutes T with string in {N with TypeArgs == {T} and TypeParams == {A} }
+	//  to get {N with TypeArgs == {string} and TypeParams == {A} }.
+	assert(targs.Len() == tparams.Len(), "TypeArgs().Len() must match TypeParams().Len() if present")
+	for i, n := 0, targs.Len(); i < n; i++ {
+		inst := subst.typ(targs.At(i)) // TODO(generic): Check with rfindley for mutual recursion
+		insts[i] = inst
+	}
+	r, err := typeparams.Instantiate(subst.ctxt, typeparams.NamedTypeOrigin(t), insts, false)
+	assert(err == nil, "failed to Instantiate Named type")
+	return r
+}
+
+func (subst *subster) signature(t *types.Signature) types.Type {
+	tparams := typeparams.ForSignature(t)
+
+	// We are choosing not to support tparams.Len() > 0 until a need has been observed in practice.
+	//
+	// There are some known usages for types.Type values coming from types.{Eval,CheckExpr}.
+	// To support tparams.Len() > 0, we just need to do the following [pseudocode]:
+	//   targs := {subst.replacements[tparams[i]]}; Instantiate(ctxt, t, targs, false)
+
+	assert(tparams.Len() == 0, "Substituting types.Signatures with generic functions is currently unsupported.")
+
+	// Either:
+	// (1) non-generic function:
+	//     no type params to substitute, or
+	// (2) generic method whose recv needs to be substituted.
+
+	// Receivers can be either:
+	// named,
+	// pointer to named,
+	// interface, or
+	// nil.
+	// The interface case is the problematic one: we need to break cycles there.
+	recv := subst.var_(t.Recv())
+	params := subst.tuple(t.Params())
+	results := subst.tuple(t.Results())
+	if recv != t.Recv() || params != t.Params() || results != t.Results() {
+		return types.NewSignature(recv, params, results, t.Variadic())
+	}
+	return t
+}
+
+// reaches returns true if a type t reaches any type t' s.t. c[t'] == true.
+// Updates c to cache results.
+func reaches(t types.Type, c map[types.Type]bool) (res bool) {
+	if c, ok := c[t]; ok {
+		return c
+	}
+	c[t] = false // prevent cycles
+	defer func() {
+		c[t] = res
+	}()
+
+	switch t := t.(type) {
+	case *typeparams.TypeParam, *types.Basic:
+		// no-op => c == false
+	case *types.Array:
+		return reaches(t.Elem(), c)
+	case *types.Slice:
+		return reaches(t.Elem(), c)
+	case *types.Pointer:
+		return reaches(t.Elem(), c)
+	case *types.Tuple:
+		for i := 0; i < t.Len(); i++ {
+			if reaches(t.At(i).Type(), c) {
+				return true
+			}
+		}
+	case *types.Struct:
+		for i := 0; i < t.NumFields(); i++ {
+			if reaches(t.Field(i).Type(), c) {
+				return true
+			}
+		}
+	case *types.Map:
+		return reaches(t.Key(), c) || reaches(t.Elem(), c)
+	case *types.Chan:
+		return reaches(t.Elem(), c)
+	case *types.Signature:
+		if t.Recv() != nil && reaches(t.Recv().Type(), c) {
+			return true
+		}
+		return reaches(t.Params(), c) || reaches(t.Results(), c)
+	case *typeparams.Union:
+		for i := 0; i < t.Len(); i++ {
+			if reaches(t.Term(i).Type(), c) {
+				return true
+			}
+		}
+	case *types.Interface:
+		for i := 0; i < t.NumEmbeddeds(); i++ {
+			if reaches(t.Embedded(i), c) {
+				return true
+			}
+		}
+		for i := 0; i < t.NumExplicitMethods(); i++ {
+			if reaches(t.ExplicitMethod(i).Type(), c) {
+				return true
+			}
+		}
+	case *types.Named:
+		return reaches(t.Underlying(), c)
+	default:
+		panic("unreachable")
+	}
+	return false
+}
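makeSubster and subster.typ are meant to be used together: build the substitution from a declaration's type parameters and its concrete type arguments, then rewrite types drawn from that declaration's body. A hedged sketch mirroring the test that follows (the wrapper is illustrative):

// substituteType rewrites t by replacing tparams[i] with targs[i].
func substituteType(ctxt *typeparams.Context, tparams []*typeparams.TypeParam, targs []types.Type, t types.Type) types.Type {
	subst := makeSubster(ctxt, tparams, targs, false /* debug */)
	return subst.typ(t)
}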
diff --git a/go/ssa/subst_test.go b/go/ssa/subst_test.go
new file mode 100644
index 0000000..fe84adc
--- /dev/null
+++ b/go/ssa/subst_test.go
@@ -0,0 +1,113 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"testing"
+
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func TestSubst(t *testing.T) {
+	if !typeparams.Enabled {
+		return
+	}
+
+	const source = `
+package P
+
+type t0 int
+func (t0) f()
+type t1 interface{ f() }
+type t2 interface{ g() }
+type t3 interface{ ~int }
+
+func Fn0[T t1](x T) T {
+	x.f()
+	return x
+}
+
+type A[T any] [4]T
+type B[T any] []T
+type C[T, S any] []struct{s S; t T}
+type D[T, S any] *struct{s S; t *T}
+type E[T, S any] interface{ F() (T, S) }
+type F[K comparable, V any] map[K]V
+type G[T any] chan *T
+type H[T any] func() T
+type I[T any] struct{x, y, z int; t T}
+type J[T any] interface{ t1 }
+type K[T any] interface{ t1; F() T }
+type L[T any] interface{ F() T; J[T] }
+
+var _ L[int] = Fn0[L[int]](nil)
+`
+
+	fset := token.NewFileSet()
+	f, err := parser.ParseFile(fset, "hello.go", source, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	var conf types.Config
+	pkg, err := conf.Check("P", fset, []*ast.File{f}, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	for _, test := range []struct {
+		expr string   // type expression of Named parameterized type
+		args []string // type expressions of args for named
+		want string   // expected underlying value after substitution
+	}{
+		{"A", []string{"string"}, "[4]string"},
+		{"A", []string{"int"}, "[4]int"},
+		{"B", []string{"int"}, "[]int"},
+		{"B", []string{"int8"}, "[]int8"},
+		{"C", []string{"int8", "string"}, "[]struct{s string; t int8}"},
+		{"C", []string{"string", "int8"}, "[]struct{s int8; t string}"},
+		{"D", []string{"int16", "string"}, "*struct{s string; t *int16}"},
+		{"E", []string{"int32", "string"}, "interface{F() (int32, string)}"},
+		{"F", []string{"int64", "string"}, "map[int64]string"},
+		{"G", []string{"uint64"}, "chan *uint64"},
+		{"H", []string{"uintptr"}, "func() uintptr"},
+		{"I", []string{"t0"}, "struct{x int; y int; z int; t P.t0}"},
+		{"J", []string{"t0"}, "interface{P.t1}"},
+		{"K", []string{"t0"}, "interface{F() P.t0; P.t1}"},
+		{"L", []string{"t0"}, "interface{F() P.t0; P.J[P.t0]}"},
+		{"L", []string{"L[t0]"}, "interface{F() P.L[P.t0]; P.J[P.L[P.t0]]}"},
+	} {
+		// Eval() expr for its type.
+		tv, err := types.Eval(fset, pkg, 0, test.expr)
+		if err != nil {
+			t.Fatalf("Eval(%s) failed: %v", test.expr, err)
+		}
+		// Eval() test.args[i] to get the i'th type arg.
+		var targs []types.Type
+		for _, astr := range test.args {
+			tv, err := types.Eval(fset, pkg, 0, astr)
+			if err != nil {
+				t.Fatalf("Eval(%s) failed: %v", astr, err)
+			}
+			targs = append(targs, tv.Type)
+		}
+
+		T := tv.Type.(*types.Named)
+		var tparams []*typeparams.TypeParam
+		for i, l := 0, typeparams.ForNamed(T); i < l.Len(); i++ {
+			tparams = append(tparams, l.At(i))
+		}
+
+		subst := makeSubster(typeparams.NewContext(), tparams, targs, true)
+		sub := subst.typ(T.Underlying())
+		if got := sub.String(); got != test.want {
+			t.Errorf("subst{%v->%v}.typ(%s) = %v, want %v", test.expr, test.args, T.Underlying(), got, test.want)
+		}
+	}
+}
diff --git a/go/ssa/testhelper_test.go b/go/ssa/testhelper_test.go
new file mode 100644
index 0000000..8d08bbb
--- /dev/null
+++ b/go/ssa/testhelper_test.go
@@ -0,0 +1,10 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// SetNormalizeAnyForTesting is exported here for external tests.
+func SetNormalizeAnyForTesting(normalize bool) {
+	normalizeAnyForTesting = normalize
+}
diff --git a/go/ssa/testmain.go b/go/ssa/testmain.go
deleted file mode 100644
index c4256d1..0000000
--- a/go/ssa/testmain.go
+++ /dev/null
@@ -1,274 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package ssa
-
-// CreateTestMainPackage synthesizes a main package that runs all the
-// tests of the supplied packages.
-// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing.
-//
-// TODO(adonovan): throws this all away now that x/tools/go/packages
-// provides access to the actual synthetic test main files.
-
-import (
-	"bytes"
-	"fmt"
-	"go/ast"
-	"go/parser"
-	"go/types"
-	"log"
-	"os"
-	"strings"
-	"text/template"
-)
-
-// FindTests returns the Test, Benchmark, and Example functions
-// (as defined by "go test") defined in the specified package,
-// and its TestMain function, if any.
-//
-// Deprecated: Use golang.org/x/tools/go/packages to access synthetic
-// testmain packages.
-func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) {
-	prog := pkg.Prog
-
-	// The first two of these may be nil: if the program doesn't import "testing",
-	// it can't contain any tests, but it may yet contain Examples.
-	var testSig *types.Signature                              // func(*testing.T)
-	var benchmarkSig *types.Signature                         // func(*testing.B)
-	var exampleSig = types.NewSignature(nil, nil, nil, false) // func()
-
-	// Obtain the types from the parameters of testing.MainStart.
-	if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
-		mainStart := testingPkg.Func("MainStart")
-		params := mainStart.Signature.Params()
-		testSig = funcField(params.At(1).Type())
-		benchmarkSig = funcField(params.At(2).Type())
-
-		// Does the package define this function?
-		//   func TestMain(*testing.M)
-		if f := pkg.Func("TestMain"); f != nil {
-			sig := f.Type().(*types.Signature)
-			starM := mainStart.Signature.Results().At(0).Type() // *testing.M
-			if sig.Results().Len() == 0 &&
-				sig.Params().Len() == 1 &&
-				types.Identical(sig.Params().At(0).Type(), starM) {
-				main = f
-			}
-		}
-	}
-
-	// TODO(adonovan): use a stable order, e.g. lexical.
-	for _, mem := range pkg.Members {
-		if f, ok := mem.(*Function); ok &&
-			ast.IsExported(f.Name()) &&
-			strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") {
-
-			switch {
-			case testSig != nil && isTestSig(f, "Test", testSig):
-				tests = append(tests, f)
-			case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig):
-				benchmarks = append(benchmarks, f)
-			case isTestSig(f, "Example", exampleSig):
-				examples = append(examples, f)
-			default:
-				continue
-			}
-		}
-	}
-	return
-}
-
-// Like isTest, but checks the signature too.
-func isTestSig(f *Function, prefix string, sig *types.Signature) bool {
-	return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig)
-}
-
-// Given the type of one of the three slice parameters of testing.Main,
-// returns the function type.
-func funcField(slice types.Type) *types.Signature {
-	return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature)
-}
-
-// isTest tells whether name looks like a test (or benchmark, according to prefix).
-// It is a Test (say) if there is a character after Test that is not a lower-case letter.
-// We don't want TesticularCancer.
-// Plundered from $GOROOT/src/cmd/go/test.go
-func isTest(name, prefix string) bool {
-	if !strings.HasPrefix(name, prefix) {
-		return false
-	}
-	if len(name) == len(prefix) { // "Test" is ok
-		return true
-	}
-	return ast.IsExported(name[len(prefix):])
-}
-
-// CreateTestMainPackage creates and returns a synthetic "testmain"
-// package for the specified package if it defines tests, benchmarks or
-// executable examples, or nil otherwise.  The new package is named
-// "main" and provides a function named "main" that runs the tests,
-// similar to the one that would be created by the 'go test' tool.
-//
-// Subsequent calls to prog.AllPackages include the new package.
-// The package pkg must belong to the program prog.
-//
-// Deprecated: Use golang.org/x/tools/go/packages to access synthetic
-// testmain packages.
-func (prog *Program) CreateTestMainPackage(pkg *Package) *Package {
-	if pkg.Prog != prog {
-		log.Fatal("Package does not belong to Program")
-	}
-
-	// Template data
-	var data struct {
-		Pkg                         *Package
-		Tests, Benchmarks, Examples []*Function
-		Main                        *Function
-		Go18                        bool
-	}
-	data.Pkg = pkg
-
-	// Enumerate tests.
-	data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg)
-	if data.Main == nil &&
-		data.Tests == nil && data.Benchmarks == nil && data.Examples == nil {
-		return nil
-	}
-
-	// Synthesize source for testmain package.
-	path := pkg.Pkg.Path() + "$testmain"
-	tmpl := testmainTmpl
-	if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
-		// In Go 1.8, testing.MainStart's first argument is an interface, not a func.
-		data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type())
-	} else {
-		// The program does not import "testing", but FindTests
-		// returned non-nil, which must mean there were Examples
-		// but no Test, Benchmark, or TestMain functions.
-
-		// We'll simply call them from testmain.main; this will
-		// ensure they don't panic, but will not check any
-		// "Output:" comments.
-		// (We should not execute an Example that has no
-		// "Output:" comment, but it's impossible to tell here.)
-		tmpl = examplesOnlyTmpl
-	}
-	var buf bytes.Buffer
-	if err := tmpl.Execute(&buf, data); err != nil {
-		log.Fatalf("internal error expanding template for %s: %v", path, err)
-	}
-	if false { // debugging
-		fmt.Fprintln(os.Stderr, buf.String())
-	}
-
-	// Parse and type-check the testmain package.
-	f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0))
-	if err != nil {
-		log.Fatalf("internal error parsing %s: %v", path, err)
-	}
-	conf := types.Config{
-		DisableUnusedImportCheck: true,
-		Importer:                 importer{pkg},
-	}
-	files := []*ast.File{f}
-	info := &types.Info{
-		Types:      make(map[ast.Expr]types.TypeAndValue),
-		Defs:       make(map[*ast.Ident]types.Object),
-		Uses:       make(map[*ast.Ident]types.Object),
-		Implicits:  make(map[ast.Node]types.Object),
-		Scopes:     make(map[ast.Node]*types.Scope),
-		Selections: make(map[*ast.SelectorExpr]*types.Selection),
-	}
-	testmainPkg, err := conf.Check(path, prog.Fset, files, info)
-	if err != nil {
-		log.Fatalf("internal error type-checking %s: %v", path, err)
-	}
-
-	// Create and build SSA code.
-	testmain := prog.CreatePackage(testmainPkg, files, info, false)
-	testmain.SetDebugMode(false)
-	testmain.Build()
-	testmain.Func("main").Synthetic = "test main function"
-	testmain.Func("init").Synthetic = "package initializer"
-	return testmain
-}
-
-// An implementation of types.Importer for an already loaded SSA program.
-type importer struct {
-	pkg *Package // package under test; may be non-importable
-}
-
-func (imp importer) Import(path string) (*types.Package, error) {
-	if p := imp.pkg.Prog.ImportedPackage(path); p != nil {
-		return p.Pkg, nil
-	}
-	if path == imp.pkg.Pkg.Path() {
-		return imp.pkg.Pkg, nil
-	}
-	return nil, fmt.Errorf("not found") // can't happen
-}
-
-var testmainTmpl = template.Must(template.New("testmain").Parse(`
-package main
-
-import "io"
-import "os"
-import "testing"
-import p {{printf "%q" .Pkg.Pkg.Path}}
-
-{{if .Go18}}
-type deps struct{}
-
-func (deps) ImportPath() string { return "" }
-func (deps) MatchString(pat, str string) (bool, error) { return true, nil }
-func (deps) SetPanicOnExit0(bool) {}
-func (deps) StartCPUProfile(io.Writer) error { return nil }
-func (deps) StartTestLog(io.Writer) {}
-func (deps) StopCPUProfile() {}
-func (deps) StopTestLog() error { return nil }
-func (deps) WriteHeapProfile(io.Writer) error { return nil }
-func (deps) WriteProfileTo(string, io.Writer, int) error { return nil }
-
-var match deps
-{{else}}
-func match(_, _ string) (bool, error) { return true, nil }
-{{end}}
-
-func main() {
-	tests := []testing.InternalTest{
-{{range .Tests}}
-		{ {{printf "%q" .Name}}, p.{{.Name}} },
-{{end}}
-	}
-	benchmarks := []testing.InternalBenchmark{
-{{range .Benchmarks}}
-		{ {{printf "%q" .Name}}, p.{{.Name}} },
-{{end}}
-	}
-	examples := []testing.InternalExample{
-{{range .Examples}}
-		{Name: {{printf "%q" .Name}}, F: p.{{.Name}}},
-{{end}}
-	}
-	m := testing.MainStart(match, tests, benchmarks, examples)
-{{with .Main}}
-	p.{{.Name}}(m)
-{{else}}
-	os.Exit(m.Run())
-{{end}}
-}
-
-`))
-
-var examplesOnlyTmpl = template.Must(template.New("examples").Parse(`
-package main
-
-import p {{printf "%q" .Pkg.Pkg.Path}}
-
-func main() {
-{{range .Examples}}
-	p.{{.Name}}()
-{{end}}
-}
-`))
diff --git a/go/ssa/testmain_test.go b/go/ssa/testmain_test.go
deleted file mode 100644
index e24b23b..0000000
--- a/go/ssa/testmain_test.go
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package ssa_test
-
-// Tests of FindTests.  CreateTestMainPackage is tested via the interpreter.
-// TODO(adonovan): test the 'pkgs' result from FindTests.
-
-import (
-	"fmt"
-	"sort"
-	"testing"
-
-	"golang.org/x/tools/go/loader"
-	"golang.org/x/tools/go/ssa"
-	"golang.org/x/tools/go/ssa/ssautil"
-)
-
-func create(t *testing.T, content string) *ssa.Package {
-	var conf loader.Config
-	f, err := conf.ParseFile("foo_test.go", content)
-	if err != nil {
-		t.Fatal(err)
-	}
-	conf.CreateFromFiles("foo", f)
-
-	lprog, err := conf.Load()
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	// We needn't call Build.
-	foo := lprog.Package("foo").Pkg
-	return ssautil.CreateProgram(lprog, ssa.SanityCheckFunctions).Package(foo)
-}
-
-func TestFindTests(t *testing.T) {
-	test := `
-package foo
-
-import "testing"
-
-type T int
-
-// Tests:
-func Test(t *testing.T) {}
-func TestA(t *testing.T) {}
-func TestB(t *testing.T) {}
-
-// Not tests:
-func testC(t *testing.T) {}
-func TestD() {}
-func testE(t *testing.T) int { return 0 }
-func (T) Test(t *testing.T) {}
-
-// Benchmarks:
-func Benchmark(*testing.B) {}
-func BenchmarkA(b *testing.B) {}
-func BenchmarkB(*testing.B) {}
-
-// Not benchmarks:
-func benchmarkC(t *testing.T) {}
-func BenchmarkD() {}
-func benchmarkE(t *testing.T) int { return 0 }
-func (T) Benchmark(t *testing.T) {}
-
-// Examples:
-func Example() {}
-func ExampleA() {}
-
-// Not examples:
-func exampleC() {}
-func ExampleD(t *testing.T) {}
-func exampleE() int { return 0 }
-func (T) Example() {}
-`
-	pkg := create(t, test)
-	tests, benchmarks, examples, _ := ssa.FindTests(pkg)
-
-	sort.Sort(funcsByPos(tests))
-	if got, want := fmt.Sprint(tests), "[foo.Test foo.TestA foo.TestB]"; got != want {
-		t.Errorf("FindTests.tests = %s, want %s", got, want)
-	}
-
-	sort.Sort(funcsByPos(benchmarks))
-	if got, want := fmt.Sprint(benchmarks), "[foo.Benchmark foo.BenchmarkA foo.BenchmarkB]"; got != want {
-		t.Errorf("FindTests.benchmarks = %s, want %s", got, want)
-	}
-
-	sort.Sort(funcsByPos(examples))
-	if got, want := fmt.Sprint(examples), "[foo.Example foo.ExampleA]"; got != want {
-		t.Errorf("FindTests examples = %s, want %s", got, want)
-	}
-}
-
-func TestFindTestsTesting(t *testing.T) {
-	test := `
-package foo
-
-// foo does not import "testing", but defines Examples.
-
-func Example() {}
-func ExampleA() {}
-`
-	pkg := create(t, test)
-	tests, benchmarks, examples, _ := ssa.FindTests(pkg)
-	if len(tests) > 0 {
-		t.Errorf("FindTests.tests = %s, want none", tests)
-	}
-	if len(benchmarks) > 0 {
-		t.Errorf("FindTests.benchmarks = %s, want none", benchmarks)
-	}
-	sort.Sort(funcsByPos(examples))
-	if got, want := fmt.Sprint(examples), "[foo.Example foo.ExampleA]"; got != want {
-		t.Errorf("FindTests examples = %s, want %s", got, want)
-	}
-}
-
-type funcsByPos []*ssa.Function
-
-func (p funcsByPos) Len() int           { return len(p) }
-func (p funcsByPos) Less(i, j int) bool { return p[i].Pos() < p[j].Pos() }
-func (p funcsByPos) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
diff --git a/go/ssa/util.go b/go/ssa/util.go
index a09949a..0102193 100644
--- a/go/ssa/util.go
+++ b/go/ssa/util.go
@@ -13,10 +13,22 @@
 	"go/types"
 	"io"
 	"os"
+	"sync"
 
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/go/types/typeutil"
 )
 
+//// Sanity checking utilities
+
+// assert panics with the message msg if p is false.
+// Avoid combining with expensive string formatting.
+func assert(p bool, msg string) {
+	if !p {
+		panic(msg)
+	}
+}
+
 //// AST utilities
 
 func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
@@ -87,3 +99,36 @@
 		sig:  types.NewSignature(nil, lenParams, lenResults, false),
 	}
 }
+
+// Mapping of a type T to a canonical instance C s.t. types.Identical(T, C).
+// Thread-safe.
+type canonizer struct {
+	mu    sync.Mutex
+	canon typeutil.Map // map from type to a canonical instance
+}
+
+// Tuple returns a canonical representative of a Tuple of types.
+// The representative of the empty Tuple is nil.
+func (c *canonizer) Tuple(ts []types.Type) *types.Tuple {
+	if len(ts) == 0 {
+		return nil
+	}
+	vars := make([]*types.Var, len(ts))
+	for i, t := range ts {
+		vars[i] = anonVar(t)
+	}
+	tuple := types.NewTuple(vars...)
+	return c.Type(tuple).(*types.Tuple)
+}
+
+// Type returns a canonical representative of type T.
+func (c *canonizer) Type(T types.Type) types.Type {
+	c.mu.Lock()
+	defer c.mu.Unlock()
+
+	if r := c.canon.At(T); r != nil {
+		return r.(types.Type)
+	}
+	c.canon.Set(T, T)
+	return T
+}
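Because canonizer.Type keys its map with typeutil.Map, structurally identical but distinct types.Type values collapse to one representative, which is what makes the canonicalized receiver a safe thunk key in wrappers.go below. A hedged illustration:

// canonExample reports whether two independently constructed, identical
// slice types canonicalize to the same representative (they should).
func canonExample(c *canonizer) bool {
	t1 := types.NewSlice(types.Typ[types.Int])
	t2 := types.NewSlice(types.Typ[types.Int])
	return c.Type(t1) == c.Type(t2)
}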
diff --git a/go/ssa/wrappers.go b/go/ssa/wrappers.go
index a4ae71d..90ddc9d 100644
--- a/go/ssa/wrappers.go
+++ b/go/ssa/wrappers.go
@@ -72,6 +72,7 @@
 		Synthetic: description,
 		Prog:      prog,
 		pos:       obj.Pos(),
+		info:      nil, // info is not set on wrappers.
 	}
 	fn.startBody()
 	fn.addSpilledParam(recv)
@@ -190,6 +191,7 @@
 			Synthetic: description,
 			Prog:      prog,
 			pos:       obj.Pos(),
+			info:      nil, // info is not set on wrappers.
 		}
 
 		fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn}
@@ -246,9 +248,11 @@
 		panic(sel)
 	}
 
+	// Canonicalize sel.Recv() to avoid constructing duplicate thunks.
+	canonRecv := prog.canon.Type(sel.Recv())
 	key := selectionKey{
 		kind:     sel.Kind(),
-		recv:     sel.Recv(),
+		recv:     canonRecv,
 		obj:      sel.Obj(),
 		index:    fmt.Sprint(sel.Index()),
 		indirect: sel.Indirect(),
@@ -257,14 +261,6 @@
 	prog.methodsMu.Lock()
 	defer prog.methodsMu.Unlock()
 
-	// Canonicalize key.recv to avoid constructing duplicate thunks.
-	canonRecv, ok := prog.canon.At(key.recv).(types.Type)
-	if !ok {
-		canonRecv = key.recv
-		prog.canon.Set(key.recv, canonRecv)
-	}
-	key.recv = canonRecv
-
 	fn, ok := prog.thunks[key]
 	if !ok {
 		fn = makeWrapper(prog, sel)
diff --git a/go/types/objectpath/objectpath.go b/go/types/objectpath/objectpath.go
index 81e8fdc..557202b 100644
--- a/go/types/objectpath/objectpath.go
+++ b/go/types/objectpath/objectpath.go
@@ -23,10 +23,12 @@
 
 import (
 	"fmt"
+	"go/types"
+	"sort"
 	"strconv"
 	"strings"
 
-	"go/types"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // A Path is an opaque name that identifies a types.Object
@@ -57,12 +59,16 @@
 // - The only PO operator is Package.Scope.Lookup, which requires an identifier.
 // - The only OT operator is Object.Type,
 //   which we encode as '.' because dot cannot appear in an identifier.
-// - The TT operators are encoded as [EKPRU].
+// - The TT operators are encoded as [EKPRUTC];
+//   one of these (TypeParam) requires an integer operand,
+//   which is encoded as a string of decimal digits.
 // - The TO operators are encoded as [AFMO];
 //   three of these (At,Field,Method) require an integer operand,
 //   which is encoded as a string of decimal digits.
 //   These indices are stable across different representations
 //   of the same package, even source and export data.
+//   The indices used are implementation specific and may not correspond to
+//   the argument to the go/types function.
 //
 // In the example below,
 //
@@ -89,17 +95,19 @@
 	opType = '.' // .Type()		  (Object)
 
 	// type->type operators
-	opElem       = 'E' // .Elem()		(Pointer, Slice, Array, Chan, Map)
-	opKey        = 'K' // .Key()		(Map)
-	opParams     = 'P' // .Params()		(Signature)
-	opResults    = 'R' // .Results()	(Signature)
-	opUnderlying = 'U' // .Underlying()	(Named)
+	opElem       = 'E' // .Elem()		        (Pointer, Slice, Array, Chan, Map)
+	opKey        = 'K' // .Key()		        (Map)
+	opParams     = 'P' // .Params()		      (Signature)
+	opResults    = 'R' // .Results()	      (Signature)
+	opUnderlying = 'U' // .Underlying()	    (Named)
+	opTypeParam  = 'T' // .TypeParams.At(i) (Named, Signature)
+	opConstraint = 'C' // .Constraint()     (TypeParam)
 
 	// type->object operators
-	opAt     = 'A' // .At(i)		(Tuple)
-	opField  = 'F' // .Field(i)		(Struct)
-	opMethod = 'M' // .Method(i)		(Named or Interface; not Struct: "promoted" names are ignored)
-	opObj    = 'O' // .Obj()		(Named)
+	opAt     = 'A' // .At(i)		 (Tuple)
+	opField  = 'F' // .Field(i)	 (Struct)
+	opMethod = 'M' // .Method(i) (Named or Interface; not Struct: "promoted" names are ignored)
+	opObj    = 'O' // .Obj()		 (Named, TypeParam)
 )
 
 // The For function returns the path to an object relative to its package,
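The new opTypeParam ('T') and opConstraint ('C') operators make type parameters addressable. As a hedged illustration (not taken from the patch), for a package-level declaration func F[T any](x T) T, the path to T should read: look up F, take its type ('.'), take type parameter 0 ('T' plus the index), then its object ('O'), i.e. something like "F.T0O". A sketch of a round trip through the public API:

// typeParamPath is a hypothetical check: encode the path of a type
// parameter's TypeName and decode it back within the same package.
func typeParamPath(pkg *types.Package, tparamName *types.TypeName) (objectpath.Path, error) {
	p, err := objectpath.For(tparamName) // e.g. "F.T0O" under the encoding above
	if err != nil {
		return "", err
	}
	if _, err := objectpath.Object(pkg, p); err != nil {
		return "", err
	}
	return p, nil
}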
@@ -190,10 +198,15 @@
 	// 3. Not a package-level object.
 	//    Reject obviously non-viable cases.
 	switch obj := obj.(type) {
+	case *types.TypeName:
+		if _, ok := obj.Type().(*typeparams.TypeParam); !ok {
+			// With the exception of type parameters, only package-level type names
+			// have a path.
+			return "", fmt.Errorf("no path for %v", obj)
+		}
 	case *types.Const, // Only package-level constants have a path.
-		*types.TypeName, // Only package-level types have a path.
-		*types.Label,    // Labels are function-local.
-		*types.PkgName:  // PkgNames are file-local.
+		*types.Label,   // Labels are function-local.
+		*types.PkgName: // PkgNames are file-local.
 		return "", fmt.Errorf("no path for %v", obj)
 
 	case *types.Var:
@@ -241,12 +254,18 @@
 
 		if tname.IsAlias() {
 			// type alias
-			if r := find(obj, T, path); r != nil {
+			if r := find(obj, T, path, nil); r != nil {
 				return Path(r), nil
 			}
 		} else {
+			if named, _ := T.(*types.Named); named != nil {
+				if r := findTypeParam(obj, typeparams.ForNamed(named), path, nil); r != nil {
+					// generic named type
+					return Path(r), nil
+				}
+			}
 			// defined (named) type
-			if r := find(obj, T.Underlying(), append(path, opUnderlying)); r != nil {
+			if r := find(obj, T.Underlying(), append(path, opUnderlying), nil); r != nil {
 				return Path(r), nil
 			}
 		}
@@ -260,7 +279,7 @@
 		if _, ok := o.(*types.TypeName); !ok {
 			if o.Exported() {
 				// exported non-type (const, var, func)
-				if r := find(obj, o.Type(), append(path, opType)); r != nil {
+				if r := find(obj, o.Type(), append(path, opType), nil); r != nil {
 					return Path(r), nil
 				}
 			}
@@ -270,13 +289,17 @@
 		// Inspect declared methods of defined types.
 		if T, ok := o.Type().(*types.Named); ok {
 			path = append(path, opType)
-			for i := 0; i < T.NumMethods(); i++ {
-				m := T.Method(i)
+			// Note that method index here is always with respect
+			// to canonical ordering of methods, regardless of how
+			// they appear in the underlying type.
+			canonical := canonicalize(T)
+			for i := 0; i < len(canonical); i++ {
+				m := canonical[i]
 				path2 := appendOpArg(path, opMethod, i)
 				if m == obj {
 					return Path(path2), nil // found declared method
 				}
-				if r := find(obj, m.Type(), append(path2, opType)); r != nil {
+				if r := find(obj, m.Type(), append(path2, opType), nil); r != nil {
 					return Path(r), nil
 				}
 			}
@@ -293,38 +316,44 @@
 }
 
 // find finds obj within type T, returning the path to it, or nil if not found.
-func find(obj types.Object, T types.Type, path []byte) []byte {
+//
+// The seen map is used to short-circuit cycles through type parameters. If
+// nil, it will be allocated as necessary.
+func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]bool) []byte {
 	switch T := T.(type) {
 	case *types.Basic, *types.Named:
 		// Named types belonging to pkg were handled already,
 		// so T must belong to another package. No path.
 		return nil
 	case *types.Pointer:
-		return find(obj, T.Elem(), append(path, opElem))
+		return find(obj, T.Elem(), append(path, opElem), seen)
 	case *types.Slice:
-		return find(obj, T.Elem(), append(path, opElem))
+		return find(obj, T.Elem(), append(path, opElem), seen)
 	case *types.Array:
-		return find(obj, T.Elem(), append(path, opElem))
+		return find(obj, T.Elem(), append(path, opElem), seen)
 	case *types.Chan:
-		return find(obj, T.Elem(), append(path, opElem))
+		return find(obj, T.Elem(), append(path, opElem), seen)
 	case *types.Map:
-		if r := find(obj, T.Key(), append(path, opKey)); r != nil {
+		if r := find(obj, T.Key(), append(path, opKey), seen); r != nil {
 			return r
 		}
-		return find(obj, T.Elem(), append(path, opElem))
+		return find(obj, T.Elem(), append(path, opElem), seen)
 	case *types.Signature:
-		if r := find(obj, T.Params(), append(path, opParams)); r != nil {
+		if r := findTypeParam(obj, typeparams.ForSignature(T), path, seen); r != nil {
 			return r
 		}
-		return find(obj, T.Results(), append(path, opResults))
+		if r := find(obj, T.Params(), append(path, opParams), seen); r != nil {
+			return r
+		}
+		return find(obj, T.Results(), append(path, opResults), seen)
 	case *types.Struct:
 		for i := 0; i < T.NumFields(); i++ {
-			f := T.Field(i)
+			fld := T.Field(i)
 			path2 := appendOpArg(path, opField, i)
-			if f == obj {
+			if fld == obj {
 				return path2 // found field var
 			}
-			if r := find(obj, f.Type(), append(path2, opType)); r != nil {
+			if r := find(obj, fld.Type(), append(path2, opType), seen); r != nil {
 				return r
 			}
 		}
@@ -336,7 +365,7 @@
 			if v == obj {
 				return path2 // found param/result var
 			}
-			if r := find(obj, v.Type(), append(path2, opType)); r != nil {
+			if r := find(obj, v.Type(), append(path2, opType), seen); r != nil {
 				return r
 			}
 		}
@@ -348,15 +377,42 @@
 			if m == obj {
 				return path2 // found interface method
 			}
-			if r := find(obj, m.Type(), append(path2, opType)); r != nil {
+			if r := find(obj, m.Type(), append(path2, opType), seen); r != nil {
 				return r
 			}
 		}
 		return nil
+	case *typeparams.TypeParam:
+		name := T.Obj()
+		if name == obj {
+			return append(path, opObj)
+		}
+		if seen[name] {
+			return nil
+		}
+		if seen == nil {
+			seen = make(map[*types.TypeName]bool)
+		}
+		seen[name] = true
+		if r := find(obj, T.Constraint(), append(path, opConstraint), seen); r != nil {
+			return r
+		}
+		return nil
 	}
 	panic(T)
 }
 
+func findTypeParam(obj types.Object, list *typeparams.TypeParamList, path []byte, seen map[*types.TypeName]bool) []byte {
+	for i := 0; i < list.Len(); i++ {
+		tparam := list.At(i)
+		path2 := appendOpArg(path, opTypeParam, i)
+		if r := find(obj, tparam, path2, seen); r != nil {
+			return r
+		}
+	}
+	return nil
+}
+
 // Object returns the object denoted by path p within the package pkg.
 func Object(pkg *types.Package, p Path) (types.Object, error) {
 	if p == "" {
@@ -381,10 +437,13 @@
 	type hasElem interface {
 		Elem() types.Type
 	}
-	// abstraction of *types.{Interface,Named}
-	type hasMethods interface {
-		Method(int) *types.Func
-		NumMethods() int
+	// abstraction of *types.{Named,Signature}
+	type hasTypeParams interface {
+		TypeParams() *typeparams.TypeParamList
+	}
+	// abstraction of *types.{Named,TypeParam}
+	type hasObj interface {
+		Obj() *types.TypeName
 	}
 
 	// The loop state is the pair (t, obj),
@@ -401,7 +460,7 @@
 		// Codes [AFM] have an integer operand.
 		var index int
 		switch code {
-		case opAt, opField, opMethod:
+		case opAt, opField, opMethod, opTypeParam:
 			rest := strings.TrimLeft(suffix, "0123456789")
 			numerals := suffix[:len(suffix)-len(rest)]
 			suffix = rest
@@ -466,14 +525,32 @@
 		case opUnderlying:
 			named, ok := t.(*types.Named)
 			if !ok {
-				return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t)
+				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named)", code, t, t)
 			}
 			t = named.Underlying()
 
+		case opTypeParam:
+			hasTypeParams, ok := t.(hasTypeParams) // Named, Signature
+			if !ok {
+				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named or signature)", code, t, t)
+			}
+			tparams := hasTypeParams.TypeParams()
+			if n := tparams.Len(); index >= n {
+				return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n)
+			}
+			t = tparams.At(index)
+
+		case opConstraint:
+			tparam, ok := t.(*typeparams.TypeParam)
+			if !ok {
+				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want type parameter)", code, t, t)
+			}
+			t = tparam.Constraint()
+
 		case opAt:
 			tuple, ok := t.(*types.Tuple)
 			if !ok {
-				return nil, fmt.Errorf("cannot apply %q to %s (got %s, want tuple)", code, t, t)
+				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want tuple)", code, t, t)
 			}
 			if n := tuple.Len(); index >= n {
 				return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n)
@@ -495,20 +572,21 @@
 		case opMethod:
 			hasMethods, ok := t.(hasMethods) // Interface or Named
 			if !ok {
-				return nil, fmt.Errorf("cannot apply %q to %s (got %s, want interface or named)", code, t, t)
+				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want interface or named)", code, t, t)
 			}
-			if n := hasMethods.NumMethods(); index >= n {
+			canonical := canonicalize(hasMethods)
+			if n := len(canonical); index >= n {
 				return nil, fmt.Errorf("method index %d out of range [0-%d)", index, n)
 			}
-			obj = hasMethods.Method(index)
+			obj = canonical[index]
 			t = nil
 
 		case opObj:
-			named, ok := t.(*types.Named)
+			hasObj, ok := t.(hasObj)
 			if !ok {
-				return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t)
+				return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named or type param)", code, t, t)
 			}
-			obj = named.Obj()
+			obj = hasObj.Obj()
 			t = nil
 
 		default:
@@ -522,3 +600,28 @@
 
 	return obj, nil // success
 }
+
+// hasMethods is an abstraction of *types.{Interface,Named}. This is pulled up
+// because it is used by canonicalize, which is in turn used by both encoding
+// and decoding.
+type hasMethods interface {
+	Method(int) *types.Func
+	NumMethods() int
+}
+
+// canonicalize returns a canonical order for the methods in a hasMethods.
+func canonicalize(hm hasMethods) []*types.Func {
+	count := hm.NumMethods()
+	if count <= 0 {
+		return nil
+	}
+	canon := make([]*types.Func, count)
+	for i := 0; i < count; i++ {
+		canon[i] = hm.Method(i)
+	}
+	less := func(i, j int) bool {
+		return canon[i].Id() < canon[j].Id()
+	}
+	sort.Slice(canon, less)
+	return canon
+}
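
A minimal, self-contained sketch (not part of the patch) of how the new 'T' and 'C' operators surface through the public objectpath API. It assumes a Go 1.18+ toolchain and uses the standard go/types generics methods directly rather than the internal typeparams shims; the package "demo" and identifiers F/P are illustrative only.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/objectpath"
)

// Assumes Go 1.18+; "demo", F and P are illustrative names.
const src = `package demo

func F[P interface{ M() }](p P) {}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	var conf types.Config
	pkg, err := conf.Check("demo", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	// The declared object for F's type parameter P.
	sig := pkg.Scope().Lookup("F").Type().(*types.Signature)
	tparam := sig.TypeParams().At(0).Obj()

	// For should produce a path using the 'T' (type parameter) and 'O' (Obj)
	// operators, e.g. "F.T0O", and Object should decode it back to the same
	// *types.TypeName.
	path, err := objectpath.For(tparam)
	if err != nil {
		panic(err)
	}
	obj, err := objectpath.Object(pkg, path)
	if err != nil {
		panic(err)
	}
	fmt.Println(path, obj == tparam)
}
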
diff --git a/go/types/objectpath/objectpath_go118_test.go b/go/types/objectpath/objectpath_go118_test.go
new file mode 100644
index 0000000..bc156e1
--- /dev/null
+++ b/go/types/objectpath/objectpath_go118_test.go
@@ -0,0 +1,136 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package objectpath_test
+
+import (
+	"go/types"
+	"testing"
+
+	"golang.org/x/tools/go/buildutil"
+	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/go/types/objectpath"
+)
+
+func TestGenericPaths(t *testing.T) {
+	pkgs := map[string]map[string]string{
+		"b": {"b.go": `
+package b
+
+const C int = 1
+
+type T[TP0 any, TP1 interface{ M0(); M1() }] struct{}
+
+func (T[RP0, RP1]) M() {}
+
+type N int
+
+func (N) M0()
+func (N) M1()
+
+type A = T[int, N]
+
+func F[FP0 any, FP1 interface{ M() }](FP0, FP1) {}
+`},
+	}
+	paths := []pathTest{
+		// Good paths
+		{"b", "T", "type b.T[TP0 any, TP1 interface{M0(); M1()}] struct{}", ""},
+		{"b", "T.O", "type b.T[TP0 any, TP1 interface{M0(); M1()}] struct{}", ""},
+		{"b", "T.M0", "func (b.T[RP0, RP1]).M()", ""},
+		{"b", "T.T0O", "type parameter TP0 any", ""},
+		{"b", "T.T1O", "type parameter TP1 interface{M0(); M1()}", ""},
+		{"b", "T.T1CM0", "func (interface).M0()", ""},
+		{"b", "F.T0O", "type parameter FP0 any", ""},
+		{"b", "F.T1CM0", "func (interface).M()", ""},
+		// Obj of an instance is the generic declaration.
+		{"b", "A.O", "type b.T[TP0 any, TP1 interface{M0(); M1()}] struct{}", ""},
+		{"b", "A.M0", "func (b.T[int, b.N]).M()", ""},
+
+		// Bad paths
+		{"b", "N.C", "", "invalid path: ends with 'C', want [AFMO]"},
+		{"b", "N.CO", "", "cannot apply 'C' to b.N (got *types.Named, want type parameter)"},
+		{"b", "N.T", "", `invalid path: bad numeric operand "" for code 'T'`},
+		{"b", "N.T0", "", "tuple index 0 out of range [0-0)"},
+		{"b", "T.T2O", "", "tuple index 2 out of range [0-2)"},
+		{"b", "T.T1M0", "", "cannot apply 'M' to TP1 (got *types.TypeParam, want interface or named)"},
+		{"b", "C.T0", "", "cannot apply 'T' to int (got *types.Basic, want named or signature)"},
+	}
+
+	conf := loader.Config{Build: buildutil.FakeContext(pkgs)}
+	conf.Import("b")
+	prog, err := conf.Load()
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	for _, test := range paths {
+		if err := testPath(prog, test); err != nil {
+			t.Error(err)
+		}
+	}
+
+	// bad objects
+	for _, test := range []struct {
+		obj     types.Object
+		wantErr string
+	}{
+		{types.Universe.Lookup("any"), "predeclared type any = interface{} has no path"},
+		{types.Universe.Lookup("comparable"), "predeclared type comparable interface{comparable} has no path"},
+	} {
+		path, err := objectpath.For(test.obj)
+		if err == nil {
+			t.Errorf("Object(%s) = %q, want error", test.obj, path)
+			continue
+		}
+		if err.Error() != test.wantErr {
+			t.Errorf("Object(%s) error was %q, want %q", test.obj, err, test.wantErr)
+			continue
+		}
+	}
+}
+
+func TestGenericPaths_Issue51717(t *testing.T) {
+	pkgs := map[string]map[string]string{
+		"p": {"p.go": `
+package p
+
+type S struct{}
+
+func (_ S) M() {
+	// The go vet stackoverflow crash disappears when the following line is removed
+	panic("")
+}
+
+func F[WL interface{ N(item W) WL }, W any]() {
+}
+
+func main() {}
+`},
+	}
+	paths := []pathTest{
+		{"p", "F.T0CM0.RA0", "var  WL", ""},
+		{"p", "F.T0CM0.RA0.CM0", "func (interface).N(item W) WL", ""},
+
+		// Finding S.M0 reproduced the infinite recursion reported in #51717,
+		// because F is searched before S.
+		{"p", "S.M0", "func (p.S).M()", ""},
+	}
+
+	conf := loader.Config{Build: buildutil.FakeContext(pkgs)}
+	conf.Import("p")
+	prog, err := conf.Load()
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	for _, test := range paths {
+		if err := testPath(prog, test); err != nil {
+			t.Error(err)
+		}
+	}
+}
diff --git a/go/types/objectpath/objectpath_test.go b/go/types/objectpath/objectpath_test.go
index 16b6123..39e7b1b 100644
--- a/go/types/objectpath/objectpath_test.go
+++ b/go/types/objectpath/objectpath_test.go
@@ -6,6 +6,7 @@
 
 import (
 	"bytes"
+	"fmt"
 	"go/ast"
 	"go/importer"
 	"go/parser"
@@ -45,6 +46,10 @@
 
 func unexportedFunc()
 type unexportedType struct{}
+
+type S struct{t struct{x int}}
+type R []struct{y int}
+type Q [2]struct{z int}
 `},
 		"a": {"a.go": `
 package a
@@ -55,6 +60,60 @@
 
 `},
 	}
+	paths := []pathTest{
+		// Good paths
+		{"b", "C", "const b.C a.Int", ""},
+		{"b", "F", "func b.F(a int, b int, c int, d a.T)", ""},
+		{"b", "F.PA0", "var a int", ""},
+		{"b", "F.PA1", "var b int", ""},
+		{"b", "F.PA2", "var c int", ""},
+		{"b", "F.PA3", "var d a.T", ""},
+		{"b", "T", "type b.T struct{A int; b int; a.T}", ""},
+		{"b", "T.O", "type b.T struct{A int; b int; a.T}", ""},
+		{"b", "T.UF0", "field A int", ""},
+		{"b", "T.UF1", "field b int", ""},
+		{"b", "T.UF2", "field T a.T", ""},
+		{"b", "U.UF2", "field T a.T", ""}, // U.U... are aliases for T.U...
+		{"b", "A", "type b.A = struct{x int}", ""},
+		{"b", "A.F0", "field x int", ""},
+		{"b", "V", "var b.V []*a.T", ""},
+		{"b", "M", "type b.M map[struct{x int}]struct{y int}", ""},
+		{"b", "M.UKF0", "field x int", ""},
+		{"b", "M.UEF0", "field y int", ""},
+		{"b", "T.M0", "func (b.T).M() *interface{f()}", ""}, // concrete method
+		{"b", "T.M0.RA0", "var  *interface{f()}", ""},       // parameter
+		{"b", "T.M0.RA0.EM0", "func (interface).f()", ""},   // interface method
+		{"b", "unexportedType", "type b.unexportedType struct{}", ""},
+		{"b", "S.UF0.F0", "field x int", ""},
+		{"b", "R.UEF0", "field y int", ""},
+		{"b", "Q.UEF0", "field z int", ""},
+		{"a", "T", "type a.T struct{x int; y int}", ""},
+		{"a", "T.UF0", "field x int", ""},
+
+		// Bad paths
+		{"b", "", "", "empty path"},
+		{"b", "missing", "", `package b does not contain "missing"`},
+		{"b", "F.U", "", "invalid path: ends with 'U', want [AFMO]"},
+		{"b", "F.PA3.O", "", "path denotes type a.T struct{x int; y int}, which belongs to a different package"},
+		{"b", "F.PA!", "", `invalid path: bad numeric operand "" for code 'A'`},
+		{"b", "F.PA3.UF0", "", "path denotes field x int, which belongs to a different package"},
+		{"b", "F.PA3.UF5", "", "field index 5 out of range [0-2)"},
+		{"b", "V.EE", "", "invalid path: ends with 'E', want [AFMO]"},
+		{"b", "F..O", "", "invalid path: unexpected '.' in type context"},
+		{"b", "T.OO", "", "invalid path: code 'O' in object context"},
+		{"b", "T.EO", "", "cannot apply 'E' to b.T (got *types.Named, want pointer, slice, array, chan or map)"},
+		{"b", "A.O", "", "cannot apply 'O' to struct{x int} (got *types.Struct, want named or type param)"},
+		{"b", "A.UF0", "", "cannot apply 'U' to struct{x int} (got *types.Struct, want named)"},
+		{"b", "M.UPO", "", "cannot apply 'P' to map[struct{x int}]struct{y int} (got *types.Map, want signature)"},
+		{"b", "C.O", "", "path denotes type a.Int int, which belongs to a different package"},
+		{"b", "T.M9", "", "method index 9 out of range [0-1)"},
+		{"b", "M.UF0", "", "cannot apply 'F' to map[struct{x int}]struct{y int} (got *types.Map, want struct)"},
+		{"b", "V.KO", "", "cannot apply 'K' to []*a.T (got *types.Slice, want map)"},
+		{"b", "V.A4", "", "cannot apply 'A' to []*a.T (got *types.Slice, want tuple)"},
+		{"b", "V.RA0", "", "cannot apply 'R' to []*a.T (got *types.Slice, want signature)"},
+		{"b", "F.PA4", "", "tuple index 4 out of range [0-4)"},
+		{"b", "F.XO", "", "invalid path: unknown code 'X'"},
+	}
 	conf := loader.Config{Build: buildutil.FakeContext(pkgs)}
 	conf.Import("a")
 	conf.Import("b")
@@ -62,9 +121,45 @@
 	if err != nil {
 		t.Fatal(err)
 	}
-	a := prog.Imported["a"].Pkg
-	b := prog.Imported["b"].Pkg
 
+	for _, test := range paths {
+		if err := testPath(prog, test); err != nil {
+			t.Error(err)
+		}
+	}
+
+	// bad objects
+	bInfo := prog.Imported["b"]
+	for _, test := range []struct {
+		obj     types.Object
+		wantErr string
+	}{
+		{types.Universe.Lookup("nil"), "predeclared nil has no path"},
+		{types.Universe.Lookup("len"), "predeclared builtin len has no path"},
+		{types.Universe.Lookup("int"), "predeclared type int has no path"},
+		{bInfo.Implicits[bInfo.Files[0].Imports[0]], "no path for package a"}, // import "a"
+		{bInfo.Pkg.Scope().Lookup("unexportedFunc"), "no path for non-exported func b.unexportedFunc()"},
+	} {
+		path, err := objectpath.For(test.obj)
+		if err == nil {
+			t.Errorf("Object(%s) = %q, want error", test.obj, path)
+			continue
+		}
+		if err.Error() != test.wantErr {
+			t.Errorf("Object(%s) error was %q, want %q", test.obj, err, test.wantErr)
+			continue
+		}
+	}
+}
+
+type pathTest struct {
+	pkg     string
+	path    objectpath.Path
+	wantobj string
+	wantErr string
+}
+
+func testPath(prog *loader.Program, test pathTest) error {
 	// We test objectpath by enumerating a set of paths
 	// and ensuring that Path(pkg, Object(pkg, path)) == path.
 	//
@@ -80,133 +175,63 @@
 	// The downside is that the test depends on the path encoding.
 	// The upside is that the test exercises the encoding.
 
-	// good paths
-	for _, test := range []struct {
-		pkg     *types.Package
-		path    objectpath.Path
-		wantobj string
-	}{
-		{b, "C", "const b.C a.Int"},
-		{b, "F", "func b.F(a int, b int, c int, d a.T)"},
-		{b, "F.PA0", "var a int"},
-		{b, "F.PA1", "var b int"},
-		{b, "F.PA2", "var c int"},
-		{b, "F.PA3", "var d a.T"},
-		{b, "T", "type b.T struct{A int; b int; a.T}"},
-		{b, "T.O", "type b.T struct{A int; b int; a.T}"},
-		{b, "T.UF0", "field A int"},
-		{b, "T.UF1", "field b int"},
-		{b, "T.UF2", "field T a.T"},
-		{b, "U.UF2", "field T a.T"}, // U.U... are aliases for T.U...
-		{b, "A", "type b.A = struct{x int}"},
-		{b, "A.F0", "field x int"},
-		{b, "V", "var b.V []*a.T"},
-		{b, "M", "type b.M map[struct{x int}]struct{y int}"},
-		{b, "M.UKF0", "field x int"},
-		{b, "M.UEF0", "field y int"},
-		{b, "T.M0", "func (b.T).M() *interface{f()}"}, // concrete method
-		{b, "T.M0.RA0", "var  *interface{f()}"},       // parameter
-		{b, "T.M0.RA0.EM0", "func (interface).f()"},   // interface method
-		{b, "unexportedType", "type b.unexportedType struct{}"},
-		{a, "T", "type a.T struct{x int; y int}"},
-		{a, "T.UF0", "field x int"},
-	} {
-		// check path -> object
-		obj, err := objectpath.Object(test.pkg, test.path)
-		if err != nil {
-			t.Errorf("Object(%s, %q) failed: %v",
-				test.pkg.Path(), test.path, err)
-			continue
+	pkg := prog.Imported[test.pkg].Pkg
+	// check path -> object
+	obj, err := objectpath.Object(pkg, test.path)
+	if (test.wantErr != "") != (err != nil) {
+		return fmt.Errorf("Object(%s, %q) returned error %q, want %q", pkg.Path(), test.path, err, test.wantErr)
+	}
+	if test.wantErr != "" {
+		if got := stripSubscripts(err.Error()); got != test.wantErr {
+			return fmt.Errorf("Object(%s, %q) error was %q, want %q",
+				pkg.Path(), test.path, got, test.wantErr)
 		}
-		if obj.String() != test.wantobj {
-			t.Errorf("Object(%s, %q) = %v, want %s",
-				test.pkg.Path(), test.path, obj, test.wantobj)
-			continue
-		}
-		if obj.Pkg() != test.pkg {
-			t.Errorf("Object(%s, %q) = %v, which belongs to package %s",
-				test.pkg.Path(), test.path, obj, obj.Pkg().Path())
-			continue
-		}
+		return nil
+	}
+	// Inv: err == nil
 
-		// check object -> path
-		path2, err := objectpath.For(obj)
-		if err != nil {
-			t.Errorf("For(%v) failed: %v, want %q", obj, err, test.path)
-			continue
-		}
-		// We do not require that test.path == path2. Aliases are legal.
-		// But we do require that Object(path2) finds the same object.
-		obj2, err := objectpath.Object(test.pkg, path2)
-		if err != nil {
-			t.Errorf("Object(%s, %q) failed: %v (roundtrip from %q)",
-				test.pkg.Path(), path2, err, test.path)
-			continue
-		}
-		if obj2 != obj {
-			t.Errorf("Object(%s, For(obj)) != obj: got %s, obj is %s (path1=%q, path2=%q)",
-				test.pkg.Path(), obj2, obj, test.path, path2)
-			continue
-		}
+	if objString := stripSubscripts(obj.String()); objString != test.wantobj {
+		return fmt.Errorf("Object(%s, %q) = %s, want %s", pkg.Path(), test.path, objString, test.wantobj)
+	}
+	if obj.Pkg() != pkg {
+		return fmt.Errorf("Object(%s, %q) = %v, which belongs to package %s",
+			pkg.Path(), test.path, obj, obj.Pkg().Path())
 	}
 
-	// bad paths (all relative to package b)
-	for _, test := range []struct {
-		pkg     *types.Package
-		path    objectpath.Path
-		wantErr string
-	}{
-		{b, "", "empty path"},
-		{b, "missing", `package b does not contain "missing"`},
-		{b, "F.U", "invalid path: ends with 'U', want [AFMO]"},
-		{b, "F.PA3.O", "path denotes type a.T struct{x int; y int}, which belongs to a different package"},
-		{b, "F.PA!", `invalid path: bad numeric operand "" for code 'A'`},
-		{b, "F.PA3.UF0", "path denotes field x int, which belongs to a different package"},
-		{b, "F.PA3.UF5", "field index 5 out of range [0-2)"},
-		{b, "V.EE", "invalid path: ends with 'E', want [AFMO]"},
-		{b, "F..O", "invalid path: unexpected '.' in type context"},
-		{b, "T.OO", "invalid path: code 'O' in object context"},
-		{b, "T.EO", "cannot apply 'E' to b.T (got *types.Named, want pointer, slice, array, chan or map)"},
-		{b, "A.O", "cannot apply 'O' to struct{x int} (got struct{x int}, want named)"},
-		{b, "A.UF0", "cannot apply 'U' to struct{x int} (got struct{x int}, want named)"},
-		{b, "M.UPO", "cannot apply 'P' to map[struct{x int}]struct{y int} (got *types.Map, want signature)"},
-		{b, "C.O", "path denotes type a.Int int, which belongs to a different package"},
-	} {
-		obj, err := objectpath.Object(test.pkg, test.path)
-		if err == nil {
-			t.Errorf("Object(%s, %q) = %s, want error",
-				test.pkg.Path(), test.path, obj)
-			continue
-		}
-		if err.Error() != test.wantErr {
-			t.Errorf("Object(%s, %q) error was %q, want %q",
-				test.pkg.Path(), test.path, err, test.wantErr)
-			continue
-		}
+	// check object -> path
+	path2, err := objectpath.For(obj)
+	if err != nil {
+		return fmt.Errorf("For(%v) failed: %v, want %q", obj, err, test.path)
 	}
+	// We do not require that test.path == path2. Aliases are legal.
+	// But we do require that Object(path2) finds the same object.
+	obj2, err := objectpath.Object(pkg, path2)
+	if err != nil {
+		return fmt.Errorf("Object(%s, %q) failed: %v (roundtrip from %q)", pkg.Path(), path2, err, test.path)
+	}
+	if obj2 != obj {
+		return fmt.Errorf("Object(%s, For(obj)) != obj: got %s, obj is %s (path1=%q, path2=%q)", pkg.Path(), obj2, obj, test.path, path2)
+	}
+	return nil
+}
 
-	// bad objects
-	bInfo := prog.Imported["b"]
-	for _, test := range []struct {
-		obj     types.Object
-		wantErr string
-	}{
-		{types.Universe.Lookup("nil"), "predeclared nil has no path"},
-		{types.Universe.Lookup("len"), "predeclared builtin len has no path"},
-		{types.Universe.Lookup("int"), "predeclared type int has no path"},
-		{bInfo.Info.Implicits[bInfo.Files[0].Imports[0]], "no path for package a"}, // import "a"
-		{b.Scope().Lookup("unexportedFunc"), "no path for non-exported func b.unexportedFunc()"},
-	} {
-		path, err := objectpath.For(test.obj)
-		if err == nil {
-			t.Errorf("Object(%s) = %q, want error", test.obj, path)
-			continue
+// stripSubscripts removes type parameter id subscripts.
+//
+// TODO(rfindley): remove this function once subscripts are removed from the
+// type parameter type string.
+func stripSubscripts(s string) string {
+	var runes []rune
+	for _, r := range s {
+		// For debugging/uniqueness purposes, TypeString on a type parameter adds a
+		// subscript corresponding to the type parameter's unique id. This is going
+		// to be removed, but in the meantime we skip the subscript runes to get a
+		// deterministic output.
+		if '₀' <= r && r < '₀'+10 {
+			continue // trim type parameter subscripts
 		}
-		if err.Error() != test.wantErr {
-			t.Errorf("Object(%s) error was %q, want %q", test.obj, err, test.wantErr)
-			continue
-		}
+		runes = append(runes, r)
 	}
+	return string(runes)
 }
 
 // TestSourceAndExportData uses objectpath to compute a correspondence
@@ -299,3 +324,65 @@
 
 	return s
 }
+
+// TestOrdering uses objectpath over two Named types with the same method
+// names but in a different source order and checks that objectpath is the
+// same for methods with the same name.
+func TestOrdering(t *testing.T) {
+	pkgs := map[string]map[string]string{
+		"p": {"p.go": `
+package p
+
+type T struct{ A int }
+
+func (T) M() { }
+func (T) N() { }
+func (T) X() { }
+func (T) Y() { }
+`},
+		"q": {"q.go": `
+package q
+
+type T struct{ A int }
+
+func (T) N() { }
+func (T) M() { }
+func (T) Y() { }
+func (T) X() { }
+`}}
+	conf := loader.Config{Build: buildutil.FakeContext(pkgs)}
+	conf.Import("p")
+	conf.Import("q")
+	prog, err := conf.Load()
+	if err != nil {
+		t.Fatal(err)
+	}
+	p := prog.Imported["p"].Pkg
+	q := prog.Imported["q"].Pkg
+
+	// From here, the objectpaths generated for p and q should be the
+	// same. If they are not, then we are generating an ordering that is
+	// dependent on the declaration of the types within the file.
+	for _, test := range []struct {
+		path objectpath.Path
+	}{
+		{"T.M0"},
+		{"T.M1"},
+		{"T.M2"},
+		{"T.M3"},
+	} {
+		pobj, err := objectpath.Object(p, test.path)
+		if err != nil {
+			t.Errorf("Object(%s) failed in a1: %v", test.path, err)
+			continue
+		}
+		qobj, err := objectpath.Object(q, test.path)
+		if err != nil {
+			t.Errorf("Object(%s) failed in a2: %v", test.path, err)
+			continue
+		}
+		if pobj.Name() != qobj.Name() {
+			t.Errorf("Objects(%s) not equal, got p = %v, q = %v", test.path, pobj.Name(), qobj.Name())
+		}
+	}
+}
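
Because the 'M' operand now refers to the Id-sorted method list produced by canonicalize rather than to source order, an external consumer that builds or compares paths by hand needs the same ordering. A small compilable sketch of that ordering, assuming a *types.Named obtained from any type-checked package:

package demo

import (
	"go/types"
	"sort"
)

// canonicalMethods mirrors the canonicalize helper added above: the 'M'
// operand of an object path indexes into this Id-sorted view of a type's
// declared methods, not into their source order. The *types.Named argument
// is assumed to come from a type-checked package.
func canonicalMethods(named *types.Named) []*types.Func {
	methods := make([]*types.Func, named.NumMethods())
	for i := range methods {
		methods[i] = named.Method(i)
	}
	sort.Slice(methods, func(i, j int) bool {
		return methods[i].Id() < methods[j].Id()
	})
	return methods
}
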
diff --git a/go/types/typeutil/callee.go b/go/types/typeutil/callee.go
index 38f596d..90b3ab0 100644
--- a/go/types/typeutil/callee.go
+++ b/go/types/typeutil/callee.go
@@ -9,13 +9,29 @@
 	"go/types"
 
 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // Callee returns the named target of a function call, if any:
 // a function, method, builtin, or variable.
+//
+// Functions and methods may potentially have type parameters.
 func Callee(info *types.Info, call *ast.CallExpr) types.Object {
+	fun := astutil.Unparen(call.Fun)
+
+	// Look through type instantiation if necessary.
+	isInstance := false
+	switch fun.(type) {
+	case *ast.IndexExpr, *typeparams.IndexListExpr:
+		// When extracting the callee from an *IndexExpr, we need to check that
+		// it is a *types.Func and not a *types.Var.
+		// Example: Don't match a slice m within the expression `m[0]()`.
+		isInstance = true
+		fun, _, _, _ = typeparams.UnpackIndexExpr(fun)
+	}
+
 	var obj types.Object
-	switch fun := astutil.Unparen(call.Fun).(type) {
+	switch fun := fun.(type) {
 	case *ast.Ident:
 		obj = info.Uses[fun] // type, var, builtin, or declared func
 	case *ast.SelectorExpr:
@@ -28,11 +44,18 @@
 	if _, ok := obj.(*types.TypeName); ok {
 		return nil // T(x) is a conversion, not a call
 	}
+	// A Func is required to match instantiations.
+	if _, ok := obj.(*types.Func); isInstance && !ok {
+		return nil // Was not a Func.
+	}
 	return obj
 }
 
-// StaticCallee returns the target (function or method) of a static
-// function call, if any. It returns nil for calls to builtins.
+// StaticCallee returns the target (function or method) of a static function
+// call, if any. It returns nil for calls to builtins.
+//
+// Note: for calls of instantiated functions and methods, StaticCallee returns
+// the corresponding generic function or method on the generic type.
 func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func {
 	if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) {
 		return f
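
A minimal sketch (not part of the patch) of the new Callee/StaticCallee behavior on an instantiated call: the callee reported for F[int]() is the generic origin F. It assumes a Go 1.18+ toolchain; the package "demo" and the names F/caller are illustrative only.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

// Assumes Go 1.18+; "demo", F and caller are illustrative names.
const src = `package demo

func F[T any]() {}

func caller() {
	F[int]() // an instantiated call
}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{
		Uses:       make(map[*ast.Ident]types.Object),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}
	var conf types.Config
	if _, err := conf.Check("demo", fset, []*ast.File{f}, info); err != nil {
		panic(err)
	}

	ast.Inspect(f, func(n ast.Node) bool {
		if call, ok := n.(*ast.CallExpr); ok {
			// For F[int](), this reports the generic origin func demo.F.
			if fn := typeutil.StaticCallee(info, call); fn != nil {
				fmt.Println(fn)
			}
		}
		return true
	})
}
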
diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go
index 272e1eb..3452361 100644
--- a/go/types/typeutil/callee_test.go
+++ b/go/types/typeutil/callee_test.go
@@ -5,8 +5,8 @@
 package typeutil_test
 
 import (
+	"fmt"
 	"go/ast"
-	"go/importer"
 	"go/parser"
 	"go/token"
 	"go/types"
@@ -14,76 +14,158 @@
 	"testing"
 
 	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func TestStaticCallee(t *testing.T) {
-	const src = `package p
+	testStaticCallee(t, []string{
+		`package q;
+		func Abs(x int) int {
+			if x < 0 {
+				return -x
+			}
+			return x
+		}`,
+		`package p
+		import "q"
 
-import "fmt"
+		type T int
 
-type T int
+		func g(int)
 
-func g(int)
+		var f = g
 
-var f = g
+		var x int
 
-var x int
+		type s struct{ f func(int) }
+		func (s) g(int)
 
-type s struct{ f func(int) }
-func (s) g(int)
+		type I interface{ f(int) }
 
-type I interface{ f(int) }
+		var a struct{b struct{c s}}
 
-var a struct{b struct{c s}}
+		var n map[int]func()
+		var m []func()
 
-func calls() {
-	g(x)           // a declared func
-	s{}.g(x)       // a concrete method
-	a.b.c.g(x)     // same
-	fmt.Println(x) // declared func, qualified identifier
+		func calls() {
+			g(x)           // a declared func
+			s{}.g(x)       // a concrete method
+			a.b.c.g(x)     // same
+			_ = q.Abs(x)   // declared func, qualified identifier
+		}
+
+		func noncalls() {
+			_ = T(x)    // a type
+			f(x)        // a var
+			panic(x)    // a built-in
+			s{}.f(x)    // a field
+			I(nil).f(x) // interface method
+			m[0]()      // a slice
+			n[0]()      // a map
+		}
+		`})
 }
 
-func noncalls() {
-	_ = T(x)    // a type
-	f(x)        // a var
-	panic(x)    // a built-in
-	s{}.f(x)    // a field
-	I(nil).f(x) // interface method
-}
-`
-	// parse
-	fset := token.NewFileSet()
-	f, err := parser.ParseFile(fset, "p.go", src, 0)
-	if err != nil {
-		t.Fatal(err)
+func TestTypeParamStaticCallee(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("type parameters are not enabled")
 	}
+	testStaticCallee(t, []string{
+		`package q
+		func R[T any]() {}
+		`,
+		`package p
+		import "q"
+		type I interface{
+			i()
+		}
 
-	// type-check
+		type G[T any] func() T
+		func F[T any]() T { var x T; return x }
+
+		type M[T I] struct{ t T }
+		func (m M[T]) noncalls() {
+			m.t.i()   // method on a type parameter
+		}
+
+		func (m M[T]) calls() {
+			m.calls() // method on a generic type
+		}
+
+		type Chain[T I] struct{ r struct { s M[T] } }
+
+		type S int
+		func (S) i() {}
+
+		func Multi[TP0, TP1 any](){}
+
+		func calls() {
+			_ = F[int]()            // instantiated function
+			_ = (F[int])()          // go through parens
+			M[S]{}.calls()          // instantiated method
+			Chain[S]{}.r.s.calls()  // same as above
+			Multi[int,string]()     // multiple type parameters
+			q.R[int]()              // different package
+		}
+
+		func noncalls() {
+			_ = G[int](nil)()  // instantiated function
+		}
+		`})
+}
+
+// testStaticCallee parses and type-checks each element of contents, in order,
+// as a single-file package, then inspects every function whose name ends in
+// "calls". If the function is named exactly "calls", every call expression in
+// its body must have a static callee; in any other "...calls" function (such
+// as "noncalls"), no call may have one.
+// Failures are reported on t.
+func testStaticCallee(t *testing.T, contents []string) {
+	fset := token.NewFileSet()
+	packages := make(map[string]*types.Package)
+	cfg := &types.Config{Importer: closure(packages)}
 	info := &types.Info{
 		Uses:       make(map[*ast.Ident]types.Object),
 		Selections: make(map[*ast.SelectorExpr]*types.Selection),
 	}
-	cfg := &types.Config{Importer: importer.ForCompiler(fset, "source", nil)}
-	if _, err := cfg.Check("p", fset, []*ast.File{f}, info); err != nil {
-		t.Fatal(err)
+	typeparams.InitInstanceInfo(info)
+
+	var files []*ast.File
+	for i, content := range contents {
+		// parse
+		f, err := parser.ParseFile(fset, fmt.Sprintf("%d.go", i), content, 0)
+		if err != nil {
+			t.Fatal(err)
+		}
+		files = append(files, f)
+
+		// type-check
+		pkg, err := cfg.Check(f.Name.Name, fset, []*ast.File{f}, info)
+		if err != nil {
+			t.Fatal(err)
+		}
+		packages[pkg.Path()] = pkg
 	}
 
-	for _, decl := range f.Decls {
-		if decl, ok := decl.(*ast.FuncDecl); ok && strings.HasSuffix(decl.Name.Name, "calls") {
-			wantCallee := decl.Name.Name == "calls" // false within func noncalls()
-			ast.Inspect(decl.Body, func(n ast.Node) bool {
-				if call, ok := n.(*ast.CallExpr); ok {
-					fn := typeutil.StaticCallee(info, call)
-					if fn == nil && wantCallee {
-						t.Errorf("%s: StaticCallee returned nil",
-							fset.Position(call.Lparen))
-					} else if fn != nil && !wantCallee {
-						t.Errorf("%s: StaticCallee returned %s, want nil",
-							fset.Position(call.Lparen), fn)
+	// check
+	for _, f := range files {
+		for _, decl := range f.Decls {
+			if decl, ok := decl.(*ast.FuncDecl); ok && strings.HasSuffix(decl.Name.Name, "calls") {
+				wantCallee := decl.Name.Name == "calls" // false within func noncalls()
+				ast.Inspect(decl.Body, func(n ast.Node) bool {
+					if call, ok := n.(*ast.CallExpr); ok {
+						fn := typeutil.StaticCallee(info, call)
+						if fn == nil && wantCallee {
+							t.Errorf("%s: StaticCallee returned nil",
+								fset.Position(call.Lparen))
+						} else if fn != nil && !wantCallee {
+							t.Errorf("%s: StaticCallee returned %s, want nil",
+								fset.Position(call.Lparen), fn)
+						}
 					}
-				}
-				return true
-			})
+					return true
+				})
+			}
 		}
 	}
 }
diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go
index c7f7545..c9f8f25 100644
--- a/go/types/typeutil/map.go
+++ b/go/types/typeutil/map.go
@@ -11,6 +11,8 @@
 	"fmt"
 	"go/types"
 	"reflect"
+
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // Map is a hash-table-based mapping from types (types.Type) to
@@ -211,11 +213,29 @@
 // Call MakeHasher to create a Hasher.
 type Hasher struct {
 	memo map[types.Type]uint32
+
+	// ptrMap records pointer identity.
+	ptrMap map[interface{}]uint32
+
+	// sigTParams holds type parameters from the signature being hashed.
+	// Signatures are considered identical modulo renaming of type parameters, so
+	// within the scope of a signature type the identity of the signature's type
+	// parameters is just their index.
+	//
+	// Since the language does not currently support referring to uninstantiated
+	// generic types or functions, and instantiated signatures do not have type
+	// parameter lists, we should never encounter a second non-empty type
+	// parameter list when hashing a generic signature.
+	sigTParams *typeparams.TypeParamList
 }
 
 // MakeHasher returns a new Hasher instance.
 func MakeHasher() Hasher {
-	return Hasher{make(map[types.Type]uint32)}
+	return Hasher{
+		memo:       make(map[types.Type]uint32),
+		ptrMap:     make(map[interface{}]uint32),
+		sigTParams: nil,
+	}
 }
 
 // Hash computes a hash value for the given type t such that
@@ -273,17 +293,62 @@
 		if t.Variadic() {
 			hash *= 8863
 		}
+
+		// Use a separate hasher for types inside of the signature, where type
+		// parameter identity is modified to be (index, constraint). We must use a
+		// new memo for this hasher as type identity may be affected by this
+		// masking. For example, in func[T any](*T), the identity of *T depends on
+		// whether we are mapping the argument in isolation, or recursively as part
+		// of hashing the signature.
+		//
+		// We should never encounter a generic signature while hashing another
+		// generic signature, but defensively set sigTParams only if h.mask is
+		// unset.
+		tparams := typeparams.ForSignature(t)
+		if h.sigTParams == nil && tparams.Len() != 0 {
+			h = Hasher{
+				// There may be something more efficient than discarding the existing
+				// memo, but it would require detecting whether types are 'tainted' by
+				// references to type parameters.
+				memo: make(map[types.Type]uint32),
+				// Re-using ptrMap ensures that pointer identity is preserved in this
+				// hasher.
+				ptrMap:     h.ptrMap,
+				sigTParams: tparams,
+			}
+		}
+
+		for i := 0; i < tparams.Len(); i++ {
+			tparam := tparams.At(i)
+			hash += 7 * h.Hash(tparam.Constraint())
+		}
+
 		return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
 
+	case *typeparams.Union:
+		return h.hashUnion(t)
+
 	case *types.Interface:
+		// Interfaces are identical if they have the same set of methods, with
+		// identical names and types, and they have the same set of type
+		// restrictions. See go/types.identical for more details.
 		var hash uint32 = 9103
+
+		// Hash methods.
 		for i, n := 0, t.NumMethods(); i < n; i++ {
-			// See go/types.identicalMethods for rationale.
 			// Method order is not significant.
 			// Ignore m.Pkg().
 			m := t.Method(i)
 			hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
 		}
+
+		// Hash type restrictions.
+		terms, err := typeparams.InterfaceTermSet(t)
+		// If err != nil, t has invalid type restrictions.
+		if err == nil {
+			hash += h.hashTermSet(terms)
+		}
+
 		return hash
 
 	case *types.Map:
@@ -293,21 +358,86 @@
 		return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
 
 	case *types.Named:
-		// Not safe with a copying GC; objects may move.
-		return uint32(reflect.ValueOf(t.Obj()).Pointer())
+		hash := h.hashPtr(t.Obj())
+		targs := typeparams.NamedTypeArgs(t)
+		for i := 0; i < targs.Len(); i++ {
+			targ := targs.At(i)
+			hash += 2 * h.Hash(targ)
+		}
+		return hash
+
+	case *typeparams.TypeParam:
+		return h.hashTypeParam(t)
 
 	case *types.Tuple:
 		return h.hashTuple(t)
 	}
-	panic(t)
+
+	panic(fmt.Sprintf("%T: %v", t, t))
 }
 
 func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
 	// See go/types.identicalTypes for rationale.
 	n := tuple.Len()
-	var hash uint32 = 9137 + 2*uint32(n)
+	hash := 9137 + 2*uint32(n)
 	for i := 0; i < n; i++ {
 		hash += 3 * h.Hash(tuple.At(i).Type())
 	}
 	return hash
 }
+
+func (h Hasher) hashUnion(t *typeparams.Union) uint32 {
+	// Hash type restrictions.
+	terms, err := typeparams.UnionTermSet(t)
+	// If err != nil, t has invalid type restrictions. Fall back on a
+	// non-zero hash.
+	if err != nil {
+		return 9151
+	}
+	return h.hashTermSet(terms)
+}
+
+func (h Hasher) hashTermSet(terms []*typeparams.Term) uint32 {
+	hash := 9157 + 2*uint32(len(terms))
+	for _, term := range terms {
+		// term order is not significant.
+		termHash := h.Hash(term.Type())
+		if term.Tilde() {
+			termHash *= 9161
+		}
+		hash += 3 * termHash
+	}
+	return hash
+}
+
+// hashTypeParam returns a hash of the type parameter t, with a hash value
+// depending on whether t is contained in h.sigTParams.
+//
+// If h.sigTParams is set and contains t, then we are in the process of hashing
+// a signature, and the hash value of t must depend only on t's index and
+// constraint: signatures are considered identical modulo type parameter
+// renaming. To avoid infinite recursion, we only hash the type parameter
+// index, and rely on types.Identical to handle signatures where constraints
+// are not identical.
+//
+// Otherwise the hash of t depends only on t's pointer identity.
+func (h Hasher) hashTypeParam(t *typeparams.TypeParam) uint32 {
+	if h.sigTParams != nil {
+		i := t.Index()
+		if i >= 0 && i < h.sigTParams.Len() && t == h.sigTParams.At(i) {
+			return 9173 + 3*uint32(i)
+		}
+	}
+	return h.hashPtr(t.Obj())
+}
+
+// hashPtr hashes the pointer identity of ptr. It uses h.ptrMap to ensure that
+// hashes remain stable even if the GC moves the objects being hashed.
+func (h Hasher) hashPtr(ptr interface{}) uint32 {
+	if hash, ok := h.ptrMap[ptr]; ok {
+		return hash
+	}
+	hash := uint32(reflect.ValueOf(ptr).Pointer())
+	h.ptrMap[ptr] = hash
+	return hash
+}
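
A minimal sketch (not part of the patch) of the signature-hashing behavior described above: two generic functions that differ only in the name of their type parameter land in the same typeutil.Map entry. It assumes a Go 1.18+ toolchain; the package "demo" and the names A/B are illustrative only.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

// Assumes Go 1.18+; "demo", A and B are illustrative names.
const src = `package demo

// A and B differ only in the name of their type parameter.
func A[P any](p *P) {}
func B[Q any](q *Q) {}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	var conf types.Config
	pkg, err := conf.Check("demo", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	a := pkg.Scope().Lookup("A").Type()
	b := pkg.Scope().Lookup("B").Type()

	var m typeutil.Map
	m.Set(a, "first")
	// B's signature is identical to A's modulo type-parameter renaming, so
	// it hashes to the same entry and At finds the value stored under A.
	fmt.Println(m.At(b)) // "first"
}
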
diff --git a/go/types/typeutil/map_test.go b/go/types/typeutil/map_test.go
index d4b0f63..8cd643e 100644
--- a/go/types/typeutil/map_test.go
+++ b/go/types/typeutil/map_test.go
@@ -10,10 +10,14 @@
 //   (e.g. all types generated by type-checking some body of real code).
 
 import (
+	"go/ast"
+	"go/parser"
+	"go/token"
 	"go/types"
 	"testing"
 
 	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 var (
@@ -172,3 +176,216 @@
 		t.Errorf("Len(): got %q, want %q", s, "")
 	}
 }
+
+func TestMapGenerics(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("type params are not enabled at this Go version")
+	}
+
+	const src = `
+package p
+
+// Basic defined types.
+type T1 int
+type T2 int
+
+// Identical methods.
+func (T1) M(int) {}
+func (T2) M(int) {}
+
+// A constraint interface.
+type C interface {
+	~int | string
+}
+
+type I interface {
+}
+
+// A generic type.
+type G[P C] int
+
+// Generic functions with identical signature.
+func Fa1[P C](p P) {}
+func Fa2[Q C](q Q) {}
+
+// Fb1 and Fb2 are identical and should be mapped to the same entry, even if we
+// map their arguments first.
+func Fb1[P any](x *P) {
+	var y *P // Map this first.
+	_ = y
+}
+func Fb2[Q any](x *Q) {
+}
+
+// G1 and G2 are mutually recursive, and have identical methods.
+type G1[P any] struct{
+	Field *G2[P]
+}
+func (G1[P]) M(G1[P], G2[P]) {}
+type G2[Q any] struct{
+	Field *G1[Q]
+}
+func (G2[P]) M(G1[P], G2[P]) {}
+
+// Method type expressions on different generic types are different.
+var ME1 = G1[int].M
+var ME2 = G2[int].M
+
+// ME1Type should have identical type as ME1.
+var ME1Type func(G1[int], G1[int], G2[int])
+
+// Examples from issue #51314
+type Constraint[T any] interface{}
+func Foo[T Constraint[T]]() {}
+func Fn[T1 ~*T2, T2 ~*T1](t1 T1, t2 T2) {}
+
+// Bar and Baz are identical to Foo.
+func Bar[P Constraint[P]]() {}
+func Baz[Q any]() {} // The underlying type of Constraint[P] is any.
+// But Quux is not.
+func Quux[Q interface{ quux() }]() {}
+`
+
+	fset := token.NewFileSet()
+	file, err := parser.ParseFile(fset, "p.go", src, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	var conf types.Config
+	pkg, err := conf.Check("", fset, []*ast.File{file}, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	// Collect types.
+	scope := pkg.Scope()
+	var (
+		T1      = scope.Lookup("T1").Type().(*types.Named)
+		T2      = scope.Lookup("T2").Type().(*types.Named)
+		T1M     = T1.Method(0).Type()
+		T2M     = T2.Method(0).Type()
+		G       = scope.Lookup("G").Type()
+		GInt1   = instantiate(t, G, types.Typ[types.Int])
+		GInt2   = instantiate(t, G, types.Typ[types.Int])
+		GStr    = instantiate(t, G, types.Typ[types.String])
+		C       = scope.Lookup("C").Type()
+		CI      = C.Underlying().(*types.Interface)
+		I       = scope.Lookup("I").Type()
+		II      = I.Underlying().(*types.Interface)
+		U       = CI.EmbeddedType(0).(*typeparams.Union)
+		Fa1     = scope.Lookup("Fa1").Type().(*types.Signature)
+		Fa2     = scope.Lookup("Fa2").Type().(*types.Signature)
+		Fa1P    = typeparams.ForSignature(Fa1).At(0)
+		Fa2Q    = typeparams.ForSignature(Fa2).At(0)
+		Fb1     = scope.Lookup("Fb1").Type().(*types.Signature)
+		Fb1x    = Fb1.Params().At(0).Type()
+		Fb1y    = scope.Lookup("Fb1").(*types.Func).Scope().Lookup("y").Type()
+		Fb2     = scope.Lookup("Fb2").Type().(*types.Signature)
+		Fb2x    = Fb2.Params().At(0).Type()
+		G1      = scope.Lookup("G1").Type().(*types.Named)
+		G1M     = G1.Method(0).Type()
+		G1IntM1 = instantiate(t, G1, types.Typ[types.Int]).(*types.Named).Method(0).Type()
+		G1IntM2 = instantiate(t, G1, types.Typ[types.Int]).(*types.Named).Method(0).Type()
+		G1StrM  = instantiate(t, G1, types.Typ[types.String]).(*types.Named).Method(0).Type()
+		G2      = scope.Lookup("G2").Type()
+		// See below.
+		// G2M     = G2.Method(0).Type()
+		G2IntM  = instantiate(t, G2, types.Typ[types.Int]).(*types.Named).Method(0).Type()
+		ME1     = scope.Lookup("ME1").Type()
+		ME1Type = scope.Lookup("ME1Type").Type()
+		ME2     = scope.Lookup("ME2").Type()
+
+		Constraint = scope.Lookup("Constraint").Type()
+		Foo        = scope.Lookup("Foo").Type()
+		Fn         = scope.Lookup("Fn").Type()
+		Bar        = scope.Lookup("Foo").Type()
+		Baz        = scope.Lookup("Foo").Type()
+		Quux       = scope.Lookup("Quux").Type()
+	)
+
+	tmap := new(typeutil.Map)
+
+	steps := []struct {
+		typ      types.Type
+		name     string
+		newEntry bool
+	}{
+		{T1, "T1", true},
+		{T2, "T2", true},
+		{G, "G", true},
+		{C, "C", true},
+		{CI, "CI", true},
+		{U, "U", true},
+		{I, "I", true},
+		{II, "II", true}, // should not be identical to CI
+
+		// Methods can be identical, even with distinct receivers.
+		{T1M, "T1M", true},
+		{T2M, "T2M", false},
+
+		// Identical instances should map to the same entry.
+		{GInt1, "GInt1", true},
+		{GInt2, "GInt2", false},
+		// ..but instantiating with different arguments should yield a new entry.
+		{GStr, "GStr", true},
+
+		// F1 and F2 should have identical signatures.
+		{Fa1, "F1", true},
+		{Fa2, "F2", false},
+
+		// The identity of P and Q should not have been affected by type parameter
+		// masking during signature hashing.
+		{Fa1P, "F1P", true},
+		{Fa2Q, "F2Q", true},
+
+		{Fb1y, "Fb1y", true},
+		{Fb1x, "Fb1x", false},
+		{Fb2x, "Fb2x", true},
+		{Fb1, "Fb1", true},
+
+		// Mapping elements of the function scope should not affect the identity of
+		// Fb2 or Fb1.
+		{Fb2, "Fb2", false},
+
+		{G1, "G1", true},
+		{G1M, "G1M", true},
+		{G2, "G2", true},
+
+		// See golang/go#49912: receiver type parameter names should be ignored
+		// when comparing method identity.
+		// {G2M, "G2M", false},
+		{G1IntM1, "G1IntM1", true},
+		{G1IntM2, "G1IntM2", false},
+		{G1StrM, "G1StrM", true},
+		{G2IntM, "G2IntM", false}, // identical to G1IntM1
+
+		{ME1, "ME1", true},
+		{ME1Type, "ME1Type", false},
+		{ME2, "ME2", true},
+
+		// See golang/go#51314: avoid infinite recursion on cyclic type constraints.
+		{Constraint, "Constraint", true},
+		{Foo, "Foo", true},
+		{Fn, "Fn", true},
+		{Bar, "Bar", false},
+		{Baz, "Baz", false},
+		{Quux, "Quux", true},
+	}
+
+	for _, step := range steps {
+		existing := tmap.At(step.typ)
+		if (existing == nil) != step.newEntry {
+			t.Errorf("At(%s) = %v, want new entry: %t", step.name, existing, step.newEntry)
+		}
+		tmap.Set(step.typ, step.name)
+	}
+}
+
+func instantiate(t *testing.T, origin types.Type, targs ...types.Type) types.Type {
+	inst, err := typeparams.Instantiate(nil, origin, targs, true)
+	if err != nil {
+		t.Fatal(err)
+	}
+	return inst
+}
diff --git a/godoc/analysis/README b/godoc/analysis/README
deleted file mode 100644
index d3e732e..0000000
--- a/godoc/analysis/README
+++ /dev/null
@@ -1,111 +0,0 @@
-
-Type and Pointer Analysis to-do list
-====================================
-
-Alan Donovan <adonovan@google.com>
-
-
-Overall design
---------------
-
-We should re-run the type and pointer analyses periodically,
-as we do with the indexer.
-
-Version skew: how to mitigate the bad effects of stale URLs in old pages?
-We could record the file's length/CRC32/mtime in the go/loader, and
-refuse to decorate it with links unless they match at serving time.
-
-Use the VFS mechanism when (a) enumerating packages and (b) loading
-them.  (Requires planned changes to go/loader.)
-
-Future work: shard this using map/reduce for larger corpora.
-
-Testing: how does one test that a web page "looks right"?
-
-
-Bugs
-----
-
-(*ssa.Program).Create requires transitively error-free packages.  We
-can make this more robust by making the requirement transitively free
-of "hard" errors; soft errors are fine.
-
-Markup of compiler errors is slightly buggy because they overlap with
-other selections (e.g. Idents).  Fix.
-
-
-User Interface
---------------
-
-CALLGRAPH:
-- Add a search box: given a search node, expand path from each entry
-  point to it.
-- Cause hovering over a given node to highlight that node, and all
-  nodes that are logically identical to it.
-- Initially expand the callgraph trees (but not their toggle divs).
-
-CALLEES:
-- The '(' links are not very discoverable.  Highlight them?
-
-Type info:
-- In the source viewer's lower pane, use a toggle div around the
-  IMPLEMENTS and METHODSETS lists, like we do in the package view.
-  Only expand them initially if short.
-- Include IMPLEMENTS and METHOD SETS information in search index.
-- URLs in IMPLEMENTS/METHOD SETS always link to source, even from the
-  package docs view.  This makes sense for links to non-exported
-  types, but links to exported types and funcs should probably go to
-  other package docs.
-- Suppress toggle divs for empty method sets.
-
-Misc:
-- The [X] button in the lower pane is subject to scrolling.
-- Should the lower pane be floating?  An iframe?
-  When we change document.location by clicking on a link, it will go away.
-  How do we prevent that (a la Gmail's chat windows)?
-- Progress/status: for each file, display its analysis status, one of:
-   - not in analysis scope
-   - type analysis running...
-   - type analysis complete
-     (+ optionally: there were type errors in this file)
-   And if PTA requested:
-   - type analysis complete; PTA not attempted due to type errors
-   - PTA running...
-   - PTA complete
-- Scroll the selection into view, e.g. the vertical center, or better
-  still, under the pointer (assuming we have a mouse).
-
-
-More features
--------------
-
-Display the REFERRERS relation?  (Useful but potentially large.)
-
-Display the INSTANTIATIONS relation? i.e. given a type T, show the set of
-syntactic constructs that can instantiate it:
-    var x T
-    x := T{...}
-    x = new(T)
-    x = make([]T, n)
-    etc
-    + all INSTANTIATIONS of all S defined as struct{t T} or [n]T
-(Potentially a lot of information.)
-(Add this to guru too.)
-
-
-Optimisations
--------------
-
-Each call to addLink takes a (per-file) lock.  The locking is
-fine-grained so server latency isn't terrible, but overall it makes
-the link computation quite slow.  Batch update might be better.
-
-Memory usage is now about 1.5GB for GOROOT + go.tools.  It used to be 700MB.
-
-Optimize for time and space.  The main slowdown is the network I/O
-time caused by an increase in page size of about 3x: about 2x from
-HTML, and 0.7--2.1x from JSON (unindented vs indented).  The JSON
-contains a lot of filenames (e.g. 820 copies of 16 distinct
-filenames).  20% of the HTML is L%d spans (now disabled).  The HTML
-also contains lots of tooltips for long struct/interface types.
-De-dup or just abbreviate?  The actual formatting is very fast.
diff --git a/godoc/analysis/analysis.go b/godoc/analysis/analysis.go
index b79286c..de8e470 100644
--- a/godoc/analysis/analysis.go
+++ b/godoc/analysis/analysis.go
@@ -43,24 +43,9 @@
 package analysis // import "golang.org/x/tools/godoc/analysis"
 
 import (
-	"fmt"
-	"go/build"
-	"go/scanner"
-	"go/token"
-	"go/types"
-	"html"
 	"io"
-	"log"
-	"os"
-	"path/filepath"
 	"sort"
-	"strings"
 	"sync"
-
-	"golang.org/x/tools/go/loader"
-	"golang.org/x/tools/go/pointer"
-	"golang.org/x/tools/go/ssa"
-	"golang.org/x/tools/go/ssa/ssautil"
 )
 
 // -- links ------------------------------------------------------------
@@ -73,53 +58,6 @@
 	Write(w io.Writer, _ int, start bool) // the godoc.LinkWriter signature
 }
 
-// An <a> element.
-type aLink struct {
-	start, end int    // =godoc.Segment
-	title      string // hover text
-	onclick    string // JS code (NB: trusted)
-	href       string // URL     (NB: trusted)
-}
-
-func (a aLink) Start() int { return a.start }
-func (a aLink) End() int   { return a.end }
-func (a aLink) Write(w io.Writer, _ int, start bool) {
-	if start {
-		fmt.Fprintf(w, `<a title='%s'`, html.EscapeString(a.title))
-		if a.onclick != "" {
-			fmt.Fprintf(w, ` onclick='%s'`, html.EscapeString(a.onclick))
-		}
-		if a.href != "" {
-			// TODO(adonovan): I think that in principle, a.href must first be
-			// url.QueryEscape'd, but if I do that, a leading slash becomes "%2F",
-			// which causes the browser to treat the path as relative, not absolute.
-			// WTF?
-			fmt.Fprintf(w, ` href='%s'`, html.EscapeString(a.href))
-		}
-		fmt.Fprintf(w, ">")
-	} else {
-		fmt.Fprintf(w, "</a>")
-	}
-}
-
-// An <a class='error'> element.
-type errorLink struct {
-	start int
-	msg   string
-}
-
-func (e errorLink) Start() int { return e.start }
-func (e errorLink) End() int   { return e.start + 1 }
-
-func (e errorLink) Write(w io.Writer, _ int, start bool) {
-	// <span> causes havoc, not sure why, so use <a>.
-	if start {
-		fmt.Fprintf(w, `<a class='error' title='%s'>`, html.EscapeString(e.msg))
-	} else {
-		fmt.Fprintf(w, "</a>")
-	}
-}
-
 // -- fileInfo ---------------------------------------------------------
 
 // FileInfo holds analysis information for the source file view.
@@ -139,27 +77,6 @@
 	hasErrors bool // TODO(adonovan): surface this in the UI
 }
 
-// addLink adds a link to the Go source file fi.
-func (fi *fileInfo) addLink(link Link) {
-	fi.mu.Lock()
-	fi.links = append(fi.links, link)
-	fi.sorted = false
-	if _, ok := link.(errorLink); ok {
-		fi.hasErrors = true
-	}
-	fi.mu.Unlock()
-}
-
-// addData adds the structured value x to the JSON data for the Go
-// source file fi.  Its index is returned.
-func (fi *fileInfo) addData(x interface{}) int {
-	fi.mu.Lock()
-	index := len(fi.data)
-	fi.data = append(fi.data, x)
-	fi.mu.Unlock()
-	return index
-}
-
 // get returns the file info in external form.
 // Callers must not mutate its fields.
 func (fi *fileInfo) get() FileInfo {
@@ -191,19 +108,6 @@
 	types          []*TypeInfoJSON // type info for exported types
 }
 
-func (pi *pkgInfo) setCallGraph(callGraph []*PCGNodeJSON, callGraphIndex map[string]int) {
-	pi.mu.Lock()
-	pi.callGraph = callGraph
-	pi.callGraphIndex = callGraphIndex
-	pi.mu.Unlock()
-}
-
-func (pi *pkgInfo) addType(t *TypeInfoJSON) {
-	pi.mu.Lock()
-	pi.types = append(pi.types, t)
-	pi.mu.Unlock()
-}
-
 // get returns the package info in external form.
 // Callers must not mutate its fields.
 func (pi *pkgInfo) get() PackageInfo {
@@ -252,13 +156,6 @@
 	return res.status
 }
 
-func (res *Result) setStatusf(format string, args ...interface{}) {
-	res.mu.Lock()
-	res.status = fmt.Sprintf(format, args...)
-	log.Printf(format, args...)
-	res.mu.Unlock()
-}
-
 // FileInfo returns new slices containing opaque JSON values and the
 // HTML link markup for the specified godoc file URL.  Thread-safe.
 // Callers must not mutate the elements.
@@ -293,321 +190,8 @@
 	return res.pkgInfo(importPath).get()
 }
 
-// -- analysis ---------------------------------------------------------
-
-type analysis struct {
-	result    *Result
-	prog      *ssa.Program
-	ops       []chanOp       // all channel ops in program
-	allNamed  []*types.Named // all "defined" (formerly "named") types in the program
-	ptaConfig pointer.Config
-	path2url  map[string]string // maps openable path to godoc file URL (/src/fmt/print.go)
-	pcgs      map[*ssa.Package]*packageCallGraph
-}
-
-// fileAndOffset returns the file and offset for a given pos.
-func (a *analysis) fileAndOffset(pos token.Pos) (fi *fileInfo, offset int) {
-	return a.fileAndOffsetPosn(a.prog.Fset.Position(pos))
-}
-
-// fileAndOffsetPosn returns the file and offset for a given position.
-func (a *analysis) fileAndOffsetPosn(posn token.Position) (fi *fileInfo, offset int) {
-	url := a.path2url[posn.Filename]
-	return a.result.fileInfo(url), posn.Offset
-}
-
-// posURL returns the URL of the source extent [pos, pos+len).
-func (a *analysis) posURL(pos token.Pos, len int) string {
-	if pos == token.NoPos {
-		return ""
-	}
-	posn := a.prog.Fset.Position(pos)
-	url := a.path2url[posn.Filename]
-	return fmt.Sprintf("%s?s=%d:%d#L%d",
-		url, posn.Offset, posn.Offset+len, posn.Line)
-}
-
-// ----------------------------------------------------------------------
-
-// Run runs program analysis and computes the resulting markup,
-// populating *result in a thread-safe manner, first with type
-// information then later with pointer analysis information if
-// enabled by the pta flag.
-//
-func Run(pta bool, result *Result) {
-	conf := loader.Config{
-		AllowErrors: true,
-	}
-
-	// Silence the default error handler.
-	// Don't print all errors; we'll report just
-	// one per errant package later.
-	conf.TypeChecker.Error = func(e error) {}
-
-	var roots, args []string // roots[i] ends with os.PathSeparator
-
-	// Enumerate packages in $GOROOT.
-	root := filepath.Join(build.Default.GOROOT, "src") + string(os.PathSeparator)
-	roots = append(roots, root)
-	args = allPackages(root)
-	log.Printf("GOROOT=%s: %s\n", root, args)
-
-	// Enumerate packages in $GOPATH.
-	for i, dir := range filepath.SplitList(build.Default.GOPATH) {
-		root := filepath.Join(dir, "src") + string(os.PathSeparator)
-		roots = append(roots, root)
-		pkgs := allPackages(root)
-		log.Printf("GOPATH[%d]=%s: %s\n", i, root, pkgs)
-		args = append(args, pkgs...)
-	}
-
-	// Uncomment to make startup quicker during debugging.
-	//args = []string{"golang.org/x/tools/cmd/godoc"}
-	//args = []string{"fmt"}
-
-	if _, err := conf.FromArgs(args, true); err != nil {
-		// TODO(adonovan): degrade gracefully, not fail totally.
-		// (The crippling case is a parse error in an external test file.)
-		result.setStatusf("Analysis failed: %s.", err) // import error
-		return
-	}
-
-	result.setStatusf("Loading and type-checking packages...")
-	iprog, err := conf.Load()
-	if iprog != nil {
-		// Report only the first error of each package.
-		for _, info := range iprog.AllPackages {
-			for _, err := range info.Errors {
-				fmt.Fprintln(os.Stderr, err)
-				break
-			}
-		}
-		log.Printf("Loaded %d packages.", len(iprog.AllPackages))
-	}
-	if err != nil {
-		result.setStatusf("Loading failed: %s.\n", err)
-		return
-	}
-
-	// Create SSA-form program representation.
-	// Only the transitively error-free packages are used.
-	prog := ssautil.CreateProgram(iprog, ssa.GlobalDebug)
-
-	// Create a "testmain" package for each package with tests.
-	for _, pkg := range prog.AllPackages() {
-		if testmain := prog.CreateTestMainPackage(pkg); testmain != nil {
-			log.Printf("Adding tests for %s", pkg.Pkg.Path())
-		}
-	}
-
-	// Build SSA code for bodies of all functions in the whole program.
-	result.setStatusf("Constructing SSA form...")
-	prog.Build()
-	log.Print("SSA construction complete")
-
-	a := analysis{
-		result: result,
-		prog:   prog,
-		pcgs:   make(map[*ssa.Package]*packageCallGraph),
-	}
-
-	// Build a mapping from openable filenames to godoc file URLs,
-	// i.e. "/src/" plus path relative to GOROOT/src or GOPATH[i]/src.
-	a.path2url = make(map[string]string)
-	for _, info := range iprog.AllPackages {
-	nextfile:
-		for _, f := range info.Files {
-			if f.Pos() == 0 {
-				continue // e.g. files generated by cgo
-			}
-			abs := iprog.Fset.File(f.Pos()).Name()
-			// Find the root to which this file belongs.
-			for _, root := range roots {
-				rel := strings.TrimPrefix(abs, root)
-				if len(rel) < len(abs) {
-					a.path2url[abs] = "/src/" + filepath.ToSlash(rel)
-					continue nextfile
-				}
-			}
-
-			log.Printf("Can't locate file %s (package %q) beneath any root",
-				abs, info.Pkg.Path())
-		}
-	}
-
-	// Add links for scanner, parser, type-checker errors.
-	// TODO(adonovan): fix: these links can overlap with
-	// identifier markup, causing the renderer to emit some
-	// characters twice.
-	errors := make(map[token.Position][]string)
-	for _, info := range iprog.AllPackages {
-		for _, err := range info.Errors {
-			switch err := err.(type) {
-			case types.Error:
-				posn := a.prog.Fset.Position(err.Pos)
-				errors[posn] = append(errors[posn], err.Msg)
-			case scanner.ErrorList:
-				for _, e := range err {
-					errors[e.Pos] = append(errors[e.Pos], e.Msg)
-				}
-			default:
-				log.Printf("Package %q has error (%T) without position: %v\n",
-					info.Pkg.Path(), err, err)
-			}
-		}
-	}
-	for posn, errs := range errors {
-		fi, offset := a.fileAndOffsetPosn(posn)
-		fi.addLink(errorLink{
-			start: offset,
-			msg:   strings.Join(errs, "\n"),
-		})
-	}
-
-	// ---------- type-based analyses ----------
-
-	// Compute the all-pairs IMPLEMENTS relation.
-	// Collect all named types, even local types
-	// (which can have methods via promotion)
-	// and the built-in "error".
-	errorType := types.Universe.Lookup("error").Type().(*types.Named)
-	a.allNamed = append(a.allNamed, errorType)
-	for _, info := range iprog.AllPackages {
-		for _, obj := range info.Defs {
-			if obj, ok := obj.(*types.TypeName); ok {
-				if named, ok := obj.Type().(*types.Named); ok {
-					a.allNamed = append(a.allNamed, named)
-				}
-			}
-		}
-	}
-	log.Print("Computing implements relation...")
-	facts := computeImplements(&a.prog.MethodSets, a.allNamed)
-
-	// Add the type-based analysis results.
-	log.Print("Extracting type info...")
-	for _, info := range iprog.AllPackages {
-		a.doTypeInfo(info, facts)
-	}
-
-	a.visitInstrs(pta)
-
-	result.setStatusf("Type analysis complete.")
-
-	if pta {
-		mainPkgs := ssautil.MainPackages(prog.AllPackages())
-		log.Print("Transitively error-free main packages: ", mainPkgs)
-		a.pointer(mainPkgs)
-	}
-}
-
-// visitInstrs visits all SSA instructions in the program.
-func (a *analysis) visitInstrs(pta bool) {
-	log.Print("Visit instructions...")
-	for fn := range ssautil.AllFunctions(a.prog) {
-		for _, b := range fn.Blocks {
-			for _, instr := range b.Instrs {
-				// CALLEES (static)
-				// (Dynamic calls require pointer analysis.)
-				//
-				// We use the SSA representation to find the static callee,
-				// since in many cases it does better than the
-				// types.Info.{Refs,Selection} information.  For example:
-				//
-				//   defer func(){}()      // static call to anon function
-				//   f := func(){}; f()    // static call to anon function
-				//   f := fmt.Println; f() // static call to named function
-				//
-				// The downside is that we get no static callee information
-				// for packages that (transitively) contain errors.
-				if site, ok := instr.(ssa.CallInstruction); ok {
-					if callee := site.Common().StaticCallee(); callee != nil {
-						// TODO(adonovan): callgraph: elide wrappers.
-						// (Do static calls ever go to wrappers?)
-						if site.Common().Pos() != token.NoPos {
-							a.addCallees(site, []*ssa.Function{callee})
-						}
-					}
-				}
-
-				if !pta {
-					continue
-				}
-
-				// CHANNEL PEERS
-				// Collect send/receive/close instructions in the whole ssa.Program.
-				for _, op := range chanOps(instr) {
-					a.ops = append(a.ops, op)
-					a.ptaConfig.AddQuery(op.ch) // add channel ssa.Value to PTA query
-				}
-			}
-		}
-	}
-	log.Print("Visit instructions complete")
-}
-
-// pointer runs the pointer analysis.
-func (a *analysis) pointer(mainPkgs []*ssa.Package) {
-	// Run the pointer analysis and build the complete callgraph.
-	a.ptaConfig.Mains = mainPkgs
-	a.ptaConfig.BuildCallGraph = true
-	a.ptaConfig.Reflection = false // (for now)
-
-	a.result.setStatusf("Pointer analysis running...")
-
-	ptares, err := pointer.Analyze(&a.ptaConfig)
-	if err != nil {
-		// If this happens, it indicates a bug.
-		a.result.setStatusf("Pointer analysis failed: %s.", err)
-		return
-	}
-	log.Print("Pointer analysis complete.")
-
-	// Add the results of pointer analysis.
-
-	a.result.setStatusf("Computing channel peers...")
-	a.doChannelPeers(ptares.Queries)
-	a.result.setStatusf("Computing dynamic call graph edges...")
-	a.doCallgraph(ptares.CallGraph)
-
-	a.result.setStatusf("Analysis complete.")
-}
-
 type linksByStart []Link
 
 func (a linksByStart) Less(i, j int) bool { return a[i].Start() < a[j].Start() }
 func (a linksByStart) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
 func (a linksByStart) Len() int           { return len(a) }
-
-// allPackages returns a new sorted slice of all packages beneath the
-// specified package root directory, e.g. $GOROOT/src or $GOPATH/src.
-// Derived from go/ssa/stdlib_test.go
-// root must end with os.PathSeparator.
-//
-// TODO(adonovan): use buildutil.AllPackages when the tree thaws.
-func allPackages(root string) []string {
-	var pkgs []string
-	filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
-		if info == nil {
-			return nil // non-existent root directory?
-		}
-		if !info.IsDir() {
-			return nil // not a directory
-		}
-		// Prune the search if we encounter any of these names:
-		base := filepath.Base(path)
-		if base == "testdata" || strings.HasPrefix(base, ".") {
-			return filepath.SkipDir
-		}
-		pkg := filepath.ToSlash(strings.TrimPrefix(path, root))
-		switch pkg {
-		case "builtin":
-			return filepath.SkipDir
-		case "":
-			return nil // ignore root of tree
-		}
-		pkgs = append(pkgs, pkg)
-		return nil
-	})
-	return pkgs
-}
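
For reference, the deleted allPackages walk has a drop-in replacement in golang.org/x/tools/go/buildutil, as its own TODO notes. A minimal sketch, assuming the default build context (illustrative only, not part of this change):

```go
package main

import (
	"fmt"
	"go/build"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	// Enumerate the import path of every package under $GOROOT/src and each
	// $GOPATH/src, much like the deleted allPackages did by walking directories.
	for _, pkg := range buildutil.AllPackages(&build.Default) {
		fmt.Println(pkg)
	}
}
```
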
diff --git a/godoc/analysis/callgraph.go b/godoc/analysis/callgraph.go
deleted file mode 100644
index 492022d..0000000
--- a/godoc/analysis/callgraph.go
+++ /dev/null
@@ -1,351 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package analysis
-
-// This file computes the CALLERS and CALLEES relations from the call
-// graph.  CALLERS/CALLEES information is displayed in the lower pane
-// when a "func" token or ast.CallExpr.Lparen is clicked, respectively.
-
-import (
-	"fmt"
-	"go/ast"
-	"go/token"
-	"go/types"
-	"log"
-	"math/big"
-	"sort"
-
-	"golang.org/x/tools/go/callgraph"
-	"golang.org/x/tools/go/ssa"
-)
-
-// doCallgraph computes the CALLEES and CALLERS relations.
-func (a *analysis) doCallgraph(cg *callgraph.Graph) {
-	log.Print("Deleting synthetic nodes...")
-	// TODO(adonovan): opt: DeleteSyntheticNodes is asymptotically
-	// inefficient and can be (unpredictably) slow.
-	cg.DeleteSyntheticNodes()
-	log.Print("Synthetic nodes deleted")
-
-	// Populate nodes of package call graphs (PCGs).
-	for _, n := range cg.Nodes {
-		a.pcgAddNode(n.Func)
-	}
-	// Within each PCG, sort funcs by name.
-	for _, pcg := range a.pcgs {
-		pcg.sortNodes()
-	}
-
-	calledFuncs := make(map[ssa.CallInstruction]map[*ssa.Function]bool)
-	callingSites := make(map[*ssa.Function]map[ssa.CallInstruction]bool)
-	for _, n := range cg.Nodes {
-		for _, e := range n.Out {
-			if e.Site == nil {
-				continue // a call from a synthetic node such as <root>
-			}
-
-			// Add (site pos, callee) to calledFuncs.
-			// (Dynamic calls only.)
-			callee := e.Callee.Func
-
-			a.pcgAddEdge(n.Func, callee)
-
-			if callee.Synthetic != "" {
-				continue // call of a package initializer
-			}
-
-			if e.Site.Common().StaticCallee() == nil {
-				// dynamic call
-				// (CALLEES information for static calls
-				// is computed using SSA information.)
-				lparen := e.Site.Common().Pos()
-				if lparen != token.NoPos {
-					fns := calledFuncs[e.Site]
-					if fns == nil {
-						fns = make(map[*ssa.Function]bool)
-						calledFuncs[e.Site] = fns
-					}
-					fns[callee] = true
-				}
-			}
-
-			// Add (callee, site) to callingSites.
-			fns := callingSites[callee]
-			if fns == nil {
-				fns = make(map[ssa.CallInstruction]bool)
-				callingSites[callee] = fns
-			}
-			fns[e.Site] = true
-		}
-	}
-
-	// CALLEES.
-	log.Print("Callees...")
-	for site, fns := range calledFuncs {
-		var funcs funcsByPos
-		for fn := range fns {
-			funcs = append(funcs, fn)
-		}
-		sort.Sort(funcs)
-
-		a.addCallees(site, funcs)
-	}
-
-	// CALLERS
-	log.Print("Callers...")
-	for callee, sites := range callingSites {
-		pos := funcToken(callee)
-		if pos == token.NoPos {
-			log.Printf("CALLERS: skipping %s: no pos", callee)
-			continue
-		}
-
-		var this *types.Package // for relativizing names
-		if callee.Pkg != nil {
-			this = callee.Pkg.Pkg
-		}
-
-		// Compute sites grouped by parent, with text and URLs.
-		sitesByParent := make(map[*ssa.Function]sitesByPos)
-		for site := range sites {
-			fn := site.Parent()
-			sitesByParent[fn] = append(sitesByParent[fn], site)
-		}
-		var funcs funcsByPos
-		for fn := range sitesByParent {
-			funcs = append(funcs, fn)
-		}
-		sort.Sort(funcs)
-
-		v := callersJSON{
-			Callee:  callee.String(),
-			Callers: []callerJSON{}, // (JS wants non-nil)
-		}
-		for _, fn := range funcs {
-			caller := callerJSON{
-				Func:  prettyFunc(this, fn),
-				Sites: []anchorJSON{}, // (JS wants non-nil)
-			}
-			sites := sitesByParent[fn]
-			sort.Sort(sites)
-			for _, site := range sites {
-				pos := site.Common().Pos()
-				if pos != token.NoPos {
-					caller.Sites = append(caller.Sites, anchorJSON{
-						Text: fmt.Sprintf("%d", a.prog.Fset.Position(pos).Line),
-						Href: a.posURL(pos, len("(")),
-					})
-				}
-			}
-			v.Callers = append(v.Callers, caller)
-		}
-
-		fi, offset := a.fileAndOffset(pos)
-		fi.addLink(aLink{
-			start:   offset,
-			end:     offset + len("func"),
-			title:   fmt.Sprintf("%d callers", len(sites)),
-			onclick: fmt.Sprintf("onClickCallers(%d)", fi.addData(v)),
-		})
-	}
-
-	// PACKAGE CALLGRAPH
-	log.Print("Package call graph...")
-	for pkg, pcg := range a.pcgs {
-		// Maps (*ssa.Function).RelString() to index in JSON CALLGRAPH array.
-		index := make(map[string]int)
-
-		// Treat exported functions (and exported methods of
-		// exported named types) as roots even if they aren't
-		// actually called from outside the package.
-		for i, n := range pcg.nodes {
-			if i == 0 || n.fn.Object() == nil || !n.fn.Object().Exported() {
-				continue
-			}
-			recv := n.fn.Signature.Recv()
-			if recv == nil || deref(recv.Type()).(*types.Named).Obj().Exported() {
-				roots := &pcg.nodes[0].edges
-				roots.SetBit(roots, i, 1)
-			}
-			index[n.fn.RelString(pkg.Pkg)] = i
-		}
-
-		json := a.pcgJSON(pcg)
-
-		// TODO(adonovan): pkg.Path() is not unique!
-		// It is possible to declare a non-test package called x_test.
-		a.result.pkgInfo(pkg.Pkg.Path()).setCallGraph(json, index)
-	}
-}
-
-// addCallees adds client data and links for the facts that site calls fns.
-func (a *analysis) addCallees(site ssa.CallInstruction, fns []*ssa.Function) {
-	v := calleesJSON{
-		Descr:   site.Common().Description(),
-		Callees: []anchorJSON{}, // (JS wants non-nil)
-	}
-	var this *types.Package // for relativizing names
-	if p := site.Parent().Package(); p != nil {
-		this = p.Pkg
-	}
-
-	for _, fn := range fns {
-		v.Callees = append(v.Callees, anchorJSON{
-			Text: prettyFunc(this, fn),
-			Href: a.posURL(funcToken(fn), len("func")),
-		})
-	}
-
-	fi, offset := a.fileAndOffset(site.Common().Pos())
-	fi.addLink(aLink{
-		start:   offset,
-		end:     offset + len("("),
-		title:   fmt.Sprintf("%d callees", len(v.Callees)),
-		onclick: fmt.Sprintf("onClickCallees(%d)", fi.addData(v)),
-	})
-}
-
-// -- utilities --------------------------------------------------------
-
-// stable order within packages but undefined across packages.
-type funcsByPos []*ssa.Function
-
-func (a funcsByPos) Less(i, j int) bool { return a[i].Pos() < a[j].Pos() }
-func (a funcsByPos) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
-func (a funcsByPos) Len() int           { return len(a) }
-
-type sitesByPos []ssa.CallInstruction
-
-func (a sitesByPos) Less(i, j int) bool { return a[i].Common().Pos() < a[j].Common().Pos() }
-func (a sitesByPos) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
-func (a sitesByPos) Len() int           { return len(a) }
-
-func funcToken(fn *ssa.Function) token.Pos {
-	switch syntax := fn.Syntax().(type) {
-	case *ast.FuncLit:
-		return syntax.Type.Func
-	case *ast.FuncDecl:
-		return syntax.Type.Func
-	}
-	return token.NoPos
-}
-
-// prettyFunc pretty-prints fn for the user interface.
-// TODO(adonovan): return HTML so we have more markup freedom.
-func prettyFunc(this *types.Package, fn *ssa.Function) string {
-	if fn.Parent() != nil {
-		return fmt.Sprintf("%s in %s",
-			types.TypeString(fn.Signature, types.RelativeTo(this)),
-			prettyFunc(this, fn.Parent()))
-	}
-	if fn.Synthetic != "" && fn.Name() == "init" {
-		// (This is the actual initializer, not a declared 'func init').
-		if fn.Pkg.Pkg == this {
-			return "package initializer"
-		}
-		return fmt.Sprintf("%q package initializer", fn.Pkg.Pkg.Path())
-	}
-	return fn.RelString(this)
-}
-
-// -- intra-package callgraph ------------------------------------------
-
-// pcgNode represents a node in the package call graph (PCG).
-type pcgNode struct {
-	fn     *ssa.Function
-	pretty string  // cache of prettyFunc(fn)
-	edges  big.Int // set of callee func indices
-}
-
-// A packageCallGraph represents the intra-package edges of the global call graph.
-// The zeroth node indicates "all external functions".
-type packageCallGraph struct {
-	nodeIndex map[*ssa.Function]int // maps func to node index (a small int)
-	nodes     []*pcgNode            // maps node index to node
-}
-
-// sortNodes populates pcg.nodes in name order and updates the nodeIndex.
-func (pcg *packageCallGraph) sortNodes() {
-	nodes := make([]*pcgNode, 0, len(pcg.nodeIndex))
-	nodes = append(nodes, &pcgNode{fn: nil, pretty: "<external>"})
-	for fn := range pcg.nodeIndex {
-		nodes = append(nodes, &pcgNode{
-			fn:     fn,
-			pretty: prettyFunc(fn.Pkg.Pkg, fn),
-		})
-	}
-	sort.Sort(pcgNodesByPretty(nodes[1:]))
-	for i, n := range nodes {
-		pcg.nodeIndex[n.fn] = i
-	}
-	pcg.nodes = nodes
-}
-
-func (pcg *packageCallGraph) addEdge(caller, callee *ssa.Function) {
-	var callerIndex int
-	if caller.Pkg == callee.Pkg {
-		// intra-package edge
-		callerIndex = pcg.nodeIndex[caller]
-		if callerIndex < 1 {
-			panic(caller)
-		}
-	}
-	edges := &pcg.nodes[callerIndex].edges
-	edges.SetBit(edges, pcg.nodeIndex[callee], 1)
-}
-
-func (a *analysis) pcgAddNode(fn *ssa.Function) {
-	if fn.Pkg == nil {
-		return
-	}
-	pcg, ok := a.pcgs[fn.Pkg]
-	if !ok {
-		pcg = &packageCallGraph{nodeIndex: make(map[*ssa.Function]int)}
-		a.pcgs[fn.Pkg] = pcg
-	}
-	pcg.nodeIndex[fn] = -1
-}
-
-func (a *analysis) pcgAddEdge(caller, callee *ssa.Function) {
-	if callee.Pkg != nil {
-		a.pcgs[callee.Pkg].addEdge(caller, callee)
-	}
-}
-
-// pcgJSON returns a new slice of callgraph JSON values.
-func (a *analysis) pcgJSON(pcg *packageCallGraph) []*PCGNodeJSON {
-	var nodes []*PCGNodeJSON
-	for _, n := range pcg.nodes {
-
-		// TODO(adonovan): why is there no good way to iterate
-		// over the set bits of a big.Int?
-		var callees []int
-		nbits := n.edges.BitLen()
-		for j := 0; j < nbits; j++ {
-			if n.edges.Bit(j) == 1 {
-				callees = append(callees, j)
-			}
-		}
-
-		var pos token.Pos
-		if n.fn != nil {
-			pos = funcToken(n.fn)
-		}
-		nodes = append(nodes, &PCGNodeJSON{
-			Func: anchorJSON{
-				Text: n.pretty,
-				Href: a.posURL(pos, len("func")),
-			},
-			Callees: callees,
-		})
-	}
-	return nodes
-}
-
-type pcgNodesByPretty []*pcgNode
-
-func (a pcgNodesByPretty) Less(i, j int) bool { return a[i].pretty < a[j].pretty }
-func (a pcgNodesByPretty) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
-func (a pcgNodesByPretty) Len() int           { return len(a) }
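
A side note on the deleted packageCallGraph: each node's callee set is a math/big.Int used as a bitset, and pcgJSON enumerates it bit by bit (the TODO above laments the lack of a nicer iterator). A self-contained sketch of that pattern, with illustrative indices only:

```go
package main

import (
	"fmt"
	"math/big"
)

func main() {
	// edges plays the role of pcgNode.edges: a grow-on-demand bitset of callee indices.
	var edges big.Int
	for _, callee := range []int{3, 7, 42} {
		edges.SetBit(&edges, callee, 1)
	}

	// Enumerate the set bits, as pcgJSON does.
	var callees []int
	for j := 0; j < edges.BitLen(); j++ {
		if edges.Bit(j) == 1 {
			callees = append(callees, j)
		}
	}
	fmt.Println(callees) // [3 7 42]
}
```
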
diff --git a/godoc/analysis/implements.go b/godoc/analysis/implements.go
deleted file mode 100644
index 5a29579..0000000
--- a/godoc/analysis/implements.go
+++ /dev/null
@@ -1,195 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package analysis
-
-// This file computes the "implements" relation over all pairs of
-// named types in the program.  (The mark-up is done by typeinfo.go.)
-
-// TODO(adonovan): do we want to report implements(C, I) where C and I
-// belong to different packages and at least one is not exported?
-
-import (
-	"go/types"
-	"sort"
-
-	"golang.org/x/tools/go/types/typeutil"
-)
-
-// computeImplements computes the "implements" relation over all pairs
-// of named types in allNamed.
-func computeImplements(cache *typeutil.MethodSetCache, allNamed []*types.Named) map[*types.Named]implementsFacts {
-	// Information about a single type's method set.
-	type msetInfo struct {
-		typ          types.Type
-		mset         *types.MethodSet
-		mask1, mask2 uint64
-	}
-
-	initMsetInfo := func(info *msetInfo, typ types.Type) {
-		info.typ = typ
-		info.mset = cache.MethodSet(typ)
-		for i := 0; i < info.mset.Len(); i++ {
-			name := info.mset.At(i).Obj().Name()
-			info.mask1 |= 1 << methodBit(name[0])
-			info.mask2 |= 1 << methodBit(name[len(name)-1])
-		}
-	}
-
-	// satisfies(T, U) reports whether type T satisfies type U.
-	// U must be an interface.
-	//
-	// Since there are thousands of types (and thus millions of
-	// pairs of types) and types.Assignable(T, U) is relatively
-	// expensive, we compute assignability directly from the
-	// method sets.  (At least one of T and U must be an
-	// interface.)
-	//
-	// We use a trick (thanks gri!) related to a Bloom filter to
-	// quickly reject most tests, which are false.  For each
-	// method set, we precompute a mask, a set of bits, one per
-	// distinct initial byte of each method name.  Thus the mask
-	// for io.ReadWriter would be {'R','W'}.  AssignableTo(T, U)
-	// cannot be true unless mask(T)&mask(U)==mask(U).
-	//
-	// As with a Bloom filter, we can improve precision by testing
-	// additional hashes, e.g. using the last letter of each
-	// method name, so long as the subset mask property holds.
-	//
-	// When analyzing the standard library, there are about 1e6
-	// calls to satisfies(), of which 0.6% return true.  With a
-	// 1-hash filter, 95% of calls avoid the expensive check; with
-	// a 2-hash filter, this grows to 98.2%.
-	satisfies := func(T, U *msetInfo) bool {
-		return T.mask1&U.mask1 == U.mask1 &&
-			T.mask2&U.mask2 == U.mask2 &&
-			containsAllIdsOf(T.mset, U.mset)
-	}
-
-	// Information about a named type N, and perhaps also *N.
-	type namedInfo struct {
-		isInterface bool
-		base        msetInfo // N
-		ptr         msetInfo // *N, iff N !isInterface
-	}
-
-	var infos []namedInfo
-
-	// Precompute the method sets and their masks.
-	for _, N := range allNamed {
-		var info namedInfo
-		initMsetInfo(&info.base, N)
-		_, info.isInterface = N.Underlying().(*types.Interface)
-		if !info.isInterface {
-			initMsetInfo(&info.ptr, types.NewPointer(N))
-		}
-
-		if info.base.mask1|info.ptr.mask1 == 0 {
-			continue // neither N nor *N has methods
-		}
-
-		infos = append(infos, info)
-	}
-
-	facts := make(map[*types.Named]implementsFacts)
-
-	// Test all pairs of distinct named types (T, U).
-	// TODO(adonovan): opt: compute (U, T) at the same time.
-	for t := range infos {
-		T := &infos[t]
-		var to, from, fromPtr []types.Type
-		for u := range infos {
-			if t == u {
-				continue
-			}
-			U := &infos[u]
-			switch {
-			case T.isInterface && U.isInterface:
-				if satisfies(&U.base, &T.base) {
-					to = append(to, U.base.typ)
-				}
-				if satisfies(&T.base, &U.base) {
-					from = append(from, U.base.typ)
-				}
-			case T.isInterface: // U concrete
-				if satisfies(&U.base, &T.base) {
-					to = append(to, U.base.typ)
-				} else if satisfies(&U.ptr, &T.base) {
-					to = append(to, U.ptr.typ)
-				}
-			case U.isInterface: // T concrete
-				if satisfies(&T.base, &U.base) {
-					from = append(from, U.base.typ)
-				} else if satisfies(&T.ptr, &U.base) {
-					fromPtr = append(fromPtr, U.base.typ)
-				}
-			}
-		}
-
-		// Sort types (arbitrarily) to avoid nondeterminism.
-		sort.Sort(typesByString(to))
-		sort.Sort(typesByString(from))
-		sort.Sort(typesByString(fromPtr))
-
-		facts[T.base.typ.(*types.Named)] = implementsFacts{to, from, fromPtr}
-	}
-
-	return facts
-}
-
-type implementsFacts struct {
-	to      []types.Type // named or ptr-to-named types assignable to interface T
-	from    []types.Type // named interfaces assignable from T
-	fromPtr []types.Type // named interfaces assignable only from *T
-}
-
-type typesByString []types.Type
-
-func (p typesByString) Len() int           { return len(p) }
-func (p typesByString) Less(i, j int) bool { return p[i].String() < p[j].String() }
-func (p typesByString) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
-
-// methodBit returns the index of x in [a-zA-Z], or 52 if not found.
-func methodBit(x byte) uint64 {
-	switch {
-	case 'a' <= x && x <= 'z':
-		return uint64(x - 'a')
-	case 'A' <= x && x <= 'Z':
-		return uint64(26 + x - 'A')
-	}
-	return 52 // all other bytes
-}
-
-// containsAllIdsOf reports whether the method identifiers of T are a
-// superset of those in U.  If U belongs to an interface type, the
-// result is equal to types.Assignable(T, U), but is cheaper to compute.
-//
-// TODO(gri): make this a method of *types.MethodSet.
-//
-func containsAllIdsOf(T, U *types.MethodSet) bool {
-	t, tlen := 0, T.Len()
-	u, ulen := 0, U.Len()
-	for t < tlen && u < ulen {
-		tMeth := T.At(t).Obj()
-		uMeth := U.At(u).Obj()
-		tId := tMeth.Id()
-		uId := uMeth.Id()
-		if tId > uId {
-			// U has a method T lacks: fail.
-			return false
-		}
-		if tId < uId {
-			// T has a method U lacks: ignore it.
-			t++
-			continue
-		}
-		// U and T both have a method of this Id.  Check types.
-		if !types.Identical(tMeth.Type(), uMeth.Type()) {
-			return false // type mismatch
-		}
-		u++
-		t++
-	}
-	return u == ulen
-}
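
The first-letter mask trick described in computeImplements is easy to demonstrate in isolation. A minimal sketch using io.Reader/io.ReadWriter method names as the example (illustrative only; the real code derives masks from go/types method sets):

```go
package main

import "fmt"

// methodBit mirrors the deleted helper: one bit per initial letter,
// with bit 52 shared by all other bytes.
func methodBit(x byte) uint64 {
	switch {
	case 'a' <= x && x <= 'z':
		return uint64(x - 'a')
	case 'A' <= x && x <= 'Z':
		return uint64(26 + x - 'A')
	}
	return 52
}

// mask computes the first-letter mask of a method set.
func mask(methods []string) uint64 {
	var m uint64
	for _, name := range methods {
		m |= 1 << methodBit(name[0])
	}
	return m
}

func main() {
	readWriter := mask([]string{"Read", "Write"}) // bits {'R','W'}
	reader := mask([]string{"Read"})              // bit {'R'}

	// satisfies(T, U) requires mask(T)&mask(U) == mask(U): U's bits must be a subset of T's.
	fmt.Println(readWriter&reader == reader)     // true: a ReadWriter passes the pre-filter for Reader
	fmt.Println(reader&readWriter == readWriter) // false: a Reader is rejected cheaply, 'W' is missing
}
```
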
diff --git a/godoc/analysis/json.go b/godoc/analysis/json.go
index f897618..b6e1e3f 100644
--- a/godoc/analysis/json.go
+++ b/godoc/analysis/json.go
@@ -11,16 +11,6 @@
 	Href string // URL
 }
 
-type commOpJSON struct {
-	Op anchorJSON
-	Fn string
-}
-
-// JavaScript's onClickComm() expects a commJSON.
-type commJSON struct {
-	Ops []commOpJSON
-}
-
 // Indicates one of these forms of fact about a type T:
 // T "is implemented by <ByKind> type <Other>"  (ByKind != "", e.g. "array")
 // T "implements <Other>"                       (ByKind == "")
@@ -43,23 +33,6 @@
 	ImplGroups  []implGroupJSON
 }
 
-// JavaScript's onClickCallees() expects a calleesJSON.
-type calleesJSON struct {
-	Descr   string
-	Callees []anchorJSON // markup for called function
-}
-
-type callerJSON struct {
-	Func  string
-	Sites []anchorJSON
-}
-
-// JavaScript's onClickCallers() expects a callersJSON.
-type callersJSON struct {
-	Callee  string
-	Callers []callerJSON
-}
-
 // JavaScript's cgAddChild requires a global array of PCGNodeJSON
 // called CALLGRAPH, representing the intra-package call graph.
 // The first element is special and represents "all external callers".
diff --git a/godoc/analysis/peers.go b/godoc/analysis/peers.go
deleted file mode 100644
index a742f06..0000000
--- a/godoc/analysis/peers.go
+++ /dev/null
@@ -1,154 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package analysis
-
-// This file computes the channel "peers" relation over all pairs of
-// channel operations in the program.  The peers are displayed in the
-// lower pane when a channel operation (make, <-, close) is clicked.
-
-// TODO(adonovan): handle calls to reflect.{Select,Recv,Send,Close} too,
-// then enable reflection in PTA.
-
-import (
-	"fmt"
-	"go/token"
-	"go/types"
-
-	"golang.org/x/tools/go/pointer"
-	"golang.org/x/tools/go/ssa"
-)
-
-func (a *analysis) doChannelPeers(ptsets map[ssa.Value]pointer.Pointer) {
-	addSendRecv := func(j *commJSON, op chanOp) {
-		j.Ops = append(j.Ops, commOpJSON{
-			Op: anchorJSON{
-				Text: op.mode,
-				Href: a.posURL(op.pos, op.len),
-			},
-			Fn: prettyFunc(nil, op.fn),
-		})
-	}
-
-	// Build an undirected bipartite multigraph (binary relation)
-	// of MakeChan ops and send/recv/close ops.
-	//
-	// TODO(adonovan): opt: use channel element types to partition
-	// the O(n^2) problem into subproblems.
-	aliasedOps := make(map[*ssa.MakeChan][]chanOp)
-	opToMakes := make(map[chanOp][]*ssa.MakeChan)
-	for _, op := range a.ops {
-		// Combine the PT sets from all contexts.
-		var makes []*ssa.MakeChan // aliased ops
-		ptr, ok := ptsets[op.ch]
-		if !ok {
-			continue // e.g. channel op in dead code
-		}
-		for _, label := range ptr.PointsTo().Labels() {
-			makechan, ok := label.Value().(*ssa.MakeChan)
-			if !ok {
-				continue // skip intrinsically-created channels for now
-			}
-			if makechan.Pos() == token.NoPos {
-				continue // not possible?
-			}
-			makes = append(makes, makechan)
-			aliasedOps[makechan] = append(aliasedOps[makechan], op)
-		}
-		opToMakes[op] = makes
-	}
-
-	// Now that complete relation is built, build links for ops.
-	for _, op := range a.ops {
-		v := commJSON{
-			Ops: []commOpJSON{}, // (JS wants non-nil)
-		}
-		ops := make(map[chanOp]bool)
-		for _, makechan := range opToMakes[op] {
-			v.Ops = append(v.Ops, commOpJSON{
-				Op: anchorJSON{
-					Text: "made",
-					Href: a.posURL(makechan.Pos()-token.Pos(len("make")),
-						len("make")),
-				},
-				Fn: makechan.Parent().RelString(op.fn.Package().Pkg),
-			})
-			for _, op := range aliasedOps[makechan] {
-				ops[op] = true
-			}
-		}
-		for op := range ops {
-			addSendRecv(&v, op)
-		}
-
-		// Add links for each aliased op.
-		fi, offset := a.fileAndOffset(op.pos)
-		fi.addLink(aLink{
-			start:   offset,
-			end:     offset + op.len,
-			title:   "show channel ops",
-			onclick: fmt.Sprintf("onClickComm(%d)", fi.addData(v)),
-		})
-	}
-	// Add links for makechan ops themselves.
-	for makechan, ops := range aliasedOps {
-		v := commJSON{
-			Ops: []commOpJSON{}, // (JS wants non-nil)
-		}
-		for _, op := range ops {
-			addSendRecv(&v, op)
-		}
-
-		fi, offset := a.fileAndOffset(makechan.Pos())
-		fi.addLink(aLink{
-			start:   offset - len("make"),
-			end:     offset,
-			title:   "show channel ops",
-			onclick: fmt.Sprintf("onClickComm(%d)", fi.addData(v)),
-		})
-	}
-}
-
-// -- utilities --------------------------------------------------------
-
-// chanOp abstracts an ssa.Send, ssa.Unop(ARROW), close(), or a SelectState.
-// Derived from cmd/guru/peers.go.
-type chanOp struct {
-	ch   ssa.Value
-	mode string // sent|received|closed
-	pos  token.Pos
-	len  int
-	fn   *ssa.Function
-}
-
-// chanOps returns a slice of all the channel operations in the instruction.
-// Derived from cmd/guru/peers.go.
-func chanOps(instr ssa.Instruction) []chanOp {
-	fn := instr.Parent()
-	var ops []chanOp
-	switch instr := instr.(type) {
-	case *ssa.UnOp:
-		if instr.Op == token.ARROW {
-			// TODO(adonovan): don't assume <-ch; could be 'range ch'.
-			ops = append(ops, chanOp{instr.X, "received", instr.Pos(), len("<-"), fn})
-		}
-	case *ssa.Send:
-		ops = append(ops, chanOp{instr.Chan, "sent", instr.Pos(), len("<-"), fn})
-	case *ssa.Select:
-		for _, st := range instr.States {
-			mode := "received"
-			if st.Dir == types.SendOnly {
-				mode = "sent"
-			}
-			ops = append(ops, chanOp{st.Chan, mode, st.Pos, len("<-"), fn})
-		}
-	case ssa.CallInstruction:
-		call := instr.Common()
-		if blt, ok := call.Value.(*ssa.Builtin); ok && blt.Name() == "close" {
-			pos := instr.Common().Pos()
-			ops = append(ops, chanOp{call.Args[0], "closed", pos - token.Pos(len("close")), len("close("), fn})
-		}
-	}
-	return ops
-}
diff --git a/godoc/analysis/typeinfo.go b/godoc/analysis/typeinfo.go
deleted file mode 100644
index e57683f..0000000
--- a/godoc/analysis/typeinfo.go
+++ /dev/null
@@ -1,234 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package analysis
-
-// This file computes the markup for information from go/types:
-// IMPORTS, identifier RESOLUTION, METHOD SETS, size/alignment, and
-// the IMPLEMENTS relation.
-//
-// IMPORTS links connect import specs to the documentation for the
-// imported package.
-//
-// RESOLUTION links referring identifiers to their defining
-// identifier, and adds tooltips for kind and type.
-//
-// METHOD SETS, size/alignment, and the IMPLEMENTS relation are
-// displayed in the lower pane when a type's defining identifier is
-// clicked.
-
-import (
-	"fmt"
-	"go/types"
-	"reflect"
-	"strconv"
-	"strings"
-
-	"golang.org/x/tools/go/loader"
-	"golang.org/x/tools/go/types/typeutil"
-)
-
-// TODO(adonovan): audit to make sure it's safe on ill-typed packages.
-
-// TODO(adonovan): use same Sizes as loader.Config.
-var sizes = types.StdSizes{WordSize: 8, MaxAlign: 8}
-
-func (a *analysis) doTypeInfo(info *loader.PackageInfo, implements map[*types.Named]implementsFacts) {
-	// We must not assume the corresponding SSA packages were
-	// created (i.e. were transitively error-free).
-
-	// IMPORTS
-	for _, f := range info.Files {
-		// Package decl.
-		fi, offset := a.fileAndOffset(f.Name.Pos())
-		fi.addLink(aLink{
-			start: offset,
-			end:   offset + len(f.Name.Name),
-			title: "Package docs for " + info.Pkg.Path(),
-			// TODO(adonovan): fix: we're putting the untrusted Path()
-			// into a trusted field.  What's the appropriate sanitizer?
-			href: "/pkg/" + info.Pkg.Path(),
-		})
-
-		// Import specs.
-		for _, imp := range f.Imports {
-			// Remove quotes.
-			L := int(imp.End()-imp.Path.Pos()) - len(`""`)
-			path, _ := strconv.Unquote(imp.Path.Value)
-			fi, offset := a.fileAndOffset(imp.Path.Pos())
-			fi.addLink(aLink{
-				start: offset + 1,
-				end:   offset + 1 + L,
-				title: "Package docs for " + path,
-				// TODO(adonovan): fix: we're putting the untrusted path
-				// into a trusted field.  What's the appropriate sanitizer?
-				href: "/pkg/" + path,
-			})
-		}
-	}
-
-	// RESOLUTION
-	qualifier := types.RelativeTo(info.Pkg)
-	for id, obj := range info.Uses {
-		// Position of the object definition.
-		pos := obj.Pos()
-		Len := len(obj.Name())
-
-		// Correct the position for non-renaming import specs.
-		//  import "sync/atomic"
-		//          ^^^^^^^^^^^
-		if obj, ok := obj.(*types.PkgName); ok && id.Name == obj.Imported().Name() {
-			// Assume this is a non-renaming import.
-			// NB: not true for degenerate renamings: `import foo "foo"`.
-			pos++
-			Len = len(obj.Imported().Path())
-		}
-
-		if obj.Pkg() == nil {
-			continue // don't mark up built-ins.
-		}
-
-		fi, offset := a.fileAndOffset(id.NamePos)
-		fi.addLink(aLink{
-			start: offset,
-			end:   offset + len(id.Name),
-			title: types.ObjectString(obj, qualifier),
-			href:  a.posURL(pos, Len),
-		})
-	}
-
-	// IMPLEMENTS & METHOD SETS
-	for _, obj := range info.Defs {
-		if obj, ok := obj.(*types.TypeName); ok {
-			if named, ok := obj.Type().(*types.Named); ok {
-				a.namedType(named, implements)
-			}
-		}
-	}
-}
-
-func (a *analysis) namedType(T *types.Named, implements map[*types.Named]implementsFacts) {
-	obj := T.Obj()
-	qualifier := types.RelativeTo(obj.Pkg())
-	v := &TypeInfoJSON{
-		Name:    obj.Name(),
-		Size:    sizes.Sizeof(T),
-		Align:   sizes.Alignof(T),
-		Methods: []anchorJSON{}, // (JS wants non-nil)
-	}
-
-	// addFact adds the fact "is implemented by T" (by) or
-	// "implements T" (!by) to group.
-	addFact := func(group *implGroupJSON, T types.Type, by bool) {
-		Tobj := deref(T).(*types.Named).Obj()
-		var byKind string
-		if by {
-			// Show underlying kind of implementing type,
-			// e.g. "slice", "array", "struct".
-			s := reflect.TypeOf(T.Underlying()).String()
-			byKind = strings.ToLower(strings.TrimPrefix(s, "*types."))
-		}
-		group.Facts = append(group.Facts, implFactJSON{
-			ByKind: byKind,
-			Other: anchorJSON{
-				Href: a.posURL(Tobj.Pos(), len(Tobj.Name())),
-				Text: types.TypeString(T, qualifier),
-			},
-		})
-	}
-
-	// IMPLEMENTS
-	if r, ok := implements[T]; ok {
-		if isInterface(T) {
-			// "T is implemented by <conc>" ...
-			// "T is implemented by <iface>"...
-			// "T implements        <iface>"...
-			group := implGroupJSON{
-				Descr: types.TypeString(T, qualifier),
-			}
-			// Show concrete types first; use two passes.
-			for _, sub := range r.to {
-				if !isInterface(sub) {
-					addFact(&group, sub, true)
-				}
-			}
-			for _, sub := range r.to {
-				if isInterface(sub) {
-					addFact(&group, sub, true)
-				}
-			}
-			for _, super := range r.from {
-				addFact(&group, super, false)
-			}
-			v.ImplGroups = append(v.ImplGroups, group)
-		} else {
-			// T is concrete.
-			if r.from != nil {
-				// "T implements <iface>"...
-				group := implGroupJSON{
-					Descr: types.TypeString(T, qualifier),
-				}
-				for _, super := range r.from {
-					addFact(&group, super, false)
-				}
-				v.ImplGroups = append(v.ImplGroups, group)
-			}
-			if r.fromPtr != nil {
-				// "*C implements <iface>"...
-				group := implGroupJSON{
-					Descr: "*" + types.TypeString(T, qualifier),
-				}
-				for _, psuper := range r.fromPtr {
-					addFact(&group, psuper, false)
-				}
-				v.ImplGroups = append(v.ImplGroups, group)
-			}
-		}
-	}
-
-	// METHOD SETS
-	for _, sel := range typeutil.IntuitiveMethodSet(T, &a.prog.MethodSets) {
-		meth := sel.Obj().(*types.Func)
-		pos := meth.Pos() // may be 0 for error.Error
-		v.Methods = append(v.Methods, anchorJSON{
-			Href: a.posURL(pos, len(meth.Name())),
-			Text: types.SelectionString(sel, qualifier),
-		})
-	}
-
-	// Since there can be many specs per decl, we
-	// can't attach the link to the keyword 'type'
-	// (as we do with 'func'); we use the Ident.
-	fi, offset := a.fileAndOffset(obj.Pos())
-	fi.addLink(aLink{
-		start:   offset,
-		end:     offset + len(obj.Name()),
-		title:   fmt.Sprintf("type info for %s", obj.Name()),
-		onclick: fmt.Sprintf("onClickTypeInfo(%d)", fi.addData(v)),
-	})
-
-	// Add info for exported package-level types to the package info.
-	if obj.Exported() && isPackageLevel(obj) {
-		// TODO(adonovan): Path is not unique!
-		// It is possible to declare a non-test package called x_test.
-		a.result.pkgInfo(obj.Pkg().Path()).addType(v)
-	}
-}
-
-// -- utilities --------------------------------------------------------
-
-func isInterface(T types.Type) bool { return types.IsInterface(T) }
-
-// deref returns a pointer's element type; otherwise it returns typ.
-func deref(typ types.Type) types.Type {
-	if p, ok := typ.Underlying().(*types.Pointer); ok {
-		return p.Elem()
-	}
-	return typ
-}
-
-// isPackageLevel reports whether obj is a package-level object.
-func isPackageLevel(obj types.Object) bool {
-	return obj.Pkg().Scope().Lookup(obj.Name()) == obj
-}
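
The RESOLUTION markup above relies on types.RelativeTo so that names from the current package print unqualified in tooltips. A tiny self-contained illustration (the demo package here is hypothetical, not from the tree):

```go
package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

func main() {
	const src = `package demo

type Greeter struct{}

func (Greeter) Hello() string { return "hi" }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("example.com/demo", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	obj := pkg.Scope().Lookup("Greeter")
	// With RelativeTo(pkg), names defined in pkg are printed without a package prefix.
	fmt.Println(types.ObjectString(obj, types.RelativeTo(pkg))) // type Greeter struct{}
	fmt.Println(types.ObjectString(obj, nil))                   // type example.com/demo.Greeter struct{}
}
```
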
diff --git a/godoc/godoc_test.go b/godoc/godoc_test.go
index 33dbe3f..7f3470e 100644
--- a/godoc/godoc_test.go
+++ b/godoc/godoc_test.go
@@ -10,6 +10,8 @@
 	"go/token"
 	"strings"
 	"testing"
+
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func TestPkgLinkFunc(t *testing.T) {
@@ -368,3 +370,58 @@
 		t.Errorf("filterOutBuildAnnotations should not remove non-build tag comment")
 	}
 }
+
+func TestLinkifyGenerics(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("type params are not enabled at this Go version")
+	}
+
+	got := linkifySource(t, []byte(`
+package foo
+
+type T struct {
+	field *T
+}
+
+type ParametricStruct[T any] struct {
+	field *T
+}
+
+func F1[T any](arg T) { }
+
+func F2(arg T) { }
+
+func (*ParametricStruct[T]) M(arg T) { }
+
+func (*T) M(arg T) { }
+
+type ParametricStruct2[T1, T2 any] struct {
+	a T1
+	b T2
+}
+
+func (*ParametricStruct2[T1, T2]) M(a T1, b T2) { }
+
+
+`))
+
+	want := `type T struct {
+<span id="T.field"></span>field *<a href="#T">T</a>
+}
+type ParametricStruct[T <a href="/pkg/builtin/#any">any</a>] struct {
+<span id="ParametricStruct.field"></span>field *T
+}
+func F1[T <a href="/pkg/builtin/#any">any</a>](arg T) {}
+func F2(arg <a href="#T">T</a>) {}
+func (*<a href="#ParametricStruct">ParametricStruct</a>[T]) M(arg T) {}
+func (*<a href="#T">T</a>) M(arg <a href="#T">T</a>) {}
+type ParametricStruct2[T1, T2 <a href="/pkg/builtin/#any">any</a>] struct {
+<span id="ParametricStruct2.a"></span>a T1
+<span id="ParametricStruct2.b"></span>b T2
+}
+func (*<a href="#ParametricStruct2">ParametricStruct2</a>[T1, T2]) M(a T1, b T2) {}`
+
+	if got != want {
+		t.Errorf("got: %s\n\nwant: %s\n", got, want)
+	}
+}
diff --git a/godoc/linkify.go b/godoc/linkify.go
index e4add22..4a9c506 100644
--- a/godoc/linkify.go
+++ b/godoc/linkify.go
@@ -17,6 +17,8 @@
 	"go/token"
 	"io"
 	"strconv"
+
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // LinkifyText HTML-escapes source text and writes it to w.
@@ -89,6 +91,8 @@
 	// their ast.Ident nodes are visited.
 	linkMap := make(map[*ast.Ident]link)
 
+	typeParams := make(map[string]bool)
+
 	ast.Inspect(node, func(node ast.Node) bool {
 		switch n := node.(type) {
 		case *ast.Field:
@@ -105,6 +109,24 @@
 			}
 		case *ast.FuncDecl:
 			linkMap[n.Name] = link{}
+			if n.Recv != nil {
+				recv := n.Recv.List[0].Type
+				if r, isstar := recv.(*ast.StarExpr); isstar {
+					recv = r.X
+				}
+				switch x := recv.(type) {
+				case *ast.IndexExpr:
+					if ident, _ := x.Index.(*ast.Ident); ident != nil {
+						typeParams[ident.Name] = true
+					}
+				case *typeparams.IndexListExpr:
+					for _, index := range x.Indices {
+						if ident, _ := index.(*ast.Ident); ident != nil {
+							typeParams[ident.Name] = true
+						}
+					}
+				}
+			}
 		case *ast.TypeSpec:
 			linkMap[n.Name] = link{}
 		case *ast.AssignStmt:
@@ -183,8 +205,26 @@
 				links = append(links, l)
 			} else {
 				l := link{name: n.Name}
-				if n.Obj == nil && doc.IsPredeclared(n.Name) {
-					l.path = builtinPkgPath
+				if n.Obj == nil {
+					if doc.IsPredeclared(n.Name) {
+						l.path = builtinPkgPath
+					} else {
+						if typeParams[n.Name] {
+							// If a type parameter was declared then do not generate a link.
+							// Doing this is necessary because type parameter identifiers do not
+							// have their Decl recorded sometimes, see
+							// https://golang.org/issue/50956.
+							l = link{}
+						}
+					}
+				} else {
+					if n.Obj.Kind == ast.Typ {
+						if _, isfield := n.Obj.Decl.(*ast.Field); isfield {
+							// If an identifier is a type declared in a field assume it is a type
+							// parameter and do not generate a link.
+							l = link{}
+						}
+					}
 				}
 				links = append(links, l)
 			}
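
For context on the shapes the new linkify code inspects: with a Go 1.18+ parser, a generic method receiver such as *S[T] arrives as a StarExpr wrapping an IndexExpr whose Index is the type-parameter identifier. A small sketch (illustrative only, assumes a Go 1.18+ toolchain):

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	src := `package p

type S[T any] struct{}

func (*S[T]) M(arg T) {}
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	// The method's receiver type is *S[T]: a StarExpr around an IndexExpr whose
	// Index is the type-parameter identifier that linkify records in typeParams.
	m := f.Decls[1].(*ast.FuncDecl)
	recv := m.Recv.List[0].Type.(*ast.StarExpr).X.(*ast.IndexExpr)
	fmt.Println(recv.Index.(*ast.Ident).Name) // T
}
```
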
diff --git a/godoc/redirect/redirect_test.go b/godoc/redirect/redirect_test.go
index 756c0d0..1de3c6c 100644
--- a/godoc/redirect/redirect_test.go
+++ b/godoc/redirect/redirect_test.go
@@ -95,6 +95,7 @@
 			t.Errorf("(path: %q) unexpected error: %v", path, err)
 			continue
 		}
+		resp.Body.Close() // We only care about the headers, so close the body immediately.
 
 		if resp.StatusCode != want.status {
 			t.Errorf("(path: %q) got status %d, want %d", path, resp.StatusCode, want.status)
diff --git a/godoc/server.go b/godoc/server.go
index 48e8d95..9c5d556 100644
--- a/godoc/server.go
+++ b/godoc/server.go
@@ -30,6 +30,7 @@
 	"golang.org/x/tools/godoc/analysis"
 	"golang.org/x/tools/godoc/util"
 	"golang.org/x/tools/godoc/vfs"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // handlerServer is a migration from an old godoc http Handler type.
@@ -462,12 +463,19 @@
 	case *ast.FuncDecl:
 		name := d.Name.Name
 		if d.Recv != nil {
+			r := d.Recv.List[0].Type
+			if rr, isstar := r.(*ast.StarExpr); isstar {
+				r = rr.X
+			}
+
 			var typeName string
-			switch r := d.Recv.List[0].Type.(type) {
-			case *ast.StarExpr:
-				typeName = r.X.(*ast.Ident).Name
+			switch x := r.(type) {
 			case *ast.Ident:
-				typeName = r.Name
+				typeName = x.Name
+			case *ast.IndexExpr:
+				typeName = x.X.(*ast.Ident).Name
+			case *typeparams.IndexListExpr:
+				typeName = x.X.(*ast.Ident).Name
 			}
 			name = typeName + "_" + name
 		}
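
The receiver switch added to server.go can be mirrored with the ast.IndexListExpr type that Go 1.18+ exposes directly; the change itself goes through internal/typeparams so it still builds on older releases. A hedged sketch with a hypothetical Pair type:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// receiverTypeName recovers the receiver's base type name, as used for
// anchors like Pair_Set.
func receiverTypeName(d *ast.FuncDecl) string {
	r := d.Recv.List[0].Type
	if star, ok := r.(*ast.StarExpr); ok {
		r = star.X
	}
	switch x := r.(type) {
	case *ast.Ident:
		return x.Name
	case *ast.IndexExpr: // one type parameter, e.g. AStruct[T]
		return x.X.(*ast.Ident).Name
	case *ast.IndexListExpr: // several type parameters, e.g. Pair[K, V]
		return x.X.(*ast.Ident).Name
	}
	return ""
}

func main() {
	src := `package p

type Pair[K, V any] struct{}

func (p *Pair[K, V]) Set(k K, v V) {}
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	d := f.Decls[1].(*ast.FuncDecl)
	fmt.Println(receiverTypeName(d) + "_" + d.Name.Name) // Pair_Set
}
```
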
diff --git a/godoc/server_test.go b/godoc/server_test.go
index 0d48e9f..d6cc923 100644
--- a/godoc/server_test.go
+++ b/godoc/server_test.go
@@ -5,14 +5,17 @@
 package godoc
 
 import (
+	"go/doc"
 	"net/http"
 	"net/http/httptest"
 	"net/url"
+	"sort"
 	"strings"
 	"testing"
 	"text/template"
 
 	"golang.org/x/tools/godoc/vfs/mapfs"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // TestIgnoredGoFiles tests the scenario where a folder has no .go or .c files,
@@ -128,3 +131,115 @@
 	testServeBody(t, p, "/doc/test.html", "<strong>bold</strong>")
 	testServeBody(t, p, "/doc/test2.html", "<em>template</em>")
 }
+
+func TestGenerics(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("type params are not enabled at this Go version")
+	}
+
+	c := NewCorpus(mapfs.New(map[string]string{
+		"blah/blah.go": `package blah
+
+var A AStruct[int]
+
+type AStruct[T any] struct {
+	A string
+	X T
+}
+
+func (a *AStruct[T]) Method() T {
+	return a.X
+}
+
+func (a AStruct[T]) NonPointerMethod() T {
+	return a.X
+}
+
+func NewAStruct[T any](arg T) *AStruct[T] {
+	return &AStruct[T]{ X: arg }
+}
+
+type NonGenericStruct struct {
+	B int
+}
+
+func (b *NonGenericStruct) NonGenericMethod() int {
+	return b.B
+}
+
+func NewNonGenericStruct(arg int) *NonGenericStruct {
+	return &NonGenericStruct{arg}
+}
+
+type Pair[K, V any] struct {
+	K K
+	V V
+}
+
+func (p Pair[K, V]) Apply(kf func(K) K, vf func(V) V) Pair[K, V] {
+	return &Pair{ K: kf(p.K), V: vf(p.V) }
+}
+
+func (p *Pair[K, V]) Set(k K, v V) {
+	p.K = k
+	p.V = v
+}
+
+func NewPair[K, V any](k K, v V) Pair[K, V] {
+	return Pair[K, V]{ k, v }
+}
+`}))
+
+	srv := &handlerServer{
+		p: &Presentation{
+			Corpus: c,
+		},
+		c: c,
+	}
+	pInfo := srv.GetPageInfo("/blah/", "", NoFiltering, "linux", "amd64")
+	t.Logf("%v\n", pInfo)
+
+	findType := func(name string) *doc.Type {
+		for _, typ := range pInfo.PDoc.Types {
+			if typ.Name == name {
+				return typ
+			}
+		}
+		return nil
+	}
+
+	assertFuncs := func(typ *doc.Type, typFuncs []*doc.Func, funcs ...string) {
+		typfuncs := make([]string, len(typFuncs))
+		for i := range typFuncs {
+			typfuncs[i] = typFuncs[i].Name
+		}
+		sort.Strings(typfuncs)
+		sort.Strings(funcs)
+		if len(typfuncs) != len(funcs) {
+			t.Errorf("function mismatch for type %q, got: %q, want: %q", typ.Name, typfuncs, funcs)
+			return
+		}
+		for i := range funcs {
+			if funcs[i] != typfuncs[i] {
+				t.Errorf("function mismatch for type %q: got: %q, want: %q", typ.Name, typfuncs, funcs)
+				return
+			}
+		}
+	}
+
+	aStructType := findType("AStruct")
+	assertFuncs(aStructType, aStructType.Funcs, "NewAStruct")
+	assertFuncs(aStructType, aStructType.Methods, "Method", "NonPointerMethod")
+
+	nonGenericStructType := findType("NonGenericStruct")
+	assertFuncs(nonGenericStructType, nonGenericStructType.Funcs, "NewNonGenericStruct")
+	assertFuncs(nonGenericStructType, nonGenericStructType.Methods, "NonGenericMethod")
+
+	pairType := findType("Pair")
+	assertFuncs(pairType, pairType.Funcs, "NewPair")
+	assertFuncs(pairType, pairType.Methods, "Apply", "Set")
+
+	if len(pInfo.PDoc.Funcs) > 0 {
+		t.Errorf("unexpected functions in package documentation")
+	}
+}
diff --git a/gopls/README.md b/gopls/README.md
index 85de62a..9afc2e4 100644
--- a/gopls/README.md
+++ b/gopls/README.md
@@ -36,12 +36,9 @@
 directory is fine), and run:
 
 ```sh
-GO111MODULE=on go get golang.org/x/tools/gopls@latest
+go install golang.org/x/tools/gopls@latest
 ```
 
-**NOTE**: Do not use the `-u` flag, as it will update your dependencies to
-incompatible versions.
-
 Learn more in the [advanced installation
 instructions](doc/advanced.md#installing-unreleased-versions).
 
@@ -72,13 +69,21 @@
 
 ## Supported Go versions and build systems
 
-`gopls` follows the [Go Release
-Policy](https://golang.org/doc/devel/release.html#policy), meaning that it
-officially supports the last 2 major Go releases. Per
+`gopls` follows the
+[Go Release Policy](https://golang.org/doc/devel/release.html#policy),
+meaning that it officially supports the last 2 major Go releases. Per
 [issue #39146](https://golang.org/issues/39146), we attempt to maintain best-effort
 support for the last 4 major Go releases, but this support extends only to not
 breaking the build and avoiding easily fixable regressions.
 
+The following table shows the final gopls version that supports being built
+with a given Go version. Any more recent Go version missing from this table
+can still build the latest version of gopls.
+
+| Go Version  | Final gopls Version With Support |
+| ----------- | -------------------------------- |
+| Go 1.12     | [gopls@v0.7.5](https://github.com/golang/tools/releases/tag/gopls%2Fv0.7.5) |
+
 Our extended support is enforced via [continuous integration with older Go
 versions](doc/contributing.md#ci). This legacy Go CI may not block releases:
 test failures may be skipped rather than fixed. Furthermore, if a regression in
@@ -86,9 +91,11 @@
 that Go version in CI if it is 3 or 4 Go versions old.
 
 `gopls` currently only supports the `go` command, so if you are using a
-different build system, `gopls` will not work well. Bazel support is currently
-blocked on
-[bazelbuild/rules_go#512](https://github.com/bazelbuild/rules_go/issues/512).
+different build system, `gopls` will not work well. Bazel is not officially
+supported, but Bazel support is in development (see
+[bazelbuild/rules_go#512](https://github.com/bazelbuild/rules_go/issues/512)).
+You can follow [these instructions](https://github.com/bazelbuild/rules_go/wiki/Editor-setup)
+to configure your `gopls` to work with Bazel.
 
 ## Additional information
 
diff --git a/gopls/api-diff/api_diff.go b/gopls/api-diff/api_diff.go
new file mode 100644
index 0000000..167bdbd
--- /dev/null
+++ b/gopls/api-diff/api_diff.go
@@ -0,0 +1,274 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package main
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"flag"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"log"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"strings"
+
+	"golang.org/x/tools/internal/gocommand"
+	difflib "golang.org/x/tools/internal/lsp/diff"
+	"golang.org/x/tools/internal/lsp/diff/myers"
+	"golang.org/x/tools/internal/lsp/source"
+)
+
+var (
+	previousVersionFlag = flag.String("prev", "", "version to compare against")
+	versionFlag         = flag.String("version", "", "version being tagged, or current version if omitted")
+)
+
+func main() {
+	flag.Parse()
+
+	apiDiff, err := diffAPI(*versionFlag, *previousVersionFlag)
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Printf(`
+%s
+`, apiDiff)
+}
+
+type JSON interface {
+	String() string
+	Write(io.Writer)
+}
+
+func diffAPI(version, prev string) (string, error) {
+	ctx := context.Background()
+	previousApi, err := loadAPI(ctx, prev)
+	if err != nil {
+		return "", fmt.Errorf("load previous API: %v", err)
+	}
+	var currentApi *source.APIJSON
+	if version == "" {
+		currentApi = source.GeneratedAPIJSON
+	} else {
+		var err error
+		currentApi, err = loadAPI(ctx, version)
+		if err != nil {
+			return "", fmt.Errorf("load current API: %v", err)
+		}
+	}
+
+	b := &strings.Builder{}
+	if err := diff(b, previousApi.Commands, currentApi.Commands, "command", func(c *source.CommandJSON) string {
+		return c.Command
+	}, diffCommands); err != nil {
+		return "", fmt.Errorf("diff commands: %v", err)
+	}
+	if err := diff(b, previousApi.Analyzers, currentApi.Analyzers, "analyzer", func(a *source.AnalyzerJSON) string {
+		return a.Name
+	}, diffAnalyzers); err != nil {
+		return "", fmt.Errorf("diff analyzers: %v", err)
+	}
+	if err := diff(b, previousApi.Lenses, currentApi.Lenses, "code lens", func(l *source.LensJSON) string {
+		return l.Lens
+	}, diffLenses); err != nil {
+		return "", fmt.Errorf("diff lenses: %v", err)
+	}
+	for key, prev := range previousApi.Options {
+		current, ok := currentApi.Options[key]
+		if !ok {
+			panic(fmt.Sprintf("unexpected option key: %s", key))
+		}
+		if err := diff(b, prev, current, "option", func(o *source.OptionJSON) string {
+			return o.Name
+		}, diffOptions); err != nil {
+			return "", fmt.Errorf("diff options (%s): %v", key, err)
+		}
+	}
+
+	return b.String(), nil
+}
+
+func diff[T JSON](b *strings.Builder, previous, new []T, kind string, uniqueKey func(T) string, diffFunc func(*strings.Builder, T, T)) error {
+	prevJSON := collect(previous, uniqueKey)
+	newJSON := collect(new, uniqueKey)
+	for k := range newJSON {
+		delete(prevJSON, k)
+	}
+	for _, deleted := range prevJSON {
+		b.WriteString(fmt.Sprintf("%s %s was deleted.\n", kind, deleted))
+	}
+	for _, prev := range previous {
+		delete(newJSON, uniqueKey(prev))
+	}
+	if len(newJSON) > 0 {
+		b.WriteString(fmt.Sprintf("The following %s entries were added:\n", kind))
+		for _, n := range newJSON {
+			n.Write(b)
+			b.WriteByte('\n')
+		}
+	}
+	previousMap := collect(previous, uniqueKey)
+	for _, current := range new {
+		prev, ok := previousMap[uniqueKey(current)]
+		if !ok {
+			continue
+		}
+		c, p := bytes.NewBuffer(nil), bytes.NewBuffer(nil)
+		prev.Write(p)
+		current.Write(c)
+		if diff, err := diffStr(p.String(), c.String()); err == nil && diff != "" {
+			diffFunc(b, prev, current)
+			b.WriteString("\n--\n")
+		}
+	}
+	return nil
+}
+
+func collect[T JSON](args []T, uniqueKey func(T) string) map[string]T {
+	m := map[string]T{}
+	for _, arg := range args {
+		m[uniqueKey(arg)] = arg
+	}
+	return m
+}
+
+var goCmdRunner = gocommand.Runner{}
+
+func loadAPI(ctx context.Context, version string) (*source.APIJSON, error) {
+	tmpGopath, err := ioutil.TempDir("", "gopath*")
+	if err != nil {
+		return nil, fmt.Errorf("temp dir: %v", err)
+	}
+	defer os.RemoveAll(tmpGopath)
+
+	exampleDir := fmt.Sprintf("%s/src/example.com", tmpGopath)
+	if err := os.MkdirAll(exampleDir, 0776); err != nil {
+		return nil, fmt.Errorf("mkdir: %v", err)
+	}
+
+	if stdout, err := goCmdRunner.Run(ctx, gocommand.Invocation{
+		Verb:       "mod",
+		Args:       []string{"init", "example.com"},
+		WorkingDir: exampleDir,
+		Env:        append(os.Environ(), fmt.Sprintf("GOPATH=%s", tmpGopath)),
+	}); err != nil {
+		return nil, fmt.Errorf("go mod init failed: %v (stdout: %v)", err, stdout)
+	}
+	if stdout, err := goCmdRunner.Run(ctx, gocommand.Invocation{
+		Verb:       "install",
+		Args:       []string{fmt.Sprintf("golang.org/x/tools/gopls@%s", version)},
+		WorkingDir: exampleDir,
+		Env:        append(os.Environ(), fmt.Sprintf("GOPATH=%s", tmpGopath)),
+	}); err != nil {
+		return nil, fmt.Errorf("go install failed: %v (stdout: %v)", err, stdout.String())
+	}
+	cmd := exec.Cmd{
+		Path: filepath.Join(tmpGopath, "bin", "gopls"),
+		Args: []string{"gopls", "api-json"},
+		Dir:  tmpGopath,
+	}
+	out, err := cmd.Output()
+	if err != nil {
+		return nil, fmt.Errorf("output: %v", err)
+	}
+	apiJson := &source.APIJSON{}
+	if err := json.Unmarshal(out, apiJson); err != nil {
+		return nil, fmt.Errorf("unmarshal: %v", err)
+	}
+	return apiJson, nil
+}
+
+func diffCommands(b *strings.Builder, prev, current *source.CommandJSON) {
+	if prev.Title != current.Title {
+		b.WriteString(fmt.Sprintf("Title changed from %q to %q\n", prev.Title, current.Title))
+	}
+	if prev.Doc != current.Doc {
+		b.WriteString(fmt.Sprintf("Documentation changed from %q to %q\n", prev.Doc, current.Doc))
+	}
+	if prev.ArgDoc != current.ArgDoc {
+		b.WriteString("Arguments changed from " + formatBlock(prev.ArgDoc) + " to " + formatBlock(current.ArgDoc))
+	}
+	if prev.ResultDoc != current.ResultDoc {
+		b.WriteString("Results changed from " + formatBlock(prev.ResultDoc) + " to " + formatBlock(current.ResultDoc))
+	}
+}
+
+func diffAnalyzers(b *strings.Builder, previous, current *source.AnalyzerJSON) {
+	b.WriteString(fmt.Sprintf("Changes to analyzer %s:\n\n", current.Name))
+	if previous.Doc != current.Doc {
+		b.WriteString(fmt.Sprintf("Documentation changed from %q to %q\n", previous.Doc, current.Doc))
+	}
+	if previous.Default != current.Default {
+		b.WriteString(fmt.Sprintf("Default changed from %v to %v\n", previous.Default, current.Default))
+	}
+}
+
+func diffLenses(b *strings.Builder, previous, current *source.LensJSON) {
+	b.WriteString(fmt.Sprintf("Changes to code lens %s:\n\n", current.Title))
+	if previous.Title != current.Title {
+		b.WriteString(fmt.Sprintf("Title changed from %q to %q\n", previous.Title, current.Title))
+	}
+	if previous.Doc != current.Doc {
+		b.WriteString(fmt.Sprintf("Documentation changed from %q to %q\n", previous.Doc, current.Doc))
+	}
+}
+
+func diffOptions(b *strings.Builder, previous, current *source.OptionJSON) {
+	b.WriteString(fmt.Sprintf("Changes to option %s:\n\n", current.Name))
+	if previous.Doc != current.Doc {
+		diff, err := diffStr(previous.Doc, current.Doc)
+		if err != nil {
+			panic(err)
+		}
+		b.WriteString(fmt.Sprintf("Documentation changed:\n%s\n", diff))
+	}
+	if previous.Default != current.Default {
+		b.WriteString(fmt.Sprintf("Default changed from %q to %q\n", previous.Default, current.Default))
+	}
+	if previous.Hierarchy != current.Hierarchy {
+		b.WriteString(fmt.Sprintf("Categorization changed from %q to %q\n", previous.Hierarchy, current.Hierarchy))
+	}
+	if previous.Status != current.Status {
+		b.WriteString(fmt.Sprintf("Status changed from %q to %q\n", previous.Status, current.Status))
+	}
+	if previous.Type != current.Type {
+		b.WriteString(fmt.Sprintf("Type changed from %q to %q\n", previous.Type, current.Type))
+	}
+	// TODO(rstambler): Handle possibility of same number but different keys/values.
+	if len(previous.EnumKeys.Keys) != len(current.EnumKeys.Keys) {
+		b.WriteString(fmt.Sprintf("Enum keys changed from\n%s\n to \n%s\n", previous.EnumKeys, current.EnumKeys))
+	}
+	if len(previous.EnumValues) != len(current.EnumValues) {
+		b.WriteString(fmt.Sprintf("Enum values changed from\n%s\n to \n%s\n", previous.EnumValues, current.EnumValues))
+	}
+}
+
+func formatBlock(str string) string {
+	if str == "" {
+		return `""`
+	}
+	return "\n```\n" + str + "\n```\n"
+}
+
+func diffStr(before, after string) (string, error) {
+	// Add newlines to avoid newline messages in diff.
+	if before == after {
+		return "", nil
+	}
+	before += "\n"
+	after += "\n"
+	d, err := myers.ComputeEdits("", before, after)
+	if err != nil {
+		return "", err
+	}
+	return fmt.Sprintf("%q", difflib.ToUnified("previous", "current", before, d)), err
+}
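
The new api-diff tool leans on Go 1.18 generics for its collect/diff helpers. A self-contained sketch of the keying pattern, using hypothetical entry data rather than the real source.APIJSON types:

```go
package main

import "fmt"

type entry struct{ Name, Doc string }

// collect mirrors api_diff.go's generic helper: index a slice by a unique key.
func collect[T any](args []T, key func(T) string) map[string]T {
	m := map[string]T{}
	for _, a := range args {
		m[key(a)] = a
	}
	return m
}

func main() {
	previous := []entry{{"fillstruct", "old doc"}, {"dropped", "going away"}}
	current := []entry{{"fillstruct", "new doc"}, {"added", "brand new"}}

	prevByName := collect(previous, func(e entry) string { return e.Name })
	currByName := collect(current, func(e entry) string { return e.Name })

	// Report deletions, additions, and documentation changes.
	for name := range prevByName {
		if _, ok := currByName[name]; !ok {
			fmt.Printf("%s was deleted\n", name)
		}
	}
	for name, c := range currByName {
		p, ok := prevByName[name]
		switch {
		case !ok:
			fmt.Printf("%s was added\n", name)
		case p.Doc != c.Doc:
			fmt.Printf("%s: doc changed from %q to %q\n", name, p.Doc, c.Doc)
		}
	}
}
```
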
diff --git a/gopls/doc/advanced.md b/gopls/doc/advanced.md
index 0fa1139..c4e9eab 100644
--- a/gopls/doc/advanced.md
+++ b/gopls/doc/advanced.md
@@ -9,17 +9,25 @@
 version), run:
 
 ```sh
-GO111MODULE=on go get golang.org/x/tools/gopls@vX.Y.Z
+GO111MODULE=on go install golang.org/x/tools/gopls@vX.Y.Z
 ```
 
 Where `vX.Y.Z` is the desired version.
 
 ### Unstable versions
 
-To update `gopls` to the latest **unstable** version, use:
+To update `gopls` to the latest **unstable** version, use the following
+commands.
 
 ```sh
-GO111MODULE=on go get golang.org/x/tools/gopls@master golang.org/x/tools@master
+# Create an empty go.mod file, only for tracking requirements.
+cd $(mktemp -d)
+go mod init gopls-unstable
+
+# Use 'go get' to add requirements and to ensure they work together.
+go get -d golang.org/x/tools/gopls@master golang.org/x/tools@master
+
+go install golang.org/x/tools/gopls
 ```
 
 ## Working on the Go source distribution
@@ -36,38 +44,42 @@
 
 ## Working with generic code
 
-Gopls has experimental support for generic Go, as defined by the type
+Gopls has beta support for editing generic Go code, as defined by the type
 parameters proposal ([golang/go#43651](https://golang.org/issues/43651)) and
 type set addendum ([golang/go#45346](https://golang.org/issues/45346)).
 
-To enable this support, you need to build gopls with a version of Go that
-supports type parameters: the
-[dev.typeparams branch](https://github.com/golang/go/tree/dev.typeparams). This
-can be done by checking out this branch in the Go repository, or by using
-`golang.org/dl/gotip`:
+To enable this support, you need to **build gopls with a version of Go that
+supports generics**. The easiest way to do this is by installing the Go 1.18 Beta
+as described at
+[Tutorial: Getting started with generics#prerequisites](https://go.dev/doc/tutorial/generics),
+and then using this Go version to build gopls:
 
 ```
-$ go get golang.org/dl/gotip
-$ gotip download dev.typeparams
+$ go1.18beta2 install golang.org/x/tools/gopls@latest
 ```
 
-For building gopls with type parameter support, it is recommended that you
-build gopls at tip. External APIs are under active development on the
-`dev.typeparams` branch, so building gopls at tip minimizes the chances of
-a build failure (though it is still possible). To get enhanced gopls features
-for generic code, build gopls with the `typeparams` build constraint (though
-this increases your chances of a build failure).
+When using Go 1.18, it is strongly recommended that you install the latest
+version of `gopls`, or the latest **unstable** version as
+[described above](#installing-unreleased-versions).
+
+You also need to make `gopls` select the beta version of `go` (in `<GOROOT>/bin`
+where GOROOT is the location reported by `go1.18beta2 env GOROOT`) by adding
+it to your `PATH` or by configuring your editor.
+
+The `gopls` built with these instructions understands generic code. To actually
+run the generic code you develop, you must also use the beta version of the Go
+compiler. For example:
 
 ```
-$ GO111MODULE=on gotip get -tags=typeparams golang.org/x/tools/gopls@master golang.org/x/tools@master
+$ go1.18beta2 run .
 ```
 
-This will build a version of gopls that understands generic code. To actually
-run the generic code you develop, you must also tell the compiler to speak
-generics using the `-G=3` compiler flag. For example
+### Known issues
 
-```
-$ gotip run -gcflags=-G=3 .
-```
+  * [`staticcheck`](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#staticcheck-bool)
+    on generic code is not supported yet.
+
+Please follow the [v0.8.0](https://github.com/golang/go/milestone/244) milestone
+to see the list of go1.18-related known issues and our progress.
 
 [Go project]: https://go.googlesource.com/go
diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md
index 80ee65d..07f846d 100644
--- a/gopls/doc/analyzers.md
+++ b/gopls/doc/analyzers.md
@@ -184,6 +184,22 @@
 
 **Enabled by default.**
 
+## **infertypeargs**
+
+check for unnecessary type arguments in call expressions
+
+Explicit type arguments may be omitted from call expressions if they can be
+inferred from function arguments, or from other type arguments:
+
+	func f[T any](T) {}
+	
+	func _() {
+		f[string]("foo") // string could be inferred
+	}
+
+
+**Enabled by default.**
+
 ## **loopclosure**
 
 check references to loop variables from within nested functions
@@ -554,9 +570,15 @@
 
 **Disabled by default. Enable it by setting `"analyses": {"unusedwrite": true}`.**
 
+## **useany**
+
+check for constraints that could be simplified to "any"
+
+**Disabled by default. Enable it by setting `"analyses": {"useany": true}`.**
+
 ## **fillreturns**
 
-suggested fixes for "wrong number of return values (want %d, got %d)"
+suggest fixes for errors due to an incorrect number of return values
 
 This checker provides suggested fixes for type errors of the
 type "wrong number of return values (want %d, got %d)". For example:
@@ -590,10 +612,11 @@
 
 ## **noresultvalues**
 
-suggested fixes for "no result values expected"
+suggested fixes for unexpected return values
 
 This checker provides suggested fixes for type errors of the
-type "no result values expected". For example:
+type "no result values expected" or "too many return values".
+For example:
 	func z() { return nil }
 will turn into
 	func z() { return }
@@ -606,8 +629,17 @@
 suggested fixes for "undeclared name: <>"
 
 This checker provides suggested fixes for type errors of the
-type "undeclared name: <>". It will insert a new statement:
-"<> := ".
+type "undeclared name: <>". It will either insert a new statement,
+such as:
+
+"<> := "
+
+or a new function declaration, such as:
+
+func <>(inferred parameters) {
+	panic("implement me!")
+}
+
 
 **Enabled by default.**
 
@@ -623,4 +655,13 @@
 
 **Enabled by default.**
 
+## **stubmethods**
+
+stub methods analyzer
+
+This analyzer generates method stubs for concrete types
+in order to implement a target interface
+
+**Enabled by default.**
+
 <!-- END Analyzers: DO NOT MANUALLY EDIT THIS SECTION -->
diff --git a/gopls/doc/command-line.md b/gopls/doc/command-line.md
index 6865799..4659058 100644
--- a/gopls/doc/command-line.md
+++ b/gopls/doc/command-line.md
@@ -13,5 +13,3 @@
 There are two main reasons for this. The first is that we do not want users to rely on separate command line tools when they wish to do some task outside of an editor. The second is that the CLI assists in debugging. It is easier to reproduce behavior via single command.
 
 It is not a goal of `gopls` to be a high performance command line tool. Its command line is intended for single file/package user interaction speeds, not bulk processing.
-
-For more information, see the `gopls` [command line page](command-line.md).
diff --git a/gopls/doc/commands.md b/gopls/doc/commands.md
index 4c44f42..65fa5c5 100644
--- a/gopls/doc/commands.md
+++ b/gopls/doc/commands.md
@@ -84,6 +84,22 @@
 }
 ```
 
+### **Run go mod edit -go=version**
+Identifier: `gopls.edit_go_directive`
+
+Runs `go mod edit -go=version` for a module.
+
+Args:
+
+```
+{
+	// Any document URI within the relevant module.
+	"URI": string,
+	// The version to pass to `go mod edit -go`.
+	"Version": string,
+}
+```
+
 ### **Toggle gc_details**
 Identifier: `gopls.gc_details`
 
@@ -142,6 +158,37 @@
 }
 ```
 
+### **List imports of a file and its package**
+Identifier: `gopls.list_imports`
+
+Retrieve a list of imports in the given Go file, and the package it
+belongs to.
+
+Args:
+
+```
+{
+	// The file URI.
+	"URI": string,
+}
+```
+
+Result:
+
+```
+{
+	// Imports is a list of imports in the requested file.
+	"Imports": []{
+		"Path": string,
+		"Name": string,
+	},
+	// PackageImports is a list of all imports in the requested file's package.
+	"PackageImports": []{
+		"Path": string,
+	},
+}
+```
+
 ### **List known packages**
 Identifier: `gopls.list_known_packages`
 
@@ -218,6 +265,41 @@
 }
 ```
 
+### **Run vulncheck (experimental)**
+Identifier: `gopls.run_vulncheck_exp`
+
+Run vulnerability check (`govulncheck`).
+
+Args:
+
+```
+{
+	// Dir is the directory from which vulncheck will run.
+	"Dir": string,
+	// Package pattern. E.g. "", ".", "./...".
+	"Pattern": string,
+}
+```
+
+Result:
+
+```
+{
+	"Vuln": []{
+		"ID": string,
+		"Details": string,
+		"Aliases": []string,
+		"Symbol": string,
+		"PkgPath": string,
+		"ModPath": string,
+		"URL": string,
+		"CurrentVersion": string,
+		"FixedVersion": string,
+		"CallStacks": [][]golang.org/x/tools/internal/lsp/command.StackEntry,
+	},
+}
+```
+
 ### **Start the gopls debug server**
 Identifier: `gopls.start_debugging`
 
@@ -350,21 +432,4 @@
 }
 ```
 
-### **Query workspace metadata**
-Identifier: `gopls.workspace_metadata`
-
-Query the server for information about active workspaces.
-
-Result:
-
-```
-{
-	// All workspaces for this session.
-	"Workspaces": []{
-		"Name": string,
-		"ModuleDir": string,
-	},
-}
-```
-
 <!-- END Commands: DO NOT MANUALLY EDIT THIS SECTION -->
diff --git a/gopls/doc/contributing.md b/gopls/doc/contributing.md
index 307b601..99e4529 100644
--- a/gopls/doc/contributing.md
+++ b/gopls/doc/contributing.md
@@ -102,10 +102,8 @@
 
 Kokoro runs are triggered by the `Run-TryBot=1` label, just like TryBots, but
 unlike TryBots they do not automatically re-run if the "gopls-CI" result is
-removed in Gerrit. In order to force a new run, you must upload a new patch
-set. (Technically, Googlers can force a new run on an existing patch-set via an
-internal Kokoro dashboard, but unfortunately this ability can't be made more
-generally available).
+removed in Gerrit. To force a re-run of the Kokoro CI on a CL containing the
+`Run-TryBot=1` label, you can reply in Gerrit with the comment "kokoro rerun".
 
 ## Debugging
 
diff --git a/gopls/doc/features.md b/gopls/doc/features.md
index 9cb6864..dce6719 100644
--- a/gopls/doc/features.md
+++ b/gopls/doc/features.md
@@ -4,10 +4,10 @@
 currently under construction, so, for a comprehensive list, see the
 [Language Server Protocol](https://microsoft.github.io/language-server-protocol/).
 
-For now, only special features outside of the LSP are described below.
-
 ## Special features
 
+Here, only special features outside of the LSP are described.
+
 ### Symbol Queries
 
 Gopls supports some extended syntax for `workspace/symbol` requests, when using
@@ -21,4 +21,35 @@
 | `^`       | `^printf` | exact prefix |
 | `$`       | `printf$` | exact suffix |
 
+## Template Files
+
+Gopls provides some support for Go template files, that is, files that
+are parsed by `text/template` or `html/template`.
+Gopls recognizes template files based on their file extension, which may be
+configured by the
+[`templateExtensions`](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#templateextensions-string) setting.
+Making this list empty turns off template support.
+
+In template files, template support works inside
+the default `{{` delimiters. (Go template parsing
+allows the user to specify other delimiters, but
+gopls does not know how to do that.)
+
+Gopls template support includes the following features:
++ **Diagnostics**: if template parsing returns an error,
+it is presented as a diagnostic. (Missing functions do not produce errors.)
++ **Syntax Highlighting**: syntax highlighting is provided for template files.
++ **Definitions**: gopls provides jump-to-definition inside templates, though it does not understand scoping (all templates are considered to be in one global scope).
++ **References**: gopls provides find-references, with the same scoping limitation as definitions.
++ **Completions**: gopls will attempt to suggest completions inside templates.
+
+### Configuring your editor
+
+In addition to configuring `templateExtensions`, you may need to configure your
+editor or LSP client to activate `gopls` for template files. For example, in
+`VS Code` you will need to configure both
+[`files.associations`](https://code.visualstudio.com/docs/languages/identifiers)
+and `build.templateExtensions` (the gopls setting).
+
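+As a rough sketch, such a VS Code configuration might look like the snippet
+below. The `.tmpl` extension and the `tmpl` language identifier are only
+placeholders; substitute whatever extension and identifier your project
+actually uses.
+
+```json5
+{
+  // Associate the chosen extension with a template language mode in VS Code.
+  "files.associations": {
+    "*.tmpl": "tmpl"
+  },
+  "gopls": {
+    // Tell gopls to treat files with that extension as template files.
+    "build.templateExtensions": ["tmpl"]
+  }
+}
+```
+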
 <!--TODO(rstambler): Automatically generate a list of supported features.-->
+
diff --git a/gopls/doc/generate.go b/gopls/doc/generate.go
index 91d45ba..e63653d 100644
--- a/gopls/doc/generate.go
+++ b/gopls/doc/generate.go
@@ -2,6 +2,9 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build go1.16
+// +build go1.16
+
 // Command generate creates API (settings, etc) documentation in JSON and
 // Markdown for machine and human consumption.
 package main
@@ -26,7 +29,7 @@
 	"time"
 	"unicode"
 
-	"github.com/sanity-io/litter"
+	"github.com/jba/printsrc"
 	"golang.org/x/tools/go/ast/astutil"
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/lsp/command"
@@ -370,7 +373,6 @@
 }
 
 func loadCommands(pkg *packages.Package) ([]*source.CommandJSON, error) {
-
 	var commands []*source.CommandJSON
 
 	_, cmds, err := commandmeta.Load()
@@ -432,7 +434,7 @@
 				fmt.Fprintf(&b, "%s\t// %s\n", indent, line)
 			}
 		}
-		tag := fld.JSONTag
+		tag := strings.Split(fld.JSONTag, ",")[0]
 		if tag == "" {
 			tag = fld.Name
 		}
@@ -533,28 +535,14 @@
 }
 
 func rewriteAPI(_ []byte, api *source.APIJSON) ([]byte, error) {
-	buf := bytes.NewBuffer(nil)
-	apiStr := litter.Options{
-		HomePackage: "source",
-	}.Sdump(api)
-	// Massive hack: filter out redundant types from the composite literal.
-	apiStr = strings.ReplaceAll(apiStr, "&OptionJSON", "")
-	apiStr = strings.ReplaceAll(apiStr, ": []*OptionJSON", ":")
-	apiStr = strings.ReplaceAll(apiStr, "&CommandJSON", "")
-	apiStr = strings.ReplaceAll(apiStr, "&LensJSON", "")
-	apiStr = strings.ReplaceAll(apiStr, "&AnalyzerJSON", "")
-	apiStr = strings.ReplaceAll(apiStr, "  EnumValue{", "{")
-	apiStr = strings.ReplaceAll(apiStr, "  EnumKey{", "{")
-	apiBytes, err := format.Source([]byte(apiStr))
-	if err != nil {
+	var buf bytes.Buffer
+	fmt.Fprintf(&buf, "// Code generated by \"golang.org/x/tools/gopls/doc/generate\"; DO NOT EDIT.\n\npackage source\n\nvar GeneratedAPIJSON = ")
+	if err := printsrc.NewPrinter("golang.org/x/tools/internal/lsp/source").Fprint(&buf, api); err != nil {
 		return nil, err
 	}
-	fmt.Fprintf(buf, "// Code generated by \"golang.org/x/tools/gopls/doc/generate\"; DO NOT EDIT.\n\npackage source\n\nvar GeneratedAPIJSON = %s\n", apiBytes)
-	return buf.Bytes(), nil
+	return format.Source(buf.Bytes())
 }
 
-var parBreakRE = regexp.MustCompile("\n{2,}")
-
 type optionsGroup struct {
 	title   string
 	final   string
@@ -583,10 +571,8 @@
 			writeTitle(section, h.final, level)
 			for _, opt := range h.options {
 				header := strMultiply("#", level+1)
-				fmt.Fprintf(section, "%s **%v** *%v*\n\n", header, opt.Name, opt.Type)
-				writeStatus(section, opt.Status)
-				enumValues := collectEnums(opt)
-				fmt.Fprintf(section, "%v%v\nDefault: `%v`.\n\n", opt.Doc, enumValues, opt.Default)
+				section.Write([]byte(fmt.Sprintf("%s ", header)))
+				opt.Write(section)
 			}
 		}
 		var err error
@@ -657,38 +643,6 @@
 	return groups
 }
 
-func collectEnums(opt *source.OptionJSON) string {
-	var b strings.Builder
-	write := func(name, doc string, index, len int) {
-		if doc != "" {
-			unbroken := parBreakRE.ReplaceAllString(doc, "\\\n")
-			fmt.Fprintf(&b, "* %s", unbroken)
-		} else {
-			fmt.Fprintf(&b, "* `%s`", name)
-		}
-		if index < len-1 {
-			fmt.Fprint(&b, "\n")
-		}
-	}
-	if len(opt.EnumValues) > 0 && opt.Type == "enum" {
-		b.WriteString("\nMust be one of:\n\n")
-		for i, val := range opt.EnumValues {
-			write(val.Value, val.Doc, i, len(opt.EnumValues))
-		}
-	} else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) {
-		b.WriteString("\nCan contain any of:\n\n")
-		for i, val := range opt.EnumKeys.Keys {
-			write(val.Name, val.Doc, i, len(opt.EnumKeys.Keys))
-		}
-	}
-	return b.String()
-}
-
-func shouldShowEnumKeysInSettings(name string) bool {
-	// Both of these fields have too many possible options to print.
-	return !hardcodedEnumKeys(name)
-}
-
 func hardcodedEnumKeys(name string) bool {
 	return name == "analyses" || name == "codelenses"
 }
@@ -710,20 +664,6 @@
 	fmt.Fprintf(w, "%s %s\n\n", strMultiply("#", level), capitalize(title))
 }
 
-func writeStatus(section io.Writer, status string) {
-	switch status {
-	case "":
-	case "advanced":
-		fmt.Fprint(section, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n")
-	case "debug":
-		fmt.Fprint(section, "**This setting is for debugging purposes only.**\n\n")
-	case "experimental":
-		fmt.Fprint(section, "**This setting is experimental and may be deleted.**\n\n")
-	default:
-		fmt.Fprintf(section, "**Status: %s.**\n\n", status)
-	}
-}
-
 func capitalize(s string) string {
 	return string(unicode.ToUpper(rune(s[0]))) + s[1:]
 }
@@ -739,13 +679,7 @@
 func rewriteCommands(doc []byte, api *source.APIJSON) ([]byte, error) {
 	section := bytes.NewBuffer(nil)
 	for _, command := range api.Commands {
-		fmt.Fprintf(section, "### **%v**\nIdentifier: `%v`\n\n%v\n\n", command.Title, command.Command, command.Doc)
-		if command.ArgDoc != "" {
-			fmt.Fprintf(section, "Args:\n\n```\n%s\n```\n\n", command.ArgDoc)
-		}
-		if command.ResultDoc != "" {
-			fmt.Fprintf(section, "Result:\n\n```\n%s\n```\n\n", command.ResultDoc)
-		}
+		command.Write(section)
 	}
 	return replaceSection(doc, "Commands", section.Bytes())
 }
diff --git a/gopls/doc/generate_test.go b/gopls/doc/generate_test.go
index 521d01c..137a646 100644
--- a/gopls/doc/generate_test.go
+++ b/gopls/doc/generate_test.go
@@ -2,6 +2,9 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build go1.16
+// +build go1.16
+
 package main
 
 import (
diff --git a/gopls/doc/semantictokens.md b/gopls/doc/semantictokens.md
index fc541fb..c9124b7 100644
--- a/gopls/doc/semantictokens.md
+++ b/gopls/doc/semantictokens.md
@@ -16,7 +16,7 @@
 
 The 22 semantic tokens are `namespace`, `type`, `class`, `enum`, `interface`,
 		`struct`, `typeParameter`, `parameter`, `variable`, `property`, `enumMember`,
-		`event`, `function`, `member`, `macro`, `keyword`, `modifier`, `comment`,
+		`event`, `function`, `method`, `macro`, `keyword`, `modifier`, `comment`,
 		`string`, `number`, `regexp`, `operator`.
 
 The 10 modifiers are `declaration`, `definition`, `readonly`, `static`,
@@ -72,7 +72,7 @@
 1. __`type`__ Objects of type ```types.TypeName``` are marked `type`.
 If they are also ```types.Basic```
 the modifier is `defaultLibrary`. (And in ```type B struct{C}```, ```B``` has modifier `definition`.)
-1. __`parameter`__ The formal arguments in ```ast.FuncDecl``` nodes are marked `parameter`.
+1. __`parameter`__ The formal arguments in ```ast.FuncDecl``` and ```ast.FuncType``` nodes are marked `parameter`.
 1. __`variable`__  Identifiers in the
 scope of ```const``` are modified with `readonly`. ```nil``` is usually a `variable` modified with both
 `readonly` and `defaultLibrary`. (```nil``` is a predefined identifier; the user can redefine it,
@@ -80,8 +80,8 @@
 not surprisingly, marked `variable`. Identifiers being defined (node ```ast.GenDecl```) are modified
 by `definition` and, if appropriate, `readonly`. Receivers (in method declarations) are
 `variable`.
-1. __`member`__ Members are marked at their definition (```func (x foo) bar() {}```) or declaration
-in an ```interface```. Members are not marked where they are used.
+1. __`method`__ Methods are marked at their definition (```func (x foo) bar() {}```) or declaration
+in an ```interface```. Methods are not marked where they are used.
 In ```x.bar()```, ```x``` will be marked
 either as a `namespace` if it is a package name, or as a `variable` if it is an interface value,
 so distinguishing ```bar``` seemed superfluous.
diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md
index 8b0ec83..092a3c7 100644
--- a/gopls/doc/settings.md
+++ b/gopls/doc/settings.md
@@ -70,6 +70,14 @@
 
 Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`
 
+Default: `["-node_modules"]`.
+
+#### **templateExtensions** *[]string*
+
+templateExtensions gives the extensions of file names that are treated
+as template files. (The extension
+is the part of the file name after the final dot.)
+
 Default: `[]`.
 
 #### **memoryMode** *enum*
@@ -86,8 +94,8 @@
 * `"DegradeClosed"`: In DegradeClosed mode, `gopls` will collect less information about
 packages without open files. As a result, features like Find
 References and Rename will miss results in such packages.
-
 * `"Normal"`
+
 Default: `"Normal"`.
 
 #### **expandWorkspaceToModule** *bool*
@@ -112,15 +120,6 @@
 
 Default: `false`.
 
-#### **experimentalTemplateSupport** *bool*
-
-**This setting is experimental and may be deleted.**
-
-experimentalTemplateSupport opts into the experimental support
-for template files.
-
-Default: `false`.
-
 #### **experimentalPackageCacheKey** *bool*
 
 **This setting is experimental and may be deleted.**
@@ -188,7 +187,7 @@
 
 codelenses overrides the enabled/disabled state of code lenses. See the
 "Code Lenses" section of the
-[Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md)
+[Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses)
 for the list of supported lenses.
 
 Example Usage:
@@ -196,7 +195,7 @@
 ```json5
 "gopls": {
 ...
-  "codelens": {
+  "codelenses": {
     "generate": false,  // Don't show the `go generate` lens.
     "gc_details": true  // Show a code lens toggling the display of gc's choices.
   }
@@ -248,13 +247,14 @@
 * `"CaseInsensitive"`
 * `"CaseSensitive"`
 * `"Fuzzy"`
+
 Default: `"Fuzzy"`.
 
 ##### **experimentalPostfixCompletions** *bool*
 
 **This setting is experimental and may be deleted.**
 
-experimentalPostfixCompletions enables artifical method snippets
+experimentalPostfixCompletions enables artificial method snippets
 such as "someSlice.sort!".
 
 Default: `true`.
@@ -299,11 +299,8 @@
 Can contain any of:
 
 * `"bounds"` controls bounds checking diagnostics.
-
 * `"escape"` controls diagnostics about escape choices.
-
 * `"inline"` controls diagnostics about inlining choices.
-
 * `"nil"` controls nil checks.
 
 Default: `{"bounds":true,"escape":true,"inline":true,"nil":true}`.
@@ -351,8 +348,8 @@
 This format separates the signature from the documentation, so that the client
 can do more manipulation of these fields.\
 This should only be used by clients that support this behavior.
-
 * `"SynopsisDocumentation"`
+
 Default: `"FullDocumentation"`.
 
 ##### **linkTarget** *string*
@@ -385,6 +382,7 @@
 * `"Both"`
 * `"Definition"`
 * `"Link"`
+
 Default: `"Both"`.
 
 ##### **symbolMatcher** *enum*
@@ -399,7 +397,8 @@
 * `"CaseSensitive"`
 * `"FastFuzzy"`
 * `"Fuzzy"`
-Default: `"Fuzzy"`.
+
+Default: `"FastFuzzy"`.
 
 ##### **symbolStyle** *enum*
 
@@ -412,7 +411,7 @@
 ```json5
 "gopls": {
 ...
-  "symbolStyle": "dynamic",
+  "symbolStyle": "Dynamic",
 ...
 }
 ```
@@ -423,10 +422,8 @@
 match for the given symbol query. Here a "qualifier" is any "/" or "."
 delimited suffix of the fully qualified symbol. i.e. "to/pkg.Foo.Field" or
 just "Foo.Field".
-
 * `"Full"` is fully qualified symbols, i.e.
 "path/to/pkg.Foo.Field".
-
 * `"Package"` is package qualified symbols i.e.
 "pkg.Foo.Field".
 
diff --git a/gopls/doc/subl.md b/gopls/doc/subl.md
index bd130ef..a2b1585 100644
--- a/gopls/doc/subl.md
+++ b/gopls/doc/subl.md
@@ -56,6 +56,10 @@
     "settings": {
         "LSP": {
             "gopls": {
+                // To use a specific version of gopls with Sublime Text LSP (e.g., to try new features in development)
+                "command": [
+                    "/path/to/your/go/bin/gopls"
+                ],
                 "env": {
                     "PATH": "/path/to/your/go-dev/bin:/path/to/your/go/bin",
                     "GOPATH": "",
diff --git a/gopls/doc/vim.md b/gopls/doc/vim.md
index a6b40a4..887a246 100644
--- a/gopls/doc/vim.md
+++ b/gopls/doc/vim.md
@@ -93,7 +93,7 @@
   "languageserver": {
     "golang": {
       "command": "gopls",
-      "rootPatterns": ["go.mod", ".vim/", ".git/", ".hg/"],
+      "rootPatterns": ["go.work", "go.mod", ".vim/", ".git/", ".hg/"],
       "filetypes": ["go"],
       "initializationOptions": {
         "usePlaceholders": true
@@ -102,6 +102,13 @@
   }
 ```
 
+If you use `go.work` files, you may want to set the
+`workspace.workspaceFolderCheckCwd` option. This will force coc.nvim to search
+parent directories for `go.work` files, even if the current open directory has
+a `go.mod` file. See the
+[coc.nvim documentation](https://github.com/neoclide/coc.nvim/wiki/Using-workspaceFolders)
+for more details.
+
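+As an illustrative sketch of where this goes in `coc-settings.json` (the value
+shown is an assumption; consult the coc.nvim documentation linked above for
+the value that matches the behavior you want):
+
+```json5
+{
+  // Resolve the workspace folder from parent directories (e.g. the one
+  // containing go.work) rather than stopping at the current directory.
+  "workspace.workspaceFolderCheckCwd": false
+}
+```
+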
 Other [settings](settings.md) can be added in `initializationOptions` too.
 
 The `editor.action.organizeImport` code action will auto-format code and add missing imports. To run this automatically on save, add the following line to your `init.vim`:
@@ -141,8 +148,12 @@
 ```vim
 lua <<EOF
   lspconfig = require "lspconfig"
+  util = require "lspconfig/util"
+
   lspconfig.gopls.setup {
     cmd = {"gopls", "serve"},
+    filetypes = {"go", "gomod"},
+    root_dir = util.root_pattern("go.work", "go.mod", ".git"),
     settings = {
       gopls = {
         analyses = {
@@ -164,38 +175,23 @@
 lua <<EOF
   -- …
 
-  function goimports(timeout_ms)
-    local context = { only = { "source.organizeImports" } }
-    vim.validate { context = { context, "t", true } }
-
+  function OrgImports(wait_ms)
     local params = vim.lsp.util.make_range_params()
-    params.context = context
-
-    -- See the implementation of the textDocument/codeAction callback
-    -- (lua/vim/lsp/handler.lua) for how to do this properly.
-    local result = vim.lsp.buf_request_sync(0, "textDocument/codeAction", params, timeout_ms)
-    if not result or next(result) == nil then return end
-    local actions = result[1].result
-    if not actions then return end
-    local action = actions[1]
-
-    -- textDocument/codeAction can return either Command[] or CodeAction[]. If it
-    -- is a CodeAction, it can have either an edit, a command or both. Edits
-    -- should be executed first.
-    if action.edit or type(action.command) == "table" then
-      if action.edit then
-        vim.lsp.util.apply_workspace_edit(action.edit)
+    params.context = {only = {"source.organizeImports"}}
+    local result = vim.lsp.buf_request_sync(0, "textDocument/codeAction", params, wait_ms)
+    for _, res in pairs(result or {}) do
+      for _, r in pairs(res.result or {}) do
+        if r.edit then
+          vim.lsp.util.apply_workspace_edit(r.edit)
+        else
+          vim.lsp.buf.execute_command(r.command)
+        end
       end
-      if type(action.command) == "table" then
-        vim.lsp.buf.execute_command(action.command)
-      end
-    else
-      vim.lsp.buf.execute_command(action)
     end
   end
 EOF
 
-autocmd BufWritePre *.go lua goimports(1000)
+autocmd BufWritePre *.go lua OrgImports(1000)
 ```
 
 (Taken from the [discussion][nvim-lspconfig-imports] on Neovim issue tracker.)
@@ -225,5 +221,5 @@
 [govim-install]: https://github.com/myitcv/govim/blob/master/README.md#govim---go-development-plugin-for-vim8
 [nvim-docs]: https://neovim.io/doc/user/lsp.html
 [nvim-install]: https://github.com/neovim/neovim/wiki/Installing-Neovim
-[nvim-lspconfig]: https://github.com/neovim/nvim-lspconfig/blob/master/CONFIG.md#gopls
+[nvim-lspconfig]: https://github.com/neovim/nvim-lspconfig/blob/master/doc/server_configurations.md#gopls
 [nvim-lspconfig-imports]: https://github.com/neovim/nvim-lspconfig/issues/115
diff --git a/gopls/doc/workspace.md b/gopls/doc/workspace.md
index 821ba49..610afbe 100644
--- a/gopls/doc/workspace.md
+++ b/gopls/doc/workspace.md
@@ -19,40 +19,60 @@
 
 ### Multiple modules
 
-As of Jan 2021, if you are working with multiple modules or nested modules, you
-will need to create a "workspace folder" for each module. This means that each
-module has its own scope, and features will not work across modules. We are
-currently working on addressing this limitation--see details about
-[experimental workspace module mode](#workspace-module-experimental)
-below.
+Gopls has several alternatives for working on multiple modules simultaneously,
+described below. Starting with Go 1.18, Go workspaces are the preferred solution.
+
+#### Go workspaces (Go 1.18+)
+
+Starting with Go 1.18, the `go` command has native support for multi-module
+workspaces, via [`go.work`](https://go.dev/ref/mod#workspaces) files. These
+files are recognized by gopls starting with `gopls@v0.8.0`.
+
+The easiest way to work on multiple modules in Go 1.18 and later is therefore
+to create a `go.work` file containing the modules you wish to work on, and set
+your workspace root to the directory containing the `go.work` file.
+
+For example, suppose this repo is checked out into the `$WORK/tools` directory.
+We can work on both `golang.org/x/tools` and `golang.org/x/tools/gopls`
+simultaneously by creating a `go.work` file:
+
+```
+cd $WORK
+go work init
+go work use tools tools/gopls
+```
+
+...followed by opening the `$WORK` directory in our editor.
+
+#### Experimental workspace module (Go 1.17 and earlier)
+
+With earlier versions of Go, `gopls` can simulate multi-module workspaces by
+creating a synthetic module requiring the modules in the workspace root.
+See [the design document](https://github.com/golang/proposal/blob/master/design/37720-gopls-workspaces.md)
+for more information.
+
+This feature is experimental, and will eventually be removed once `go.work`
+files are accepted by all supported Go versions.
+
+You can enable this feature by configuring the
+[experimentalWorkspaceModule](settings.md#experimentalworkspacemodule-bool)
+setting.
+
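+As a minimal sketch, in an editor that passes `gopls` settings as JSON (the
+exact nesting depends on your editor or LSP client), enabling it might look
+like:
+
+```json5
+"gopls": {
+  // Opt in to the experimental workspace module. This is only relevant on
+  // Go 1.17 and earlier; on Go 1.18+ use go.work files instead.
+  "experimentalWorkspaceModule": true
+}
+```
+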
+#### Multiple workspace folders
+
+If neither of the above solutions works, and your editor allows configuring the
+set of
+["workspace folders"](https://microsoft.github.io/language-server-protocol/specifications/specification-3-17/#workspaceFolder)
+used during your LSP session, you can still work on multiple modules by adding
+a workspace folder at each module root (the locations of `go.mod` files). This
+means that each module has its own scope, and features will not work across
+modules.
 
 In VS Code, you can create a workspace folder by setting up a
 [multi-root workspace](https://code.visualstudio.com/docs/editor/multi-root-workspaces).
 View the [documentation for your editor plugin](../README.md#editor) to learn how to
 configure a workspace folder in your editor.
 
-#### Workspace module (experimental)
-
-Many `gopls` users would like to work with multiple modules at the same time
-([golang/go#32394](https://github.com/golang/go/issues/32394)), and
-specifically, have features that work across modules. We plan to add support
-for this via a concept called the "workspace module", which is described in
-[this design document](https://github.com/golang/proposal/blob/master/design/37720-gopls-workspaces.md).
-This feature works by creating a temporary module that requires all of your
-workspace modules, meaning all of their dependencies must be compatible.
-
-The workspace module feature is currently available as an opt-in experiment,
-and it will allow you to work with multiple modules without creating workspace
-folders for each module. You can try it out by configuring the
-[experimentalWorkspaceModule](settings.md#experimentalworkspacemodule-bool)
-setting. If you try it and encounter issues, please
-[report them](https://github.com/golang/go/issues/new) so we can address them
-before the feature is enabled by default.
-
-You can follow our progress on the workspace module work by looking at the
-open issues in the
-[gopls/workspace-module milestone](https://github.com/golang/go/milestone/179).
-
 ### GOPATH mode
 
 When opening a directory within your GOPATH, the workspace scope will be just
diff --git a/gopls/go.mod b/gopls/go.mod
index f84e502..ed04fd0 100644
--- a/gopls/go.mod
+++ b/gopls/go.mod
@@ -1,23 +1,27 @@
 module golang.org/x/tools/gopls
 
-go 1.17
+go 1.18
 
 require (
-	github.com/BurntSushi/toml v0.4.1 // indirect
-	github.com/google/go-cmp v0.5.6
-	github.com/google/safehtml v0.0.2 // indirect
+	github.com/google/go-cmp v0.5.7
+	github.com/jba/printsrc v0.2.2
 	github.com/jba/templatecheck v0.6.0
-	github.com/sanity-io/litter v1.5.1
 	github.com/sergi/go-diff v1.1.0
-	golang.org/x/mod v0.4.2
+	golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3
+	golang.org/x/sys v0.0.0-20220209214540-3681064d5158
+	golang.org/x/tools v0.1.9
+	golang.org/x/vuln v0.0.0-20220324005316-18fd808f5c7f
+	honnef.co/go/tools v0.2.2
+	mvdan.cc/gofumpt v0.3.0
+	mvdan.cc/xurls/v2 v2.4.0
+)
+
+require (
+	github.com/BurntSushi/toml v1.0.0 // indirect
+	github.com/google/safehtml v0.0.2 // indirect
 	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
-	golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e
 	golang.org/x/text v0.3.7 // indirect
-	golang.org/x/tools v0.1.5
 	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
-	honnef.co/go/tools v0.2.0
-	mvdan.cc/gofumpt v0.1.1
-	mvdan.cc/xurls/v2 v2.3.0
 )
 
 replace golang.org/x/tools => ../
diff --git a/gopls/go.sum b/gopls/go.sum
index f026fde..759d7ef 100644
--- a/gopls/go.sum
+++ b/gopls/go.sum
@@ -1,56 +1,67 @@
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/BurntSushi/toml v0.4.1 h1:GaI7EiDXDRfa8VshkTj7Fym7ha+y8/XxIgD2okUIjLw=
-github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
-github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/BurntSushi/toml v1.0.0 h1:dtDWrepsVPfW9H/4y7dDgFc2MBUSeJhlaDtK13CxFlU=
+github.com/BurntSushi/toml v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
-github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/frankban/quicktest v1.14.2 h1:SPb1KFFmM+ybpEjPUhCCkZOM5xlovT5UbrMvWnXyBns=
+github.com/frankban/quicktest v1.14.2/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
+github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o=
+github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
 github.com/google/safehtml v0.0.2 h1:ZOt2VXg4x24bW0m2jtzAOkhoXV0iM8vNKc0paByCZqM=
 github.com/google/safehtml v0.0.2/go.mod h1:L4KWwDsUJdECRAEpZoBn3O64bQaywRscowZjJAzjHnU=
+github.com/jba/printsrc v0.2.2 h1:9OHK51UT+/iMAEBlQIIXW04qvKyF3/vvLuwW/hL8tDU=
+github.com/jba/printsrc v0.2.2/go.mod h1:1xULjw59sL0dPdWpDoVU06TIEO/Wnfv6AHRpiElTwYM=
 github.com/jba/templatecheck v0.6.0 h1:SwM8C4hlK/YNLsdcXStfnHWE2HKkuTVwy5FKQHt5ro8=
 github.com/jba/templatecheck v0.6.0/go.mod h1:/1k7EajoSErFI9GLHAsiIJEaNLt3ALKNw2TV7z2SYv4=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
-github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
-github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
-github.com/sanity-io/litter v1.5.1 h1:dwnrSypP6q56o3lFxTU+t2fwQ9A+U5qrXVO4Qg9KwVU=
-github.com/sanity-io/litter v1.5.1/go.mod h1:5Z71SvaYy5kcGtyglXOC9rrUi3c1E8CamFWjQsazTh0=
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
+github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg=
+github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
 github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
 github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo=
-golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
+golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o=
+golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA=
-golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220209214540-3681064d5158 h1:rm+CHSpPEEW2IsXUib1ThaHIjuBVZjxNgSKmBLFfD4c=
+golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/vuln v0.0.0-20220324005316-18fd808f5c7f h1:9dMzk88fnONra7zrEalqkRMGa9jMGf9B5mdzhYVyI28=
+golang.org/x/vuln v0.0.0-20220324005316-18fd808f5c7f/go.mod h1:RMxFJYUtgT86cNTSzXJAe51WiT0Vg5LCGePfAGufJCc=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
@@ -62,9 +73,10 @@
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-honnef.co/go/tools v0.2.0 h1:ws8AfbgTX3oIczLPNPCu5166oBg9ST2vNs0rcht+mDE=
-honnef.co/go/tools v0.2.0/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY=
-mvdan.cc/gofumpt v0.1.1 h1:bi/1aS/5W00E2ny5q65w9SnKpWEF/UIOqDYBILpo9rA=
-mvdan.cc/gofumpt v0.1.1/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48=
-mvdan.cc/xurls/v2 v2.3.0 h1:59Olnbt67UKpxF1EwVBopJvkSUBmgtb468E4GVWIZ1I=
-mvdan.cc/xurls/v2 v2.3.0/go.mod h1:AjuTy7gEiUArFMjgBBDU4SMxlfUYsRokpJQgNWOt3e4=
+honnef.co/go/tools v0.2.2 h1:MNh1AVMyVX23VUHE2O27jm6lNj3vjO5DexS4A1xvnzk=
+honnef.co/go/tools v0.2.2/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY=
+mvdan.cc/gofumpt v0.3.0 h1:kTojdZo9AcEYbQYhGuLf/zszYthRdhDNDUi2JKTxas4=
+mvdan.cc/gofumpt v0.3.0/go.mod h1:0+VyGZWleeIj5oostkOex+nDBA0eyavuDnDusAJ8ylo=
+mvdan.cc/unparam v0.0.0-20211214103731-d0ef000c54e5 h1:Jh3LAeMt1eGpxomyu3jVkmVZWW2MxZ1qIIV2TZ/nRio=
+mvdan.cc/xurls/v2 v2.4.0 h1:tzxjVAj+wSBmDcF6zBB7/myTy3gX9xvi8Tyr28AuQgc=
+mvdan.cc/xurls/v2 v2.4.0/go.mod h1:+GEjq9uNjqs8LQfM9nVnM8rff0OQ5Iash5rzX+N1CSg=
diff --git a/gopls/integration/govim/run_local.sh b/gopls/integration/govim/run_local.sh
index b7aba5e..b5c284f 100755
--- a/gopls/integration/govim/run_local.sh
+++ b/gopls/integration/govim/run_local.sh
@@ -13,7 +13,7 @@
 Args:
   --sudo     run docker with sudo
   --short    run `go test` with `-short`
-  --version  run on the specific tagged Go version (or latest) rather
+  --version  run on the specific tagged govim version (or latest) rather
              than the default branch
 
 Run govim tests against HEAD using local docker.
@@ -71,7 +71,7 @@
 ${SUDO_IF_NEEDED}docker run --rm -t \
   -v "${tools_dir}:/src/tools" \
   -w "/src/tools/gopls" \
-  golang:latest \
+  golang:rc \
   go build -o $(basename ${temp_gopls})
 
 # Build the test harness. Here we are careful to pass in a very limited build
diff --git a/gopls/internal/hooks/hooks.go b/gopls/internal/hooks/hooks.go
index 390967d..023aefe 100644
--- a/gopls/internal/hooks/hooks.go
+++ b/gopls/internal/hooks/hooks.go
@@ -9,8 +9,8 @@
 
 import (
 	"context"
-	"regexp"
 
+	"golang.org/x/tools/gopls/internal/vulncheck"
 	"golang.org/x/tools/internal/lsp/source"
 	"mvdan.cc/gofumpt/format"
 	"mvdan.cc/xurls/v2"
@@ -21,17 +21,14 @@
 	if options.GoDiff {
 		options.ComputeEdits = ComputeEdits
 	}
-	options.URLRegexp = relaxedFullWord
-	options.GofumptFormat = func(ctx context.Context, src []byte) ([]byte, error) {
-		return format.Source(src, format.Options{})
+	options.URLRegexp = xurls.Relaxed()
+	options.GofumptFormat = func(ctx context.Context, langVersion, modulePath string, src []byte) ([]byte, error) {
+		return format.Source(src, format.Options{
+			LangVersion: langVersion,
+			ModulePath:  modulePath,
+		})
 	}
 	updateAnalyzers(options)
-}
 
-var relaxedFullWord *regexp.Regexp
-
-// Ensure links are matched as full words, not anywhere.
-func init() {
-	relaxedFullWord = regexp.MustCompile(`\b(` + xurls.Relaxed().String() + `)\b`)
-	relaxedFullWord.Longest()
+	options.Govulncheck = vulncheck.Govulncheck
 }
diff --git a/gopls/internal/regtest/bench/bench_test.go b/gopls/internal/regtest/bench/bench_test.go
index 9cbf2f4..61d4ae2 100644
--- a/gopls/internal/regtest/bench/bench_test.go
+++ b/gopls/internal/regtest/bench/bench_test.go
@@ -10,7 +10,6 @@
 	"os"
 	"runtime/pprof"
 	"testing"
-	"time"
 
 	"golang.org/x/tools/gopls/internal/hooks"
 	"golang.org/x/tools/internal/lsp/fake"
@@ -32,9 +31,9 @@
 		SkipLogs(),
 		// The Debug server only makes sense if running in singleton mode.
 		Modes(Singleton),
-		// Set a generous timeout. Individual tests should control their own
-		// graceful termination.
-		Timeout(20 * time.Minute),
+		// Remove the default timeout. Individual tests should control their
+		// own graceful termination.
+		NoDefaultTimeout(),
 
 		// Use the actual proxy, since we want our builds to succeed.
 		GOPROXY("https://proxy.golang.org"),
diff --git a/gopls/internal/regtest/codelens/codelens_test.go b/gopls/internal/regtest/codelens/codelens_test.go
index ad35a29..3e15271 100644
--- a/gopls/internal/regtest/codelens/codelens_test.go
+++ b/gopls/internal/regtest/codelens/codelens_test.go
@@ -9,7 +9,6 @@
 	"runtime"
 	"strings"
 	"testing"
-	"time"
 
 	"golang.org/x/tools/gopls/internal/hooks"
 	. "golang.org/x/tools/internal/lsp/regtest"
@@ -311,8 +310,6 @@
 			CodeLenses: map[string]bool{
 				"gc_details": true,
 			}},
-		// TestGCDetails seems to suffer from poor performance on certain builders. Give it some more time to complete.
-		Timeout(60*time.Second),
 	).Run(t, mod, func(t *testing.T, env *Env) {
 		env.OpenFile("main.go")
 		env.ExecuteCodeLensCommand("main.go", command.GCDetails)
diff --git a/gopls/internal/regtest/completion/completion18_test.go b/gopls/internal/regtest/completion/completion18_test.go
new file mode 100644
index 0000000..9683e30
--- /dev/null
+++ b/gopls/internal/regtest/completion/completion18_test.go
@@ -0,0 +1,123 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package completion
+
+import (
+	"testing"
+
+	. "golang.org/x/tools/internal/lsp/regtest"
+)
+
+// test generic receivers
+func TestGenericReceiver(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.18
+-- main.go --
+package main
+type SyncMap[K any, V comparable] struct {}
+func (s *SyncMap[K,V]) f() {}
+type XX[T any] struct {}
+type UU[T any] struct {}
+func (s SyncMap[XX,string]) g(v UU) {}
+`
+
+	tests := []struct {
+		pat  string
+		want []string
+	}{
+		{"s .Syn", []string{"SyncMap[K, V]"}},
+		{"Map.X", []string{}}, // This is probably wrong, Maybe "XX"?
+		{"v U", []string{"UU", "uint", "uint16", "uint32", "uint64", "uint8", "uintptr"}}, // not U[T]
+	}
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+		env.Await(env.DoneWithOpen())
+		for _, tst := range tests {
+			pos := env.RegexpSearch("main.go", tst.pat)
+			pos.Column += len(tst.pat)
+			completions := env.Completion("main.go", pos)
+			result := compareCompletionResults(tst.want, completions.Items)
+			if result != "" {
+				t.Errorf("%s: wanted %v", result, tst.want)
+				for i, g := range completions.Items {
+					t.Errorf("got %d %s %s", i, g.Label, g.Detail)
+				}
+			}
+		}
+	})
+}
+func TestFuzzFunc(t *testing.T) {
+	// use the example from the package documentation
+	modfile := `
+-- go.mod --
+module mod.com
+
+go 1.18
+`
+	part0 := `package foo
+import "testing"
+func FuzzNone(f *testing.F) {
+	f.Add(12) // better not find this f.Add
+}
+func FuzzHex(f *testing.F) {
+	for _, seed := range [][]byte{{}, {0}, {9}, {0xa}, {0xf}, {1, 2, 3, 4}} {
+		f.Ad`
+	part1 := `d(seed)
+	}
+	f.F`
+	part2 := `uzz(func(t *testing.T, in []byte) {
+		enc := hex.EncodeToString(in)
+		out, err := hex.DecodeString(enc)
+		if err != nil {
+		  f.Failed()
+		}
+		if !bytes.Equal(in, out) {
+		  t.Fatalf("%v: round trip: %v, %s", in, out, f.Name())
+		}
+	})
+}
+`
+	data := modfile + `-- a_test.go --
+` + part0 + `
+-- b_test.go --
+` + part0 + part1 + `
+-- c_test.go --
+` + part0 + part1 + part2
+
+	tests := []struct {
+		file   string
+		pat    string
+		offset int // from the beginning of pat to what the user just typed
+		want   []string
+	}{
+		{"a_test.go", "f.Ad", 3, []string{"Add"}},
+		{"c_test.go", " f.F", 4, []string{"Failed"}},
+		{"c_test.go", "f.N", 3, []string{"Name"}},
+		{"b_test.go", "f.F", 3, []string{"Fuzz(func(t *testing.T, a []byte)", "Fail", "FailNow",
+			"Failed", "Fatal", "Fatalf"}},
+	}
+	Run(t, data, func(t *testing.T, env *Env) {
+		for _, test := range tests {
+			env.OpenFile(test.file)
+			env.Await(env.DoneWithOpen())
+			pos := env.RegexpSearch(test.file, test.pat)
+			pos.Column += test.offset // the character the user just typed (or will type)
+			completions := env.Completion(test.file, pos)
+			result := compareCompletionResults(test.want, completions.Items)
+			if result != "" {
+				t.Errorf("pat %q %q", test.pat, result)
+				for i, it := range completions.Items {
+					t.Errorf("%d got %q %q", i, it.Label, it.Detail)
+				}
+			}
+		}
+	})
+}
diff --git a/gopls/internal/regtest/completion/completion_test.go b/gopls/internal/regtest/completion/completion_test.go
index 795f7ae..c0b4736 100644
--- a/gopls/internal/regtest/completion/completion_test.go
+++ b/gopls/internal/regtest/completion/completion_test.go
@@ -248,11 +248,15 @@
 	var got []string
 	for _, item := range gotItems {
 		got = append(got, item.Label)
+		if item.Label != item.InsertText && item.TextEdit == nil {
+			// Label should be the same as InsertText, if InsertText is to be used
+			return fmt.Sprintf("label not the same as InsertText %#v", item)
+		}
 	}
 
 	for i, v := range got {
 		if v != want[i] {
-			return fmt.Sprintf("completion results are not the same: got %v, want %v", got, want)
+			return fmt.Sprintf("%d completion result not the same: got %q, want %q", i, v, want[i])
 		}
 	}
 
@@ -542,3 +546,101 @@
 		}
 	})
 }
+
+func TestDefinition(t *testing.T) {
+	stuff := `
+-- go.mod --
+module mod.com
+
+go 1.18
+-- a_test.go --
+package foo
+func T()
+func TestG()
+func TestM()
+func TestMi()
+func Ben()
+func Fuz()
+func Testx()
+func TestMe(t *testing.T)
+func BenchmarkFoo()
+`
+	// All those parentheses are needed for the completion code to see
+	// later lines as being definitions
+	tests := []struct {
+		pat  string
+		want []string
+	}{
+		{"T", []string{"TestXxx(t *testing.T)", "TestMain(m *testing.M)"}},
+		{"TestM", []string{"TestMain(m *testing.M)", "TestM(t *testing.T)"}},
+		{"TestMi", []string{"TestMi(t *testing.T)"}},
+		{"TestG", []string{"TestG(t *testing.T)"}},
+		{"B", []string{"BenchmarkXxx(b *testing.B)"}},
+		{"BenchmarkFoo", []string{"BenchmarkFoo(b *testing.B)"}},
+		{"F", []string{"FuzzXxx(f *testing.F)"}},
+		{"Testx", nil},
+		{"TestMe", []string{"TestMe"}},
+	}
+	fname := "a_test.go"
+	Run(t, stuff, func(t *testing.T, env *Env) {
+		env.OpenFile(fname)
+		env.Await(env.DoneWithOpen())
+		for _, tst := range tests {
+			pos := env.RegexpSearch(fname, tst.pat)
+			pos.Column += len(tst.pat)
+			completions := env.Completion(fname, pos)
+			result := compareCompletionResults(tst.want, completions.Items)
+			if result != "" {
+				t.Errorf("%s failed: %s:%q", tst.pat, result, tst.want)
+				for i, it := range completions.Items {
+					t.Errorf("%d got %q %q", i, it.Label, it.Detail)
+				}
+			}
+		}
+	})
+}
+
+func TestGoWorkCompletion(t *testing.T) {
+	const files = `
+-- go.work --
+go 1.18
+
+use ./a
+use ./a/ba
+use ./a/b/
+use ./dir/foo
+use ./dir/foobar/
+-- a/go.mod --
+-- go.mod --
+-- a/bar/go.mod --
+-- a/b/c/d/e/f/go.mod --
+-- dir/bar --
+-- dir/foobar/go.mod --
+`
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("go.work")
+
+		tests := []struct {
+			re   string
+			want []string
+		}{
+			{`use ()\.`, []string{".", "./a", "./a/bar", "./dir/foobar"}},
+			{`use \.()`, []string{"", "/a", "/a/bar", "/dir/foobar"}},
+			{`use \./()`, []string{"a", "a/bar", "dir/foobar"}},
+			{`use ./a()`, []string{"", "/b/c/d/e/f", "/bar"}},
+			{`use ./a/b()`, []string{"/c/d/e/f", "ar"}},
+			{`use ./a/b/()`, []string{`c/d/e/f`}},
+			{`use ./a/ba()`, []string{"r"}},
+			{`use ./dir/foo()`, []string{"bar"}},
+			{`use ./dir/foobar/()`, []string{}},
+		}
+		for _, tt := range tests {
+			completions := env.Completion("go.work", env.RegexpSearch("go.work", tt.re))
+			diff := compareCompletionResults(tt.want, completions.Items)
+			if diff != "" {
+				t.Errorf("%s: %s", tt.re, diff)
+			}
+		}
+	})
+}
diff --git a/gopls/internal/regtest/completion/postfix_snippet_test.go b/gopls/internal/regtest/completion/postfix_snippet_test.go
index 1f5b7cf..2674d55 100644
--- a/gopls/internal/regtest/completion/postfix_snippet_test.go
+++ b/gopls/internal/regtest/completion/postfix_snippet_test.go
@@ -13,6 +13,8 @@
 )
 
 func TestPostfixSnippetCompletion(t *testing.T) {
+	t.Skipf("skipping test due to suspected synchronization bug; see https://go.dev/issue/50707")
+
 	const mod = `
 -- go.mod --
 module mod.com
@@ -372,6 +374,44 @@
 }
 `,
 		},
+		{
+			name: "string split",
+			before: `
+package foo
+
+func foo() []string { 
+	x := "test"
+	return x.split
+}`,
+			after: `
+package foo
+
+import "strings"
+
+func foo() []string { 
+	x := "test"
+	return strings.Split(x, "$0")
+}`,
+		},
+		{
+			name: "string slice join",
+			before: `
+package foo
+
+func foo() string {
+	x := []string{"a", "test"}
+	return x.join
+}`,
+			after: `
+package foo
+
+import "strings"
+
+func foo() string {
+	x := []string{"a", "test"}
+	return strings.Join(x, "$0")
+}`,
+		},
 	}
 
 	r := WithOptions(Options(func(o *source.Options) {
diff --git a/gopls/internal/regtest/diagnostics/diagnostics_test.go b/gopls/internal/regtest/diagnostics/diagnostics_test.go
index 9ee102c..c18dfbf 100644
--- a/gopls/internal/regtest/diagnostics/diagnostics_test.go
+++ b/gopls/internal/regtest/diagnostics/diagnostics_test.go
@@ -638,6 +638,8 @@
 
 // Test for golang/go#38211.
 func Test_Issue38211(t *testing.T) {
+	t.Skipf("Skipping flaky test: https://golang.org/issue/44098")
+
 	testenv.NeedsGo1Point(t, 14)
 	const ardanLabs = `
 -- go.mod --
@@ -967,6 +969,8 @@
 // This is a copy of the scenario_default/quickfix_empty_files.txt test from
 // govim. Reproduces golang/go#39646.
 func TestQuickFixEmptyFiles(t *testing.T) {
+	t.Skip("too flaky: golang/go#48773")
+
 	testenv.NeedsGo1Point(t, 15)
 
 	const mod = `
@@ -1509,6 +1513,7 @@
 // TestProgressBarErrors confirms that critical workspace load errors are shown
 // and updated via progress reports.
 func TestProgressBarErrors(t *testing.T) {
+	t.Skip("too flaky: golang/go#46930")
 	testenv.NeedsGo1Point(t, 14)
 
 	const pkg = `
@@ -2133,3 +2138,114 @@
 		)
 	})
 }
+
+func TestLangVersion(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18) // Requires types.Config.GoVersion, new in 1.18.
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- main.go --
+package main
+
+const C = 0b10
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.Await(env.DiagnosticAtRegexpWithMessage("main.go", `0b10`, "go1.13 or later"))
+		env.WriteWorkspaceFile("go.mod", "module mod.com \n\ngo 1.13\n")
+		env.Await(EmptyDiagnostics("main.go"))
+	})
+}
+
+func TestNoQuickFixForUndeclaredConstraint(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.18
+-- main.go --
+package main
+
+func F[T C](_ T) {
+}
+`
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		var d protocol.PublishDiagnosticsParams
+		env.Await(
+			OnceMet(
+				env.DiagnosticAtRegexpWithMessage("main.go", `C`, "undeclared name"),
+				ReadDiagnostics("main.go", &d),
+			),
+		)
+		if fixes := env.GetQuickFixes("main.go", d.Diagnostics); len(fixes) != 0 {
+			t.Errorf("got quick fixes %v, wanted none", fixes)
+		}
+	})
+}
+
+func TestEditGoDirective(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.16
+-- main.go --
+package main
+
+func F[T any](_ T) {
+}
+`
+	Run(t, files, func(_ *testing.T, env *Env) {
+		var d protocol.PublishDiagnosticsParams
+		env.Await(
+			OnceMet(
+				env.DiagnosticAtRegexpWithMessage("main.go", `T any`, "type parameters require"),
+				ReadDiagnostics("main.go", &d),
+			),
+		)
+
+		env.ApplyQuickFixes("main.go", d.Diagnostics)
+
+		env.Await(
+			EmptyDiagnostics("main.go"),
+		)
+	})
+}
+
+func TestEditGoDirectiveWorkspace(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.16
+-- go.work --
+go 1.18
+
+use .
+-- main.go --
+package main
+
+func F[T any](_ T) {
+}
+`
+	Run(t, files, func(_ *testing.T, env *Env) {
+		var d protocol.PublishDiagnosticsParams
+		env.Await(
+			OnceMet(
+				env.DiagnosticAtRegexpWithMessage("main.go", `T any`, "type parameters require"),
+				ReadDiagnostics("main.go", &d),
+			),
+		)
+
+		env.ApplyQuickFixes("main.go", d.Diagnostics)
+
+		env.Await(
+			EmptyDiagnostics("main.go"),
+		)
+	})
+}
diff --git a/gopls/internal/regtest/diagnostics/undeclared_test.go b/gopls/internal/regtest/diagnostics/undeclared_test.go
new file mode 100644
index 0000000..79f7d42
--- /dev/null
+++ b/gopls/internal/regtest/diagnostics/undeclared_test.go
@@ -0,0 +1,67 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package diagnostics
+
+import (
+	"testing"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+	. "golang.org/x/tools/internal/lsp/regtest"
+)
+
+func TestUndeclaredDiagnostics(t *testing.T) {
+	src := `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- a/a.go --
+package a
+
+func _() int {
+	return x
+}
+-- b/b.go --
+package b
+
+func _() int {
+	var y int
+	y = y
+	return y
+}
+`
+	Run(t, src, func(t *testing.T, env *Env) {
+		isUnnecessary := func(diag protocol.Diagnostic) bool {
+			for _, tag := range diag.Tags {
+				if tag == protocol.Unnecessary {
+					return true
+				}
+			}
+			return false
+		}
+
+		// 'x' is undeclared, but still necessary.
+		env.OpenFile("a/a.go")
+		env.Await(env.DiagnosticAtRegexp("a/a.go", "x"))
+		diags := env.DiagnosticsFor("a/a.go")
+		if got := len(diags.Diagnostics); got != 1 {
+			t.Errorf("len(Diagnostics) = %d, want 1", got)
+		}
+		if diag := diags.Diagnostics[0]; isUnnecessary(diag) {
+			t.Errorf("%v tagged unnecessary, want necessary", diag)
+		}
+
+		// 'y = y' is pointless, and should be detected as unnecessary.
+		env.OpenFile("b/b.go")
+		env.Await(env.DiagnosticAtRegexp("b/b.go", "y = y"))
+		diags = env.DiagnosticsFor("b/b.go")
+		if got := len(diags.Diagnostics); got != 1 {
+			t.Errorf("len(Diagnostics) = %d, want 1", got)
+		}
+		if diag := diags.Diagnostics[0]; !isUnnecessary(diag) {
+			t.Errorf("%v tagged necessary, want unnecessary", diag)
+		}
+	})
+}
diff --git a/gopls/internal/regtest/misc/add_import_test.go b/gopls/internal/regtest/misc/add_import_test.go
deleted file mode 100644
index 8eb96cf..0000000
--- a/gopls/internal/regtest/misc/add_import_test.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package misc
-
-import (
-	"testing"
-
-	"golang.org/x/tools/internal/lsp/command"
-	"golang.org/x/tools/internal/lsp/protocol"
-	. "golang.org/x/tools/internal/lsp/regtest"
-	"golang.org/x/tools/internal/lsp/tests"
-)
-
-func TestAddImport(t *testing.T) {
-	const before = `package main
-
-import "fmt"
-
-func main() {
-	fmt.Println("hello world")
-}
-`
-
-	const want = `package main
-
-import (
-	"bytes"
-	"fmt"
-)
-
-func main() {
-	fmt.Println("hello world")
-}
-`
-
-	Run(t, "", func(t *testing.T, env *Env) {
-		env.CreateBuffer("main.go", before)
-		cmd, err := command.NewAddImportCommand("Add Import", command.AddImportArgs{
-			URI:        protocol.URIFromSpanURI(env.Sandbox.Workdir.URI("main.go").SpanURI()),
-			ImportPath: "bytes",
-		})
-		if err != nil {
-			t.Fatal(err)
-		}
-		_, err = env.Editor.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{
-			Command:   "gopls.add_import",
-			Arguments: cmd.Arguments,
-		})
-		if err != nil {
-			t.Fatal(err)
-		}
-		got := env.Editor.BufferText("main.go")
-		if got != want {
-			t.Fatalf("gopls.add_import failed\n%s", tests.Diff(t, want, got))
-		}
-	})
-}
diff --git a/gopls/internal/regtest/misc/call_hierarchy_test.go b/gopls/internal/regtest/misc/call_hierarchy_test.go
new file mode 100644
index 0000000..9d98896
--- /dev/null
+++ b/gopls/internal/regtest/misc/call_hierarchy_test.go
@@ -0,0 +1,35 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+package misc
+
+import (
+	"testing"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+	. "golang.org/x/tools/internal/lsp/regtest"
+)
+
+// Test for golang/go#49125
+func TestCallHierarchy_Issue49125(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- p.go --
+package pkg
+`
+	// TODO(rfindley): this could probably just be a marker test.
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("p.go")
+		pos := env.RegexpSearch("p.go", "pkg")
+
+		var params protocol.CallHierarchyPrepareParams
+		params.TextDocument.URI = env.Sandbox.Workdir.URI("p.go")
+		params.Position = pos.ToProtocolPosition()
+
+		// Check that this doesn't panic.
+		env.Editor.Server.PrepareCallHierarchy(env.Ctx, &params)
+	})
+}
diff --git a/gopls/internal/regtest/misc/definition_test.go b/gopls/internal/regtest/misc/definition_test.go
index e6181c7..2f5a548 100644
--- a/gopls/internal/regtest/misc/definition_test.go
+++ b/gopls/internal/regtest/misc/definition_test.go
@@ -9,7 +9,9 @@
 	"strings"
 	"testing"
 
+	"golang.org/x/tools/internal/lsp/protocol"
 	. "golang.org/x/tools/internal/lsp/regtest"
+	"golang.org/x/tools/internal/testenv"
 
 	"golang.org/x/tools/internal/lsp/fake"
 	"golang.org/x/tools/internal/lsp/tests"
@@ -234,3 +236,56 @@
 		})
 	}
 }
+
+// Test for golang/go#47825.
+func TestImportTestVariant(t *testing.T) {
+	testenv.NeedsGo1Point(t, 13)
+
+	const mod = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- client/test/role.go --
+package test
+
+import _ "mod.com/client"
+
+type RoleSetup struct{}
+-- client/client_role_test.go --
+package client_test
+
+import (
+	"testing"
+	_ "mod.com/client"
+	ctest "mod.com/client/test"
+)
+
+func TestClient(t *testing.T) {
+	_ = ctest.RoleSetup{}
+}
+-- client/client_test.go --
+package client
+
+import "testing"
+
+func TestClient(t *testing.T) {}
+-- client.go --
+package client
+`
+	Run(t, mod, func(t *testing.T, env *Env) {
+		env.OpenFile("client/client_role_test.go")
+		env.GoToDefinition("client/client_role_test.go", env.RegexpSearch("client/client_role_test.go", "RoleSetup"))
+	})
+}
+
+// This test exercises a crashing pattern from golang/go#49223.
+func TestGoToCrashingDefinition_Issue49223(t *testing.T) {
+	Run(t, "", func(t *testing.T, env *Env) {
+		params := &protocol.DefinitionParams{}
+		params.TextDocument.URI = protocol.DocumentURI("fugitive%3A///Users/user/src/mm/ems/.git//0/pkg/domain/treasury/provider.go")
+		params.Position.Character = 18
+		params.Position.Line = 0
+		env.Editor.Server.Definition(env.Ctx, params)
+	})
+}
diff --git a/gopls/internal/regtest/misc/fix_test.go b/gopls/internal/regtest/misc/fix_test.go
index 8c5662a..8318ae5 100644
--- a/gopls/internal/regtest/misc/fix_test.go
+++ b/gopls/internal/regtest/misc/fix_test.go
@@ -78,7 +78,8 @@
 		env.OpenFile("main.go")
 		var d protocol.PublishDiagnosticsParams
 		env.Await(OnceMet(
-			env.DiagnosticAtRegexpWithMessage("main.go", `return`, "wrong number of return values"),
+			// The error message here changed in 1.18; "return values" covers both forms.
+			env.DiagnosticAtRegexpWithMessage("main.go", `return`, "return values"),
 			ReadDiagnostics("main.go", &d),
 		))
 		codeActions := env.CodeAction("main.go", d.Diagnostics)
diff --git a/gopls/internal/regtest/misc/formatting_test.go b/gopls/internal/regtest/misc/formatting_test.go
index 1e14237..75d8f62 100644
--- a/gopls/internal/regtest/misc/formatting_test.go
+++ b/gopls/internal/regtest/misc/formatting_test.go
@@ -268,3 +268,102 @@
 		})
 	}
 }
+
+func TestFormattingOfGeneratedFile_Issue49555(t *testing.T) {
+	const input = `
+-- main.go --
+// Code generated by generator.go. DO NOT EDIT.
+
+package main
+
+import "fmt"
+
+func main() {
+
+
+
+
+	fmt.Print("hello")
+}
+`
+
+	Run(t, input, func(t *testing.T, env *Env) {
+		wantErrSuffix := "file is generated"
+
+		env.OpenFile("main.go")
+		err := env.Editor.FormatBuffer(env.Ctx, "main.go")
+		if err == nil {
+			t.Fatal("expected error, got nil")
+		}
+		// Check only the suffix because the error contains a dynamic path to main.go.
+		if !strings.HasSuffix(err.Error(), wantErrSuffix) {
+			t.Fatalf("unexpected error %q, want suffix %q", err.Error(), wantErrSuffix)
+		}
+	})
+}
+
+func TestGofumptFormatting(t *testing.T) {
+
+	// Exercise some gofumpt formatting rules:
+	//  - No empty lines following an assignment operator
+	//  - Octal integer literals should use the 0o prefix on modules using Go
+	//    1.13 and later. Requires LangVersion to be correctly resolved.
+	//  - std imports must be in a separate group at the top. Requires ModulePath
+	//    to be correctly resolved.
+	const input = `
+-- go.mod --
+module foo
+
+go 1.17
+-- foo.go --
+package foo
+
+import (
+	"foo/bar"
+	"fmt"
+)
+
+const perm = 0755
+
+func foo() {
+	foo :=
+		"bar"
+	fmt.Println(foo, bar.Bar)
+}
+-- foo.go.formatted --
+package foo
+
+import (
+	"fmt"
+
+	"foo/bar"
+)
+
+const perm = 0o755
+
+func foo() {
+	foo := "bar"
+	fmt.Println(foo, bar.Bar)
+}
+-- bar/bar.go --
+package bar
+
+const Bar = 42
+`
+
+	WithOptions(
+		EditorConfig{
+			Settings: map[string]interface{}{
+				"gofumpt": true,
+			},
+		},
+	).Run(t, input, func(t *testing.T, env *Env) {
+		env.OpenFile("foo.go")
+		env.FormatBuffer("foo.go")
+		got := env.Editor.BufferText("foo.go")
+		want := env.ReadWorkspaceFile("foo.go.formatted")
+		if got != want {
+			t.Errorf("unexpected formatting result:\n%s", tests.Diff(t, want, got))
+		}
+	})
+}
diff --git a/gopls/internal/regtest/misc/generate_test.go b/gopls/internal/regtest/misc/generate_test.go
index 4478951..1dc22d7 100644
--- a/gopls/internal/regtest/misc/generate_test.go
+++ b/gopls/internal/regtest/misc/generate_test.go
@@ -16,6 +16,8 @@
 )
 
 func TestGenerateProgress(t *testing.T) {
+	t.Skipf("skipping flaky test: https://golang.org/issue/49901")
+
 	const generatedWorkspace = `
 -- go.mod --
 module fake.test
diff --git a/gopls/internal/regtest/misc/hover_test.go b/gopls/internal/regtest/misc/hover_test.go
index 1442178..04dc740 100644
--- a/gopls/internal/regtest/misc/hover_test.go
+++ b/gopls/internal/regtest/misc/hover_test.go
@@ -8,6 +8,7 @@
 	"strings"
 	"testing"
 
+	"golang.org/x/tools/internal/lsp/fake"
 	. "golang.org/x/tools/internal/lsp/regtest"
 	"golang.org/x/tools/internal/testenv"
 )
@@ -23,6 +24,7 @@
 package structs
 
 type Mixed struct {
+	// Exported comment
 	Exported   int
 	unexported string
 }
@@ -39,7 +41,7 @@
 
 require golang.org/x/structs v1.0.0
 -- go.sum --
-golang.org/x/structs v1.0.0 h1:3DlrFfd3OsEen7FnCHfqtnJvjBZ8ZFKmrD/+HjpdJj0=
+golang.org/x/structs v1.0.0 h1:Ito/a7hBYZaNKShFrZKjfBA/SIPvmBrcPCBWPx5QeKk=
 golang.org/x/structs v1.0.0/go.mod h1:47gkSIdo5AaQaWJS0upVORsxfEr1LL1MWv9dmYF3iq4=
 -- main.go --
 package main
@@ -47,9 +49,11 @@
 import "golang.org/x/structs"
 
 func main() {
-	var _ structs.Mixed
+	var m structs.Mixed
+	_ = m.Exported
 }
 `
+
 	// TODO: use a nested workspace folder here.
 	WithOptions(
 		ProxyFiles(proxy),
@@ -60,12 +64,19 @@
 		if !strings.Contains(got.Value, "unexported") {
 			t.Errorf("Workspace hover: missing expected field 'unexported'. Got:\n%q", got.Value)
 		}
+
 		cacheFile, _ := env.GoToDefinition("main.go", mixedPos)
 		argPos := env.RegexpSearch(cacheFile, "printMixed.*(Mixed)")
 		got, _ = env.Hover(cacheFile, argPos)
 		if !strings.Contains(got.Value, "unexported") {
 			t.Errorf("Non-workspace hover: missing expected field 'unexported'. Got:\n%q", got.Value)
 		}
+
+		exportedFieldPos := env.RegexpSearch("main.go", "Exported")
+		got, _ = env.Hover("main.go", exportedFieldPos)
+		if !strings.Contains(got.Value, "comment") {
+			t.Errorf("Workspace hover: missing comment for field 'Exported'. Got:\n%q", got.Value)
+		}
 	})
 }
 
@@ -99,3 +110,33 @@
 		}
 	})
 }
+
+// Tests that hovering does not trigger the panic in golang/go#48249.
+func TestPanicInHoverBrokenCode(t *testing.T) {
+	testenv.NeedsGo1Point(t, 13)
+	const source = `
+-- main.go --
+package main
+
+type Example struct`
+	Run(t, source, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+		env.Editor.Hover(env.Ctx, "main.go", env.RegexpSearch("main.go", "Example"))
+	})
+}
+
+func TestHoverRune_48492(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.18
+-- main.go --
+package main
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+		env.EditBuffer("main.go", fake.NewEdit(0, 0, 1, 0, "package main\nfunc main() {\nconst x = `\nfoo\n`\n}"))
+		env.Editor.Hover(env.Ctx, "main.go", env.RegexpSearch("main.go", "foo"))
+	})
+}
diff --git a/gopls/internal/regtest/misc/import_test.go b/gopls/internal/regtest/misc/import_test.go
new file mode 100644
index 0000000..d5b6bcf
--- /dev/null
+++ b/gopls/internal/regtest/misc/import_test.go
@@ -0,0 +1,133 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package misc
+
+import (
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+	"golang.org/x/tools/internal/lsp/command"
+	"golang.org/x/tools/internal/lsp/protocol"
+	. "golang.org/x/tools/internal/lsp/regtest"
+	"golang.org/x/tools/internal/lsp/tests"
+)
+
+func TestAddImport(t *testing.T) {
+	const before = `package main
+
+import "fmt"
+
+func main() {
+	fmt.Println("hello world")
+}
+`
+
+	const want = `package main
+
+import (
+	"bytes"
+	"fmt"
+)
+
+func main() {
+	fmt.Println("hello world")
+}
+`
+
+	Run(t, "", func(t *testing.T, env *Env) {
+		env.CreateBuffer("main.go", before)
+		cmd, err := command.NewAddImportCommand("Add Import", command.AddImportArgs{
+			URI:        env.Sandbox.Workdir.URI("main.go"),
+			ImportPath: "bytes",
+		})
+		if err != nil {
+			t.Fatal(err)
+		}
+		env.ExecuteCommand(&protocol.ExecuteCommandParams{
+			Command:   "gopls.add_import",
+			Arguments: cmd.Arguments,
+		}, nil)
+		got := env.Editor.BufferText("main.go")
+		if got != want {
+			t.Fatalf("gopls.add_import failed\n%s", tests.Diff(t, want, got))
+		}
+	})
+}
+
+func TestListImports(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- foo.go --
+package foo
+const C = 1
+-- import_strings_test.go --
+package foo
+import (
+	x "strings"
+	"testing"
+)
+
+func TestFoo(t *testing.T) {}
+-- import_testing_test.go --
+package foo
+
+import "testing"
+
+func TestFoo2(t *testing.T) {}
+`
+	tests := []struct {
+		filename string
+		want     command.ListImportsResult
+	}{
+		{
+			filename: "import_strings_test.go",
+			want: command.ListImportsResult{
+				Imports: []command.FileImport{
+					{Name: "x", Path: "strings"},
+					{Path: "testing"},
+				},
+				PackageImports: []command.PackageImport{
+					{Path: "strings"},
+					{Path: "testing"},
+				},
+			},
+		},
+		{
+			filename: "import_testing_test.go",
+			want: command.ListImportsResult{
+				Imports: []command.FileImport{
+					{Path: "testing"},
+				},
+				PackageImports: []command.PackageImport{
+					{Path: "strings"},
+					{Path: "testing"},
+				},
+			},
+		},
+	}
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		for _, tt := range tests {
+			cmd, err := command.NewListImportsCommand("List Imports", command.URIArg{
+				URI: env.Sandbox.Workdir.URI(tt.filename),
+			})
+			if err != nil {
+				t.Fatal(err)
+			}
+			var result command.ListImportsResult
+			env.ExecuteCommand(&protocol.ExecuteCommandParams{
+				Command:   command.ListImports.ID(),
+				Arguments: cmd.Arguments,
+			}, &result)
+			if diff := cmp.Diff(tt.want, result); diff != "" {
+				t.Errorf("unexpected list imports result for %q (-want +got):\n%s", tt.filename, diff)
+			}
+		}
+
+	})
+}
diff --git a/gopls/internal/regtest/misc/references_test.go b/gopls/internal/regtest/misc/references_test.go
index f3a23e4..7682516 100644
--- a/gopls/internal/regtest/misc/references_test.go
+++ b/gopls/internal/regtest/misc/references_test.go
@@ -42,3 +42,42 @@
 		}
 	})
 }
+
+// This reproduces and tests golang/go#48400.
+func TestReferencesPanicOnError(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- main.go --
+package main
+
+type t interface {
+	error
+}
+
+type s struct{}
+
+func (*s) Error() string {
+	return ""
+}
+
+func _() {
+	var s s
+	_ = s.Error()
+}
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+		file, pos := env.GoToDefinition("main.go", env.RegexpSearch("main.go", `Error`))
+		refs, err := env.Editor.References(env.Ctx, file, pos)
+		if err == nil {
+			t.Fatalf("expected error for references, instead got %v", refs)
+		}
+		wantErr := "no position for func (error).Error() string"
+		if err.Error() != wantErr {
+			t.Fatalf("expected error with message %s, instead got %s", wantErr, err.Error())
+		}
+	})
+}
diff --git a/gopls/internal/regtest/misc/settings_test.go b/gopls/internal/regtest/misc/settings_test.go
new file mode 100644
index 0000000..7704c3c
--- /dev/null
+++ b/gopls/internal/regtest/misc/settings_test.go
@@ -0,0 +1,36 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package misc
+
+import (
+	"testing"
+
+	. "golang.org/x/tools/internal/lsp/regtest"
+)
+
+func TestEmptyDirectoryFilters_Issue51843(t *testing.T) {
+	const src = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- main.go --
+package main
+
+func main() {
+}
+`
+
+	WithOptions(
+		EditorConfig{
+			Settings: map[string]interface{}{
+				"directoryFilters": []string{""},
+			},
+		},
+	).Run(t, src, func(t *testing.T, env *Env) {
+		// No need to do anything. Issue golang/go#51843 is triggered by the empty
+		// directory filter above.
+	})
+}
diff --git a/gopls/internal/regtest/misc/template_test.go b/gopls/internal/regtest/misc/template_test.go
deleted file mode 100644
index 2bb61fb..0000000
--- a/gopls/internal/regtest/misc/template_test.go
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package misc
-
-import (
-	"strings"
-	"testing"
-
-	"golang.org/x/tools/internal/lsp/protocol"
-	. "golang.org/x/tools/internal/lsp/regtest"
-)
-
-const filesA = `
--- go.mod --
-module mod.com
-
-go 1.12
--- b.gotmpl --
-{{define "A"}}goo{{end}}
--- a.tmpl --
-{{template "A"}}
-`
-
-func TestSuffixes(t *testing.T) {
-	WithOptions(
-		EditorConfig{
-			AllExperiments: true,
-		},
-	).Run(t, filesA, func(t *testing.T, env *Env) {
-		env.OpenFile("a.tmpl")
-		x := env.RegexpSearch("a.tmpl", `A`)
-		file, pos := env.GoToDefinition("a.tmpl", x)
-		refs := env.References(file, pos)
-		if len(refs) != 2 {
-			t.Fatalf("got %v reference(s), want 2", len(refs))
-		}
-		// make sure we got one from b.gotmpl
-		want := env.Sandbox.Workdir.URI("b.gotmpl")
-		if refs[0].URI != want && refs[1].URI != want {
-			t.Errorf("failed to find reference to %s", shorten(want))
-			for i, r := range refs {
-				t.Logf("%d: URI:%s %v", i, shorten(r.URI), r.Range)
-			}
-		}
-
-		content, npos := env.Hover(file, pos)
-		if pos != npos {
-			t.Errorf("pos? got %v, wanted %v", npos, pos)
-		}
-		if content.Value != "template A defined" {
-			t.Errorf("got %s, wanted 'template A defined", content.Value)
-		}
-	})
-}
-
-// shorten long URIs
-func shorten(fn protocol.DocumentURI) string {
-	if len(fn) <= 20 {
-		return string(fn)
-	}
-	pieces := strings.Split(string(fn), "/")
-	if len(pieces) < 2 {
-		return string(fn)
-	}
-	j := len(pieces)
-	return pieces[j-2] + "/" + pieces[j-1]
-}
-
-// Hover,  SemTok, Diagnose with errors
-// and better coverage
diff --git a/gopls/internal/regtest/misc/vendor_test.go b/gopls/internal/regtest/misc/vendor_test.go
index 4e02799..0e615f2 100644
--- a/gopls/internal/regtest/misc/vendor_test.go
+++ b/gopls/internal/regtest/misc/vendor_test.go
@@ -5,6 +5,7 @@
 package misc
 
 import (
+	"runtime"
 	"testing"
 
 	. "golang.org/x/tools/internal/lsp/regtest"
@@ -26,6 +27,9 @@
 
 func TestInconsistentVendoring(t *testing.T) {
 	testenv.NeedsGo1Point(t, 14)
+	if runtime.GOOS == "windows" {
+		t.Skipf("skipping test due to flakiness on Windows: https://golang.org/issue/49646")
+	}
 
 	const pkgThatUsesVendoring = `
 -- go.mod --
diff --git a/gopls/internal/regtest/misc/vuln_test.go b/gopls/internal/regtest/misc/vuln_test.go
new file mode 100644
index 0000000..94fde71
--- /dev/null
+++ b/gopls/internal/regtest/misc/vuln_test.go
@@ -0,0 +1,43 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package misc
+
+import (
+	"testing"
+
+	"golang.org/x/tools/internal/lsp/command"
+	"golang.org/x/tools/internal/lsp/protocol"
+	. "golang.org/x/tools/internal/lsp/regtest"
+)
+
+func TestRunVulncheckExpError(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- foo.go --
+package foo
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		cmd, err := command.NewRunVulncheckExpCommand("Run Vulncheck Exp", command.VulncheckArgs{
+			Dir: "/invalid/file/url", // invalid arg
+		})
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		params := &protocol.ExecuteCommandParams{
+			Command:   command.RunVulncheckExp.ID(),
+			Arguments: cmd.Arguments,
+		}
+
+		response, err := env.Editor.ExecuteCommand(env.Ctx, params)
+		// We want an error!
+		if err == nil {
+			t.Errorf("got success, want invalid file URL error: %v", response)
+		}
+	})
+}
diff --git a/gopls/internal/regtest/misc/workspace_symbol_test.go b/gopls/internal/regtest/misc/workspace_symbol_test.go
new file mode 100644
index 0000000..a21d473
--- /dev/null
+++ b/gopls/internal/regtest/misc/workspace_symbol_test.go
@@ -0,0 +1,131 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package misc
+
+import (
+	"testing"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+	. "golang.org/x/tools/internal/lsp/regtest"
+	"golang.org/x/tools/internal/lsp/source"
+	"golang.org/x/tools/internal/testenv"
+)
+
+func TestWorkspaceSymbolMissingMetadata(t *testing.T) {
+	// We get 2 symbols on 1.12, for some reason.
+	testenv.NeedsGo1Point(t, 13)
+
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.17
+-- a.go --
+package p
+
+const C1 = "a.go"
+-- ignore.go --
+
+// +build ignore
+
+package ignore
+
+const C2 = "ignore.go"
+`
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("a.go")
+		syms := env.WorkspaceSymbol("C")
+		if got, want := len(syms), 1; got != want {
+			t.Errorf("got %d symbols, want %d", got, want)
+		}
+
+		// Opening up an ignored file will result in an overlay with missing
+		// metadata, but this shouldn't break workspace symbols requests.
+		env.OpenFile("ignore.go")
+		syms = env.WorkspaceSymbol("C")
+		if got, want := len(syms), 1; got != want {
+			t.Errorf("got %d symbols, want %d", got, want)
+		}
+	})
+}
+
+func TestWorkspaceSymbolSorting(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.17
+-- a/a.go --
+package a
+
+const (
+	Foo = iota
+	FooBar
+	Fooey
+	Fooex
+	Fooest
+)
+`
+
+	var symbolMatcher = string(source.SymbolFastFuzzy)
+	WithOptions(
+		EditorConfig{
+			SymbolMatcher: &symbolMatcher,
+		},
+	).Run(t, files, func(t *testing.T, env *Env) {
+		want := []string{
+			"Foo",    // prefer exact segment matches first
+			"FooBar", // ...followed by exact word matches
+			"Fooex",  // shorter than Fooest, FooBar, lexically before Fooey
+			"Fooey",  // shorter than Fooest, Foobar
+			"Fooest",
+		}
+		got := env.WorkspaceSymbol("Foo")
+		compareSymbols(t, got, want)
+	})
+}
+
+func TestWorkspaceSymbolSpecialPatterns(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.17
+-- a/a.go --
+package a
+
+const (
+	AxxBxxCxx = iota
+	ABC
+)
+`
+
+	var symbolMatcher = string(source.SymbolFastFuzzy)
+	WithOptions(
+		EditorConfig{
+			SymbolMatcher: &symbolMatcher,
+		},
+	).Run(t, files, func(t *testing.T, env *Env) {
+		compareSymbols(t, env.WorkspaceSymbol("ABC"), []string{"ABC", "AxxBxxCxx"})
+		compareSymbols(t, env.WorkspaceSymbol("'ABC"), []string{"ABC"})
+		compareSymbols(t, env.WorkspaceSymbol("^mod.com"), []string{"mod.com/a.ABC", "mod.com/a.AxxBxxCxx"})
+		compareSymbols(t, env.WorkspaceSymbol("^mod.com Axx"), []string{"mod.com/a.AxxBxxCxx"})
+		compareSymbols(t, env.WorkspaceSymbol("C$"), []string{"ABC"})
+	})
+}
+
+func compareSymbols(t *testing.T, got []protocol.SymbolInformation, want []string) {
+	t.Helper()
+	if len(got) != len(want) {
+		t.Errorf("got %d symbols, want %d", len(got), len(want))
+	}
+
+	for i := range got {
+		if got[i].Name != want[i] {
+			t.Errorf("got[%d] = %q, want %q", i, got[i].Name, want[i])
+		}
+	}
+}
diff --git a/gopls/internal/regtest/modfile/modfile_test.go b/gopls/internal/regtest/modfile/modfile_test.go
index 6c94bfb..868aa70 100644
--- a/gopls/internal/regtest/modfile/modfile_test.go
+++ b/gopls/internal/regtest/modfile/modfile_test.go
@@ -6,6 +6,7 @@
 
 import (
 	"path/filepath"
+	"runtime"
 	"strings"
 	"testing"
 
@@ -238,6 +239,62 @@
 	})
 }
 
+// Tests that multiple missing dependencies give good individual fixes.
+func TestMissingDependencyFixesWithGoWork(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+	const mod = `
+-- go.work --
+go 1.18
+
+use (
+	./a
+)
+-- a/go.mod --
+module mod.com
+
+go 1.12
+
+-- a/main.go --
+package main
+
+import "example.com/blah"
+import "random.org/blah"
+
+var _, _ = blah.Name, hello.Name
+`
+
+	const want = `module mod.com
+
+go 1.12
+
+require random.org v1.2.3
+`
+
+	RunMultiple{
+		{"default", WithOptions(ProxyFiles(proxy), WorkspaceFolders("a"))},
+		{"nested", WithOptions(ProxyFiles(proxy))},
+	}.Run(t, mod, func(t *testing.T, env *Env) {
+		env.OpenFile("a/main.go")
+		var d protocol.PublishDiagnosticsParams
+		env.Await(
+			OnceMet(
+				env.DiagnosticAtRegexp("a/main.go", `"random.org/blah"`),
+				ReadDiagnostics("a/main.go", &d),
+			),
+		)
+		var randomDiag protocol.Diagnostic
+		for _, diag := range d.Diagnostics {
+			if strings.Contains(diag.Message, "random.org") {
+				randomDiag = diag
+			}
+		}
+		env.ApplyQuickFixes("a/main.go", []protocol.Diagnostic{randomDiag})
+		if got := env.ReadWorkspaceFile("a/go.mod"); got != want {
+			t.Fatalf("unexpected go.mod content:\n%s", tests.Diff(t, want, got))
+		}
+	})
+}
+
 func TestIndirectDependencyFix(t *testing.T) {
 	testenv.NeedsGo1Point(t, 14)
 
@@ -513,6 +570,10 @@
 
 // Reproduces golang/go#38232.
 func TestUnknownRevision(t *testing.T) {
+	if runtime.GOOS == "plan9" {
+		t.Skipf("skipping test that fails for unknown reasons on plan9; see https://go.dev/issue/50477")
+	}
+
 	testenv.NeedsGo1Point(t, 14)
 
 	const unknown = `
@@ -1115,3 +1176,20 @@
 		)
 	})
 }
+
+func TestInvalidGoVersion(t *testing.T) {
+	testenv.NeedsGo1Point(t, 14) // Times out on 1.13 for reasons unclear. Not worth worrying about.
+	const files = `
+-- go.mod --
+module mod.com
+
+go foo
+-- main.go --
+package main
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.Await(env.DiagnosticAtRegexpWithMessage("go.mod", `go foo`, "invalid go version"))
+		env.WriteWorkspaceFile("go.mod", "module mod.com \n\ngo 1.12\n")
+		env.Await(EmptyDiagnostics("go.mod"))
+	})
+}
diff --git a/gopls/internal/regtest/template/template_test.go b/gopls/internal/regtest/template/template_test.go
new file mode 100644
index 0000000..b0acdfe
--- /dev/null
+++ b/gopls/internal/regtest/template/template_test.go
@@ -0,0 +1,230 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/gopls/internal/hooks"
+	"golang.org/x/tools/internal/lsp/protocol"
+	. "golang.org/x/tools/internal/lsp/regtest"
+)
+
+func TestMain(m *testing.M) {
+	Main(m, hooks.Options)
+}
+
+func TestMultilineTokens(t *testing.T) {
+	// 51731: panic: runtime error: slice bounds out of range [38:3]
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.17
+-- hi.tmpl --
+{{if (foÜx .X.Y)}}😀{{$A := 
+	"hi"
+	}}{{.Z $A}}{{else}}
+{{$A.X 12}}
+{{foo (.X.Y) 23 ($A.Z)}}
+{{end}}
+`
+	WithOptions(
+		EditorConfig{
+			Settings: map[string]interface{}{
+				"templateExtensions": []string{"tmpl"},
+				"semanticTokens":     true,
+			},
+		},
+	).Run(t, files, func(t *testing.T, env *Env) {
+		var p protocol.SemanticTokensParams
+		p.TextDocument.URI = env.Sandbox.Workdir.URI("hi.tmpl")
+		toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p)
+		if err != nil {
+			t.Errorf("semantic token failed: %v", err)
+		}
+		if toks == nil || len(toks.Data) == 0 {
+			t.Errorf("got no semantic tokens")
+		}
+	})
+}
+
+func TestTemplatesFromExtensions(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- hello.tmpl --
+{{range .Planets}}
+Hello {{}} <-- missing body
+{{end}}
+`
+	WithOptions(
+		EditorConfig{
+			Settings: map[string]interface{}{
+				"templateExtensions": []string{"tmpl"},
+				"semanticTokens":     true,
+			},
+		},
+	).Run(t, files, func(t *testing.T, env *Env) {
+		// TODO: can we move this diagnostic onto {{}}?
+		env.Await(env.DiagnosticAtRegexp("hello.tmpl", "()Hello {{}}"))
+		d := env.DiagnosticsFor("hello.tmpl").Diagnostics // issue 50786: check for Source
+		if len(d) != 1 {
+			t.Errorf("expected 1 diagnostic, got %d", len(d))
+			return
+		}
+		if d[0].Source != "template" {
+			t.Errorf("expected Source 'template', got %q", d[0].Source)
+		}
+		// issue 50801 (even broken templates could return some semantic tokens)
+		var p protocol.SemanticTokensParams
+		p.TextDocument.URI = env.Sandbox.Workdir.URI("hello.tmpl")
+		toks, err := env.Editor.Server.SemanticTokensFull(env.Ctx, &p)
+		if err != nil {
+			t.Errorf("semantic token failed: %v", err)
+		}
+		if toks == nil || len(toks.Data) == 0 {
+			t.Errorf("got no semantic tokens")
+		}
+
+		env.WriteWorkspaceFile("hello.tmpl", "{{range .Planets}}\nHello {{.}}\n{{end}}")
+		env.Await(EmptyDiagnostics("hello.tmpl"))
+	})
+}
+
+func TestTemplatesObserveDirectoryFilters(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- a/a.tmpl --
+A {{}} <-- missing body
+-- b/b.tmpl --
+B {{}} <-- missing body
+`
+
+	WithOptions(
+		EditorConfig{
+			Settings: map[string]interface{}{
+				"templateExtensions": []string{"tmpl"},
+			},
+			DirectoryFilters: []string{"-b"},
+		},
+	).Run(t, files, func(t *testing.T, env *Env) {
+		env.Await(
+			OnceMet(env.DiagnosticAtRegexp("a/a.tmpl", "()A")),
+			NoDiagnostics("b/b.tmpl"),
+		)
+	})
+}
+
+func TestTemplatesFromLangID(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+`
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.CreateBuffer("hello.tmpl", "")
+		env.Await(
+			OnceMet(
+				env.DoneWithOpen(),
+				NoDiagnostics("hello.tmpl"), // Don't get spurious errors for empty templates.
+			),
+		)
+		env.SetBufferContent("hello.tmpl", "{{range .Planets}}\nHello {{}}\n{{end}}")
+		env.Await(env.DiagnosticAtRegexp("hello.tmpl", "()Hello {{}}"))
+		env.RegexpReplace("hello.tmpl", "{{}}", "{{.}}")
+		env.Await(EmptyOrNoDiagnostics("hello.tmpl"))
+	})
+}
+
+func TestClosingTemplatesMakesDiagnosticsDisappear(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- hello.tmpl --
+{{range .Planets}}
+Hello {{}} <-- missing body
+{{end}}
+`
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("hello.tmpl")
+		env.Await(env.DiagnosticAtRegexp("hello.tmpl", "()Hello {{}}"))
+		// Since we don't have templateExtensions configured, closing hello.tmpl
+		// should make its diagnostics disappear.
+		env.CloseBuffer("hello.tmpl")
+		env.Await(EmptyDiagnostics("hello.tmpl"))
+	})
+}
+
+func TestMultipleSuffixes(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- b.gotmpl --
+{{define "A"}}goo{{end}}
+-- a.tmpl --
+{{template "A"}}
+`
+
+	WithOptions(
+		EditorConfig{
+			Settings: map[string]interface{}{
+				"templateExtensions": []string{"tmpl", "gotmpl"},
+			},
+		},
+	).Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("a.tmpl")
+		x := env.RegexpSearch("a.tmpl", `A`)
+		file, pos := env.GoToDefinition("a.tmpl", x)
+		refs := env.References(file, pos)
+		if len(refs) != 2 {
+			t.Fatalf("got %v reference(s), want 2", len(refs))
+		}
+		// make sure we got one from b.gotmpl
+		want := env.Sandbox.Workdir.URI("b.gotmpl")
+		if refs[0].URI != want && refs[1].URI != want {
+			t.Errorf("failed to find reference to %s", shorten(want))
+			for i, r := range refs {
+				t.Logf("%d: URI:%s %v", i, shorten(r.URI), r.Range)
+			}
+		}
+
+		content, npos := env.Hover(file, pos)
+		if pos != npos {
+			t.Errorf("pos? got %v, wanted %v", npos, pos)
+		}
+		if content.Value != "template A defined" {
+			t.Errorf("got %s, wanted 'template A defined", content.Value)
+		}
+	})
+}
+
+// shorten long URIs
+func shorten(fn protocol.DocumentURI) string {
+	if len(fn) <= 20 {
+		return string(fn)
+	}
+	pieces := strings.Split(string(fn), "/")
+	if len(pieces) < 2 {
+		return string(fn)
+	}
+	j := len(pieces)
+	return pieces[j-2] + "/" + pieces[j-1]
+}
+
+// Hover needs tests
diff --git a/gopls/internal/regtest/workspace/workspace_test.go b/gopls/internal/regtest/workspace/workspace_test.go
index 4c5d1fc..ed2c9ef 100644
--- a/gopls/internal/regtest/workspace/workspace_test.go
+++ b/gopls/internal/regtest/workspace/workspace_test.go
@@ -5,17 +5,16 @@
 package workspace
 
 import (
-	"encoding/json"
 	"fmt"
-	"io/ioutil"
 	"path/filepath"
+	"sort"
 	"strings"
 	"testing"
 
 	"golang.org/x/tools/gopls/internal/hooks"
 	. "golang.org/x/tools/internal/lsp/regtest"
+	"golang.org/x/tools/internal/lsp/source"
 
-	"golang.org/x/tools/internal/lsp/command"
 	"golang.org/x/tools/internal/lsp/fake"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/testenv"
@@ -137,6 +136,38 @@
 	}
 }
 
+// make sure that directory filters work
+func TestFilters(t *testing.T) {
+	for _, tt := range []struct {
+		name, rootPath string
+	}{
+		{
+			name:     "module root",
+			rootPath: "pkg",
+		},
+	} {
+		t.Run(tt.name, func(t *testing.T) {
+			opts := []RunOption{ProxyFiles(workspaceProxy)}
+			if tt.rootPath != "" {
+				opts = append(opts, WorkspaceFolders(tt.rootPath))
+			}
+			f := func(o *source.Options) {
+				o.DirectoryFilters = append(o.DirectoryFilters, "-inner")
+			}
+			opts = append(opts, Options(f))
+			WithOptions(opts...).Run(t, workspaceModule, func(t *testing.T, env *Env) {
+				syms := env.WorkspaceSymbol("Hi")
+				sort.Slice(syms, func(i, j int) bool { return syms[i].ContainerName < syms[j].ContainerName })
+				for i, s := range syms {
+					if strings.Contains(s.ContainerName, "/inner") {
+						t.Errorf("%s %v %s %s %d\n", s.Name, s.Kind, s.ContainerName, tt.name, i)
+					}
+				}
+			})
+		})
+	}
+}
+
 // Make sure that analysis diagnostics are cleared for the whole package when
 // the only opened file is closed. This test was inspired by the experience in
 // VS Code, where clicking on a reference result triggers a
@@ -160,6 +191,8 @@
 // This test checks that gopls updates the set of files it watches when a
 // replace target is added to the go.mod.
 func TestWatchReplaceTargets(t *testing.T) {
+	t.Skipf("skipping known-flaky test: see https://go.dev/issue/50748")
+
 	WithOptions(
 		ProxyFiles(workspaceProxy),
 		WorkspaceFolders("pkg"),
@@ -650,13 +683,12 @@
 -- go.work --
 go 1.17
 
-directory (
+use (
 	./moda/a
 )
 `
 	WithOptions(
 		ProxyFiles(workspaceModuleProxy),
-		Modes(Experimental),
 	).Run(t, multiModule, func(t *testing.T, env *Env) {
 		// Initially, the gopls.mod should cause only the a.com module to be
 		// loaded. Validate this by jumping to a definition in b.com and ensuring
@@ -684,7 +716,7 @@
 		env.WriteWorkspaceFile("go.work", `
 go 1.17
 
-directory (
+use (
 	./moda/a
 	./modb
 )
@@ -695,15 +727,20 @@
 		env.OpenFile("modb/go.mod")
 		env.Await(env.DoneWithOpen())
 
-		var d protocol.PublishDiagnosticsParams
-		env.Await(
-			OnceMet(
-				env.DiagnosticAtRegexpWithMessage("modb/go.mod", `require example.com v1.2.3`, "has not been downloaded"),
-				ReadDiagnostics("modb/go.mod", &d),
-			),
-		)
-		env.ApplyQuickFixes("modb/go.mod", d.Diagnostics)
-		env.Await(env.DiagnosticAtRegexp("modb/b/b.go", "x"))
+		//  TODO(golang/go#50862): the go command drops error messages when using
+		//  go.work, so we need to build our go.mod diagnostics in a different way.
+		if testenv.Go1Point() < 18 {
+			var d protocol.PublishDiagnosticsParams
+			env.Await(
+				OnceMet(
+					env.DiagnosticAtRegexpWithMessage("modb/go.mod", `require example.com v1.2.3`, "has not been downloaded"),
+					ReadDiagnostics("modb/go.mod", &d),
+				),
+			)
+			env.ApplyQuickFixes("modb/go.mod", d.Diagnostics)
+			env.Await(env.DiagnosticAtRegexp("modb/b/b.go", "x"))
+		}
+
 		// Jumping to definition should now go to b.com in the workspace.
 		if err := checkHelloLocation("modb/b/b.go"); err != nil {
 			t.Fatal(err)
@@ -715,7 +752,7 @@
 		env.Await(env.DoneWithOpen())
 		env.SetBufferContent("go.work", `go 1.17
 
-directory (
+use (
 	./moda/a
 )`)
 
@@ -723,7 +760,7 @@
 		// should clear outstanding diagnostics...
 		env.Await(OnceMet(
 			env.DoneWithChange(),
-			EmptyDiagnostics("modb/go.mod"),
+			EmptyOrNoDiagnostics("modb/go.mod"),
 		))
 		// ...but does not yet cause a workspace reload, so we should still jump to modb.
 		if err := checkHelloLocation("modb/b/b.go"); err != nil {
@@ -734,6 +771,161 @@
 		if err := checkHelloLocation("b.com@v1.2.3/b/b.go"); err != nil {
 			t.Fatal(err)
 		}
+
+		// Test Formatting.
+		env.SetBufferContent("go.work", `go 1.18
+  use      (
+
+
+
+		./moda/a
+)
+`) // TODO(matloob): For some reason there's a "start position 7:0 is out of bounds" error when the ")" is on the last character/line in the file. Rob probably knows what's going on.
+		env.SaveBuffer("go.work")
+		env.Await(env.DoneWithSave())
+		gotWorkContents := env.ReadWorkspaceFile("go.work")
+		wantWorkContents := `go 1.18
+
+use (
+	./moda/a
+)
+`
+		if gotWorkContents != wantWorkContents {
+			t.Fatalf("formatted contents of workspace: got %q; want %q", gotWorkContents, wantWorkContents)
+		}
+	})
+}
+
+func TestUseGoWorkDiagnosticMissingModule(t *testing.T) {
+	const files = `
+-- go.work --
+go 1.18
+
+use ./foo
+-- bar/go.mod --
+module example.com/bar
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("go.work")
+		env.Await(
+			env.DiagnosticAtRegexpWithMessage("go.work", "use", "directory ./foo does not contain a module"),
+		)
+		// The following is a regression test against an issue where we weren't
+		// copying the workFile struct field on workspace when a new one was created in
+		// (*workspace).invalidate. Set the buffer content to a working file so that
+		// invalidate recognizes that the workspace has changed and copies over the
+		// workspace struct, and then set the content back to the old contents to make
+		// sure the diagnostic still shows up.
+		env.SetBufferContent("go.work", "go 1.18 \n\n use ./bar\n")
+		env.Await(
+			env.NoDiagnosticAtRegexp("go.work", "use"),
+		)
+		env.SetBufferContent("go.work", "go 1.18 \n\n use ./foo\n")
+		env.Await(
+			env.DiagnosticAtRegexpWithMessage("go.work", "use", "directory ./foo does not contain a module"),
+		)
+	})
+}
+
+func TestUseGoWorkDiagnosticSyntaxError(t *testing.T) {
+	const files = `
+-- go.work --
+go 1.18
+
+usa ./foo
+replace
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("go.work")
+		env.Await(
+			env.DiagnosticAtRegexpWithMessage("go.work", "usa", "unknown directive: usa"),
+			env.DiagnosticAtRegexpWithMessage("go.work", "replace", "usage: replace"),
+		)
+	})
+}
+
+func TestUseGoWorkHover(t *testing.T) {
+	const files = `
+-- go.work --
+go 1.18
+
+use ./foo
+use (
+	./bar
+	./bar/baz
+)
+-- foo/go.mod --
+module example.com/foo
+-- bar/go.mod --
+module example.com/bar
+-- bar/baz/go.mod --
+module example.com/bar/baz
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("go.work")
+
+		tcs := map[string]string{
+			`\./foo`:      "example.com/foo",
+			`(?m)\./bar$`: "example.com/bar",
+			`\./bar/baz`:  "example.com/bar/baz",
+		}
+
+		for hoverRE, want := range tcs {
+			pos := env.RegexpSearch("go.work", hoverRE)
+			got, _ := env.Hover("go.work", pos)
+			if got.Value != want {
+				t.Errorf(`hover on %q: got %q, want %q`, hoverRE, got.Value, want)
+			}
+		}
+	})
+}
+
+func TestExpandToGoWork(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+	const workspace = `
+-- moda/a/go.mod --
+module a.com
+
+require b.com v1.2.3
+-- moda/a/a.go --
+package a
+
+import (
+	"b.com/b"
+)
+
+func main() {
+	var x int
+	_ = b.Hello()
+}
+-- modb/go.mod --
+module b.com
+
+require example.com v1.2.3
+-- modb/b/b.go --
+package b
+
+func Hello() int {
+	var x int
+}
+-- go.work --
+go 1.17
+
+use (
+	./moda/a
+	./modb
+)
+`
+	WithOptions(
+		WorkspaceFolders("moda/a"),
+	).Run(t, workspace, func(t *testing.T, env *Env) {
+		env.OpenFile("moda/a/a.go")
+		env.Await(env.DoneWithOpen())
+		location, _ := env.GoToDefinition("moda/a/a.go", env.RegexpSearch("moda/a/a.go", "Hello"))
+		want := "modb/b/b.go"
+		if !strings.HasSuffix(location, want) {
+			t.Errorf("expected %s, got %v", want, location)
+		}
 	})
 }
 
@@ -817,77 +1009,6 @@
 	})
 }
 
-func TestWorkspaceDirAccess(t *testing.T) {
-	const multiModule = `
--- moda/a/go.mod --
-module a.com
-
--- moda/a/a.go --
-package main
-
-func main() {
-	fmt.Println("Hello")
-}
--- modb/go.mod --
-module b.com
--- modb/b/b.go --
-package main
-
-func main() {
-	fmt.Println("World")
-}
-`
-	WithOptions(
-		Modes(Experimental),
-		SendPID(),
-	).Run(t, multiModule, func(t *testing.T, env *Env) {
-		params := &protocol.ExecuteCommandParams{
-			Command:   command.WorkspaceMetadata.ID(),
-			Arguments: []json.RawMessage{json.RawMessage("{}")},
-		}
-		var result command.WorkspaceMetadataResult
-		env.ExecuteCommand(params, &result)
-
-		if n := len(result.Workspaces); n != 1 {
-			env.T.Fatalf("got %d workspaces, want 1", n)
-		}
-		// Don't factor this out of Server.addFolders. vscode-go expects this
-		// directory.
-		modPath := filepath.Join(result.Workspaces[0].ModuleDir, "go.mod")
-		gotb, err := ioutil.ReadFile(modPath)
-		if err != nil {
-			t.Fatalf("reading expected workspace modfile: %v", err)
-		}
-		got := string(gotb)
-		for _, want := range []string{"a.com v1.9999999.0-goplsworkspace", "b.com v1.9999999.0-goplsworkspace"} {
-			if !strings.Contains(got, want) {
-				// want before got here, since the go.mod is multi-line
-				t.Fatalf("workspace go.mod missing %q. got:\n%s", want, got)
-			}
-		}
-		workdir := env.Sandbox.Workdir.RootURI().SpanURI().Filename()
-		env.WriteWorkspaceFile("gopls.mod", fmt.Sprintf(`
-				module gopls-workspace
-
-				require (
-					a.com v1.9999999.0-goplsworkspace
-				)
-
-				replace a.com => %s/moda/a
-				`, workdir))
-		env.Await(env.DoneWithChangeWatchedFiles())
-		gotb, err = ioutil.ReadFile(modPath)
-		if err != nil {
-			t.Fatalf("reading expected workspace modfile: %v", err)
-		}
-		got = string(gotb)
-		want := "b.com v1.9999999.0-goplsworkspace"
-		if strings.Contains(got, want) {
-			t.Fatalf("workspace go.mod contains unexpected %q. got:\n%s", want, got)
-		}
-	})
-}
-
 func TestDirectoryFiltersLoads(t *testing.T) {
 	// exclude, and its error, should be excluded from the workspace.
 	const files = `
@@ -1081,3 +1202,42 @@
 		)
 	})
 }
+
+func TestAddGoWork(t *testing.T) {
+	const nomod = `
+-- a/go.mod --
+module a.com
+
+go 1.16
+-- a/main.go --
+package main
+
+func main() {}
+-- b/go.mod --
+module b.com
+
+go 1.16
+-- b/main.go --
+package main
+
+func main() {}
+`
+	WithOptions(
+		Modes(Singleton),
+	).Run(t, nomod, func(t *testing.T, env *Env) {
+		env.OpenFile("a/main.go")
+		env.OpenFile("b/main.go")
+		env.Await(
+			DiagnosticAt("a/main.go", 0, 0),
+			DiagnosticAt("b/main.go", 0, 0),
+		)
+		env.WriteWorkspaceFile("go.work", `go 1.16
+
+use (
+	a
+	b
+)
+`)
+		env.Await(NoOutstandingDiagnostics())
+	})
+}
diff --git a/gopls/internal/vulncheck/cache.go b/gopls/internal/vulncheck/cache.go
new file mode 100644
index 0000000..524ccfa
--- /dev/null
+++ b/gopls/internal/vulncheck/cache.go
@@ -0,0 +1,124 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package vulncheck
+
+import (
+	"encoding/json"
+	"go/build"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"time"
+
+	"golang.org/x/vuln/client"
+	"golang.org/x/vuln/osv"
+)
+
+// copy from x/vuln/cmd/govulncheck/cache.go
+
+// NOTE: this cache implementation should be kept internal to the go tooling
+// (i.e. cmd/go/internal/something) so that the vulndb cache is owned by the
+// go command. Also it is currently NOT CONCURRENCY SAFE since it does not
+// implement file locking. If ported to the stdlib it should use
+// cmd/go/internal/lockedfile.
+
+// The cache uses a single JSON index file for each vulnerability database
+// which contains the map from packages to the time the last
+// vulnerability for that package was added/modified and the time that
+// the index was retrieved from the vulnerability database. The JSON
+// format is as follows:
+//
+// $GOPATH/pkg/mod/cache/download/vulndb/{db hostname}/index.json
+//   {
+//       Retrieved time.Time
+//       Index client.DBIndex
+//   }
+//
+// Each package also has a JSON file which contains the array of vulnerability
+// entries for the package. The JSON format is as follows:
+//
+// $GOPATH/pkg/mod/cache/download/vulndb/{db hostname}/{import path}/vulns.json
+//   []*osv.Entry
+
+// fsCache is a file-system cache implementing osv.Cache.
+// TODO: make the cache thread-safe.
+type fsCache struct {
+	rootDir string
+}
+
+// use cfg.GOMODCACHE available in cmd/go/internal?
+var defaultCacheRoot = filepath.Join(build.Default.GOPATH, "/pkg/mod/cache/download/vulndb")
+
+func defaultCache() *fsCache {
+	return &fsCache{rootDir: defaultCacheRoot}
+}
+
+type cachedIndex struct {
+	Retrieved time.Time
+	Index     client.DBIndex
+}
+
+func (c *fsCache) ReadIndex(dbName string) (client.DBIndex, time.Time, error) {
+	b, err := ioutil.ReadFile(filepath.Join(c.rootDir, dbName, "index.json"))
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, time.Time{}, nil
+		}
+		return nil, time.Time{}, err
+	}
+	var index cachedIndex
+	if err := json.Unmarshal(b, &index); err != nil {
+		return nil, time.Time{}, err
+	}
+	return index.Index, index.Retrieved, nil
+}
+
+func (c *fsCache) WriteIndex(dbName string, index client.DBIndex, retrieved time.Time) error {
+	path := filepath.Join(c.rootDir, dbName)
+	if err := os.MkdirAll(path, 0755); err != nil {
+		return err
+	}
+	j, err := json.Marshal(cachedIndex{
+		Index:     index,
+		Retrieved: retrieved,
+	})
+	if err != nil {
+		return err
+	}
+	if err := ioutil.WriteFile(filepath.Join(path, "index.json"), j, 0666); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (c *fsCache) ReadEntries(dbName string, p string) ([]*osv.Entry, error) {
+	b, err := ioutil.ReadFile(filepath.Join(c.rootDir, dbName, p, "vulns.json"))
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, nil
+		}
+		return nil, err
+	}
+	var entries []*osv.Entry
+	if err := json.Unmarshal(b, &entries); err != nil {
+		return nil, err
+	}
+	return entries, nil
+}
+
+func (c *fsCache) WriteEntries(dbName string, p string, entries []*osv.Entry) error {
+	path := filepath.Join(c.rootDir, dbName, p)
+	if err := os.MkdirAll(path, 0777); err != nil {
+		return err
+	}
+	j, err := json.Marshal(entries)
+	if err != nil {
+		return err
+	}
+	if err := ioutil.WriteFile(filepath.Join(path, "vulns.json"), j, 0666); err != nil {
+		return err
+	}
+	return nil
+}
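The file-system cache above is consumed by the x/vuln database client through its HTTPCache option (command.go below wires it up the same way). A minimal sketch of that wiring, assuming it lives inside the vulncheck package since fsCache and defaultCache are unexported; the helper name is hypothetical:

	package vulncheck

	import "golang.org/x/vuln/client"

	// newCachedDBClient (hypothetical helper) shows how the cache plugs into the
	// vulnerability database client: index.json and vulns.json files then persist
	// under defaultCacheRoot ($GOPATH/pkg/mod/cache/download/vulndb) between runs.
	func newCachedDBClient(sources []string) (client.Client, error) {
		dbClient, err := client.NewClient(sources, client.Options{HTTPCache: defaultCache()})
		if err != nil {
			return nil, err
		}
		return dbClient, nil
	}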
diff --git a/gopls/internal/vulncheck/command.go b/gopls/internal/vulncheck/command.go
new file mode 100644
index 0000000..32b98ae
--- /dev/null
+++ b/gopls/internal/vulncheck/command.go
@@ -0,0 +1,139 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package vulncheck
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"strings"
+
+	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/internal/lsp/command"
+	"golang.org/x/vuln/client"
+	"golang.org/x/vuln/vulncheck"
+)
+
+func init() {
+	Govulncheck = govulncheck
+}
+
+func govulncheck(ctx context.Context, cfg *packages.Config, args command.VulncheckArgs) (res command.VulncheckResult, _ error) {
+	if args.Pattern == "" {
+		args.Pattern = "."
+	}
+
+	dbClient, err := client.NewClient(findGOVULNDB(cfg), client.Options{HTTPCache: defaultCache()})
+	if err != nil {
+		return res, err
+	}
+
+	c := cmd{Client: dbClient}
+	vulns, err := c.Run(ctx, cfg, args.Pattern)
+	if err != nil {
+		return res, err
+	}
+
+	res.Vuln = vulns
+	return res, err
+}
+
+func findGOVULNDB(cfg *packages.Config) []string {
+	for _, kv := range cfg.Env {
+		if strings.HasPrefix(kv, "GOVULNDB=") {
+			return strings.Split(kv[len("GOVULNDB="):], ",")
+		}
+	}
+	if GOVULNDB := os.Getenv("GOVULNDB"); GOVULNDB != "" {
+		return strings.Split(GOVULNDB, ",")
+	}
+	return []string{"https://storage.googleapis.com/go-vulndb"}
+}
+
+type Vuln = command.Vuln
+type CallStack = command.CallStack
+type StackEntry = command.StackEntry
+
+// cmd is an in-process govulncheck command runner
+// that uses the provided client.Client.
+type cmd struct {
+	Client client.Client
+}
+
+// Run runs govulncheck after loading packages with the provided packages.Config.
+func (c *cmd) Run(ctx context.Context, cfg *packages.Config, patterns ...string) (_ []Vuln, err error) {
+	// TODO: how&where can we ensure cfg is the right config for the given patterns?
+
+	// vulncheck.Source may panic if the packages are incomplete. (e.g. broken code or failed dependency fetch)
+	defer func() {
+		if r := recover(); r != nil {
+			err = fmt.Errorf("cannot run vulncheck: %v", r)
+		}
+	}()
+	return c.run(ctx, cfg, patterns)
+}
+
+func (c *cmd) run(ctx context.Context, packagesCfg *packages.Config, patterns []string) ([]Vuln, error) {
+	packagesCfg.Mode |= packages.NeedModule | packages.NeedName | packages.NeedFiles |
+		packages.NeedCompiledGoFiles | packages.NeedImports | packages.NeedTypes |
+		packages.NeedTypesSizes | packages.NeedSyntax | packages.NeedTypesInfo | packages.NeedDeps
+
+	loadedPkgs, err := packages.Load(packagesCfg, patterns...)
+	if err != nil {
+		return nil, err
+	}
+	pkgs := vulncheck.Convert(loadedPkgs)
+	res, err := vulncheck.Source(ctx, pkgs, &vulncheck.Config{
+		Client:      c.Client,
+		ImportsOnly: false,
+	})
+	cs := vulncheck.CallStacks(res)
+
+	return toVulns(loadedPkgs, cs)
+
+	// TODO: add import graphs.
+}
+
+func packageModule(p *packages.Package) *packages.Module {
+	m := p.Module
+	if m == nil {
+		return nil
+	}
+	if r := m.Replace; r != nil {
+		return r
+	}
+	return m
+}
+
+func toVulns(pkgs []*packages.Package, callstacks map[*vulncheck.Vuln][]vulncheck.CallStack) ([]Vuln, error) {
+	// Build a map from module paths to versions.
+	moduleVersions := map[string]string{}
+	packages.Visit(pkgs, nil, func(p *packages.Package) {
+		if m := packageModule(p); m != nil {
+			moduleVersions[m.Path] = m.Version
+		}
+	})
+
+	var vulns []Vuln
+	for v, trace := range callstacks {
+		vuln := Vuln{
+			ID:             v.OSV.ID,
+			Details:        v.OSV.Details,
+			Aliases:        v.OSV.Aliases,
+			Symbol:         v.Symbol,
+			PkgPath:        v.PkgPath,
+			ModPath:        v.ModPath,
+			URL:            href(v.OSV),
+			CurrentVersion: moduleVersions[v.ModPath],
+			FixedVersion:   fixedVersion(v.OSV),
+			CallStacks:     toCallStacks(trace),
+		}
+		vulns = append(vulns, vuln)
+	}
+	return vulns, nil
+}
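A short sketch of driving the entry point defined above: load packages from a module directory and print each reported Vuln. The directory and helper name are hypothetical; Pattern falls back to "." inside govulncheck as shown, and like command.go this would need the go1.18 build constraint.

	package vulncheck

	import (
		"context"
		"fmt"

		"golang.org/x/tools/go/packages"
		"golang.org/x/tools/internal/lsp/command"
	)

	// runExample (hypothetical) loads packages rooted at dir and reports each
	// vulnerability found by the in-process runner above.
	func runExample(ctx context.Context, dir string) error {
		cfg := &packages.Config{
			Context: ctx,
			Dir:     dir, // hypothetical module root
		}
		res, err := govulncheck(ctx, cfg, command.VulncheckArgs{Pattern: "./..."})
		if err != nil {
			return err
		}
		for _, v := range res.Vuln {
			fmt.Printf("%s: %s.%s (current %s, fixed %s)\n",
				v.ID, v.PkgPath, v.Symbol, v.CurrentVersion, v.FixedVersion)
		}
		return nil
	}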
diff --git a/gopls/internal/vulncheck/command_test.go b/gopls/internal/vulncheck/command_test.go
new file mode 100644
index 0000000..93fd9b9
--- /dev/null
+++ b/gopls/internal/vulncheck/command_test.go
@@ -0,0 +1,378 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package vulncheck
+
+import (
+	"bytes"
+	"context"
+	"fmt"
+	"os"
+	"path/filepath"
+	"sort"
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+	"github.com/google/go-cmp/cmp/cmpopts"
+	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/internal/lsp/cache"
+	"golang.org/x/tools/internal/lsp/fake"
+	"golang.org/x/tools/internal/lsp/source"
+	"golang.org/x/tools/internal/lsp/tests"
+	"golang.org/x/vuln/client"
+	"golang.org/x/vuln/osv"
+	"golang.org/x/vuln/vulncheck"
+)
+
+func TestCmd_Run(t *testing.T) {
+	runTest(t, workspace1, proxy1, func(ctx context.Context, snapshot source.Snapshot) {
+		cmd := &cmd{Client: testClient1}
+		cfg := packagesCfg(ctx, snapshot)
+		result, err := cmd.Run(ctx, cfg, "./...")
+		if err != nil {
+			t.Fatal(err)
+		}
+		// Check that we find the right number of vulnerabilities.
+		// There should be three entries as there are three vulnerable
+		// symbols in the two import-reachable OSVs.
+		var got []report
+		for _, v := range result {
+			got = append(got, toReport(v))
+		}
+
+		var want = []report{
+			{
+				Vuln: Vuln{
+					ID:             "GO-2022-01",
+					Symbol:         "VulnData.Vuln1",
+					PkgPath:        "golang.org/amod/avuln",
+					ModPath:        "golang.org/amod",
+					URL:            "https://pkg.go.dev/vuln/GO-2022-01",
+					CurrentVersion: "v1.1.3",
+					FixedVersion:   "v1.0.4",
+				},
+				CallStacksStr: []string{
+					"golang.org/cmod/c.I.t0 called from golang.org/entry/x.X [approx.] (x.go:8)\n" +
+						"golang.org/amod/avuln.VulnData.Vuln1 (avuln.go:3)\n",
+				},
+			},
+			{
+				Vuln: Vuln{
+					ID:             "GO-2022-01",
+					Symbol:         "VulnData.Vuln2",
+					PkgPath:        "golang.org/amod/avuln",
+					ModPath:        "golang.org/amod",
+					URL:            "https://pkg.go.dev/vuln/GO-2022-01",
+					CurrentVersion: "v1.1.3",
+					FixedVersion:   "v1.0.4",
+				},
+				CallStacksStr: []string{
+					"C1 called from golang.org/entry/x.X (x.go:8)\n" +
+						"Vuln2 called from golang.org/cmod/c.C1 (c.go:13)\n" +
+						"golang.org/amod/avuln.VulnData.Vuln2 (avuln.go:4)\n",
+				},
+			},
+			{
+				Vuln: Vuln{
+					ID:             "GO-2022-02",
+					Symbol:         "Vuln",
+					PkgPath:        "golang.org/bmod/bvuln",
+					ModPath:        "golang.org/bmod",
+					URL:            "https://pkg.go.dev/vuln/GO-2022-02",
+					CurrentVersion: "v0.5.0",
+				},
+				CallStacksStr: []string{
+					"t0 called from golang.org/entry/y.Y [approx.] (y.go:5)\n" +
+						"golang.org/bmod/bvuln.Vuln (bvuln.go:2)\n",
+					"Y called from golang.org/entry/x.CallY (x.go:12)\n" +
+						"t0 called from golang.org/entry/y.Y [approx.] (y.go:5)\n" +
+						"golang.org/bmod/bvuln.Vuln (bvuln.go:2)\n",
+				},
+			},
+		}
+		// sort reports for stability before comparison.
+		for _, rpts := range [][]report{got, want} {
+			sort.Slice(rpts, func(i, j int) bool {
+				a, b := rpts[i], rpts[j]
+				if a.ID != b.ID {
+					return a.ID < b.ID
+				}
+				if a.PkgPath != b.PkgPath {
+					return a.PkgPath < b.PkgPath
+				}
+				return a.Symbol < b.Symbol
+			})
+		}
+		if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(report{}, "Vuln.CallStacks")); diff != "" {
+			t.Error(diff)
+		}
+
+	})
+}
+
+type report struct {
+	Vuln
+	// CallStacksStr is the stringified form of Vuln.CallStacks.
+	CallStacksStr []string
+}
+
+func toReport(v Vuln) report {
+	var r = report{Vuln: v}
+	for _, s := range v.CallStacks {
+		r.CallStacksStr = append(r.CallStacksStr, CallStackString(s))
+	}
+	return r
+}
+
+func CallStackString(callstack CallStack) string {
+	var b bytes.Buffer
+	for _, entry := range callstack {
+		fname := filepath.Base(entry.URI.SpanURI().Filename())
+		fmt.Fprintf(&b, "%v (%v:%d)\n", entry.Name, fname, entry.Pos.Line)
+	}
+	return b.String()
+}
+
+const workspace1 = `
+-- go.mod --
+module golang.org/entry
+
+require (
+	golang.org/cmod v1.1.3
+)
+go 1.18
+-- x/x.go --
+package x
+
+import 	(
+   "golang.org/cmod/c"
+   "golang.org/entry/y"
+)
+
+func X() {
+	c.C1().Vuln1() // vuln use: X -> Vuln1
+}
+
+func CallY() {
+	y.Y()  // vuln use: CallY -> y.Y -> bvuln.Vuln 
+}
+
+-- y/y.go --
+package y
+
+import "golang.org/cmod/c"
+
+func Y() {
+	c.C2()() // vuln use: Y -> bvuln.Vuln
+}
+`
+
+const proxy1 = `
+-- golang.org/cmod@v1.1.3/go.mod --
+module golang.org/cmod
+
+go 1.12
+-- golang.org/cmod@v1.1.3/c/c.go --
+package c
+
+import (
+	"golang.org/amod/avuln"
+	"golang.org/bmod/bvuln"
+)
+
+type I interface {
+	Vuln1()
+}
+
+func C1() I {
+	v := avuln.VulnData{}
+	v.Vuln2() // vuln use
+	return v
+}
+
+func C2() func() {
+	return bvuln.Vuln
+}
+-- golang.org/amod@v1.1.3/go.mod --
+module golang.org/amod
+
+go 1.14
+-- golang.org/amod@v1.1.3/avuln/avuln.go --
+package avuln
+
+type VulnData struct {}
+func (v VulnData) Vuln1() {}
+func (v VulnData) Vuln2() {}
+-- golang.org/bmod@v0.5.0/go.mod --
+module golang.org/bmod
+
+go 1.14
+-- golang.org/bmod@v0.5.0/bvuln/bvuln.go --
+package bvuln
+
+func Vuln() {
+	// something evil
+}
+`
+
+// testClient1 contains the following test vulnerabilities:
+//   golang.org/amod/avuln.{VulnData.Vuln1, VulnData.Vuln2}
+//   golang.org/bmod/bvuln.{Vuln}
+var testClient1 = &mockClient{
+	ret: map[string][]*osv.Entry{
+		"golang.org/amod": {
+			{
+				ID: "GO-2022-01",
+				References: []osv.Reference{
+					{
+						Type: "href",
+						URL:  "pkg.go.dev/vuln/GO-2022-01",
+					},
+				},
+				Affected: []osv.Affected{{
+					Package:           osv.Package{Name: "golang.org/amod/avuln"},
+					Ranges:            osv.Affects{{Type: osv.TypeSemver, Events: []osv.RangeEvent{{Introduced: "1.0.0"}, {Fixed: "1.0.4"}, {Introduced: "1.1.2"}}}},
+					EcosystemSpecific: osv.EcosystemSpecific{Symbols: []string{"VulnData.Vuln1", "VulnData.Vuln2"}},
+				}},
+			},
+		},
+		"golang.org/bmod": {
+			{
+				ID: "GO-2022-02",
+				Affected: []osv.Affected{{
+					Package:           osv.Package{Name: "golang.org/bmod/bvuln"},
+					Ranges:            osv.Affects{{Type: osv.TypeSemver}},
+					EcosystemSpecific: osv.EcosystemSpecific{Symbols: []string{"Vuln"}},
+				}},
+			},
+		},
+	},
+}
+
+var goldenReport1 = []string{`
+{
+	"id": "GO-2022-01",
+	"symbol": "VulnData.Vuln1",
+	"pkg_path": "golang.org/amod/avuln",
+	"mod_path": "golang.org/amod",
+	"url": "https://pkg.go.dev/vuln/GO-2022-01",
+	"current_version": "v1.1.3",
+	"fixed_version": "v1.0.4",
+	"call_stacks": [
+	 "golang.org/cmod/c.I.t0 called from golang.org/entry/x.X [approx.] (x.go:8)\ngolang.org/amod/avuln.VulnData.Vuln1 (avuln.go:3)\n\n"
+	]
+}
+`,
+	`
+{
+	"id": "GO-2022-02",
+	"symbol": "Vuln",
+	"pkg_path": "golang.org/bmod/bvuln",
+	"mod_path": "golang.org/bmod",
+	"url": "https://pkg.go.dev/vuln/GO-2022-02",
+	"current_version": "v0.5.0",
+	"call_stacks": [
+	 "t0 called from golang.org/entry/y.Y [approx.] (y.go:5)\ngolang.org/bmod/bvuln.Vuln (bvuln.go:2)\n\n",
+	 "Y called from golang.org/entry/x.CallY (x.go:12)\nt0 called from golang.org/entry/y.Y [approx.] (y.go:5)\ngolang.org/bmod/bvuln.Vuln (bvuln.go:2)\n\n"
+	]
+}
+`,
+	`
+{
+	"id": "GO-2022-01",
+	"symbol": "VulnData.Vuln2",
+	"pkg_path": "golang.org/amod/avuln",
+	"mod_path": "golang.org/amod",
+	"url": "https://pkg.go.dev/vuln/GO-2022-01",
+	"current_version": "v1.1.3",
+	"fixed_version": "v1.0.4",
+	"call_stacks": [
+	 "C1 called from golang.org/entry/x.X (x.go:8)\nVuln2 called from golang.org/cmod/c.C1 (c.go:13)\ngolang.org/amod/avuln.VulnData.Vuln2 (avuln.go:4)\n\n"
+	]
+}
+`,
+}
+
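+// mockClient is a client.Client stub that returns canned osv.Entry values keyed by module path.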
+type mockClient struct {
+	client.Client
+	ret map[string][]*osv.Entry
+}
+
+func (mc *mockClient) GetByModule(ctx context.Context, a string) ([]*osv.Entry, error) {
+	return mc.ret[a], nil
+}
+
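+// runTest builds a fake workspace and module proxy from the given txtar
+// data, creates a gopls session and view for it, and invokes test with the
+// initial snapshot.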
+func runTest(t *testing.T, workspaceData, proxyData string, test func(context.Context, source.Snapshot)) {
+	ws, err := fake.NewSandbox(&fake.SandboxConfig{
+		Files:      fake.UnpackTxt(workspaceData),
+		ProxyFiles: fake.UnpackTxt(proxyData),
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer ws.Close()
+
+	ctx := tests.Context(t)
+
+	// Populate the module cache and auto-generate the go.sum file at the root.
+	dir := ws.Workdir.RootURI().SpanURI().Filename()
+	if err := ws.RunGoCommand(ctx, dir, "list", []string{"-mod=mod", "..."}, true); err != nil {
+		t.Fatal(err)
+	}
+
+	cache := cache.New(nil)
+	session := cache.NewSession(ctx)
+	options := source.DefaultOptions().Clone()
+	tests.DefaultOptions(options)
+	session.SetOptions(options)
+	envs := []string{}
+	for k, v := range ws.GoEnv() {
+		envs = append(envs, k+"="+v)
+	}
+	options.SetEnvSlice(envs)
+	name := ws.RootDir()
+	folder := ws.Workdir.RootURI().SpanURI()
+	view, snapshot, release, err := session.NewView(ctx, name, folder, options)
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer release()
+	defer view.Shutdown(ctx)
+
+	test(ctx, snapshot)
+}
+
+func sortStrs(s []string) []string {
+	sort.Strings(s)
+	return s
+}
+
+func pkgPaths(pkgs []*vulncheck.Package) []string {
+	var r []string
+	for _, p := range pkgs {
+		r = append(r, p.PkgPath)
+	}
+	return sortStrs(r)
+}
+
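+// packagesCfg derives a packages.Config (context, env, build flags, and
+// working directory) from the snapshot's view.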
+// TODO: expose this as a method of Snapshot.
+func packagesCfg(ctx context.Context, snapshot source.Snapshot) *packages.Config {
+	view := snapshot.View()
+	viewBuildFlags := view.Options().BuildFlags
+	var viewEnv []string
+	if e := view.Options().EnvSlice(); e != nil {
+		viewEnv = append(os.Environ(), e...)
+	}
+	return &packages.Config{
+		// Mode will be set by cmd.Run.
+		Context:    ctx,
+		Tests:      true,
+		BuildFlags: viewBuildFlags,
+		Env:        viewEnv,
+		Dir:        view.Folder().Filename(),
+	}
+}
diff --git a/gopls/internal/vulncheck/util.go b/gopls/internal/vulncheck/util.go
new file mode 100644
index 0000000..a85b55b
--- /dev/null
+++ b/gopls/internal/vulncheck/util.go
@@ -0,0 +1,130 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package vulncheck
+
+import (
+	"fmt"
+	"go/token"
+	"strings"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/vuln/osv"
+	"golang.org/x/vuln/vulncheck"
+)
+
+// fixedVersion returns the module version that contains a fix for the
+// vulnerability described by info, in Go module version form
+// (with the leading "v" prefix).
+// It returns an empty string if there is no reported fix.
+func fixedVersion(info *osv.Entry) string {
+	var fixed string
+	for _, a := range info.Affected {
+		for _, r := range a.Ranges {
+			if r.Type != "SEMVER" {
+				continue
+			}
+			for _, e := range r.Events {
+				if e.Fixed != "" {
+					// assuming the later entry has higher semver.
+					// TODO: check assumption.
+					fixed = "v" + e.Fixed
+				}
+			}
+		}
+	}
+	return fixed
+}
+
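+// maxNumCallStacks caps the number of call stacks converted for each vulnerability.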
+const maxNumCallStacks = 64
+
+func toCallStacks(src []vulncheck.CallStack) []CallStack {
+	if len(src) > maxNumCallStacks {
+		src = src[:maxNumCallStacks]
+	}
+	var dest []CallStack
+	for _, s := range src {
+		dest = append(dest, toCallStack(s))
+	}
+	return dest
+}
+
+func toCallStack(src vulncheck.CallStack) CallStack {
+	var dest []StackEntry
+	for _, e := range src {
+		dest = append(dest, toStackEntry(e))
+	}
+	return dest
+}
+
+func toStackEntry(src vulncheck.StackEntry) StackEntry {
+	f, call := src.Function, src.Call
+	pos := f.Pos
+	desc := funcName(f)
+	if src.Call != nil {
+		pos = src.Call.Pos
+		desc = funcNameInCallSite(call) + " called from " + desc
+		if !call.Resolved {
+			// In case of a statically unresolved call site, communicate to the
+			// client that the call was approximately resolved to f.
+			desc += " [approx.]"
+		}
+	}
+	return StackEntry{
+		Name: desc,
+		URI:  filenameToURI(pos),
+		Pos:  posToPosition(pos),
+	}
+}
+
+func funcName(fn *vulncheck.FuncNode) string {
+	return strings.TrimPrefix(fn.String(), "*")
+}
+
+func funcNameInCallSite(call *vulncheck.CallSite) string {
+	if call.RecvType == "" {
+		return call.Name
+	}
+	return fmt.Sprintf("%s.%s", call.RecvType, call.Name)
+}
+
+// href returns a URL embedded in the OSV entry, if any.
+// If no suitable URL is found, it returns the entry's default
+// page on pkg.go.dev/vuln.
+func href(vuln *osv.Entry) string {
+	for _, affected := range vuln.Affected {
+		if url := affected.DatabaseSpecific.URL; url != "" {
+			return url
+		}
+	}
+	for _, r := range vuln.References {
+		if r.Type == "WEB" {
+			return r.URL
+		}
+	}
+	return fmt.Sprintf("https://pkg.go.dev/vuln/%s", vuln.ID)
+}
+
+func filenameToURI(pos *token.Position) protocol.DocumentURI {
+	if pos == nil || pos.Filename == "" {
+		return ""
+	}
+	return protocol.URIFromPath(pos.Filename)
+}
+
+func posToPosition(pos *token.Position) (p protocol.Position) {
+	// token.Position.Line starts from 1, and
+	// LSP protocol's position line is 0-based.
+	if pos != nil {
+		p.Line = uint32(pos.Line - 1)
+		// TODO(hyangah): LSP uses UTF16 column.
+		// We need a utility like span.ToUTF16Column,
+		// but something that does not require file contents.
+	}
+	return p
+}
diff --git a/gopls/internal/vulncheck/vulncheck.go b/gopls/internal/vulncheck/vulncheck.go
new file mode 100644
index 0000000..2c4d0d2
--- /dev/null
+++ b/gopls/internal/vulncheck/vulncheck.go
@@ -0,0 +1,23 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package vulncheck provides an analysis command
+// that runs vulnerability analysis using data from
+// golang.org/x/vuln/vulncheck.
+// This package requires go1.18 or newer.
+package vulncheck
+
+import (
+	"context"
+	"errors"
+
+	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/internal/lsp/command"
+)
+
+// Govulncheck runs the in-process govulncheck implementation.
+// With go1.18+, this is swapped with the real implementation.
+var Govulncheck = func(ctx context.Context, cfg *packages.Config, args command.VulncheckArgs) (res command.VulncheckResult, _ error) {
+	return res, errors.New("not implemented")
+}
diff --git a/gopls/main.go b/gopls/main.go
index 2e099e7..f73eabf 100644
--- a/gopls/main.go
+++ b/gopls/main.go
@@ -13,6 +13,7 @@
 
 import (
 	"context"
+	"golang.org/x/tools/internal/analysisinternal"
 	"os"
 
 	"golang.org/x/tools/gopls/internal/hooks"
@@ -21,6 +22,10 @@
 )
 
 func main() {
+	// In Go 1.18, diagnostics for fuzz tests must not be used by cmd/vet,
+	// so the code for fuzz test diagnostics is gated behind the flag
+	// analysisinternal.DiagnoseFuzzTests. Enable it for gopls.
+	analysisinternal.DiagnoseFuzzTests = true
 	ctx := context.Background()
 	tool.Main(ctx, cmd.New("gopls", "", nil, hooks.Options), os.Args[1:])
 }
diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go
index 01f6e82..78ee2c0 100644
--- a/internal/analysisinternal/analysis.go
+++ b/internal/analysisinternal/analysis.go
@@ -17,6 +17,9 @@
 	"golang.org/x/tools/internal/lsp/fuzzy"
 )
 
+// DiagnoseFuzzTests is a flag to gate diagnostics for fuzz tests in Go 1.18.
+var DiagnoseFuzzTests bool = false
+
 var (
 	GetTypeErrors func(p interface{}) []types.Error
 	SetTypeErrors func(p interface{}, errors []types.Error)
diff --git a/internal/gocommand/invoke.go b/internal/gocommand/invoke.go
index 8659a0c..f753368 100644
--- a/internal/gocommand/invoke.go
+++ b/internal/gocommand/invoke.go
@@ -9,7 +9,6 @@
 	"bytes"
 	"context"
 	"fmt"
-	exec "golang.org/x/sys/execabs"
 	"io"
 	"os"
 	"regexp"
@@ -18,6 +17,8 @@
 	"sync"
 	"time"
 
+	exec "golang.org/x/sys/execabs"
+
 	"golang.org/x/tools/internal/event"
 )
 
@@ -131,9 +132,16 @@
 	Verb       string
 	Args       []string
 	BuildFlags []string
-	ModFlag    string
-	ModFile    string
-	Overlay    string
+
+	// If ModFlag is set, the go command is invoked with -mod=ModFlag.
+	ModFlag string
+
+	// If ModFile is set, the go command is invoked with -modfile=ModFile.
+	ModFile string
+
+	// If Overlay is set, the go command is invoked with -overlay=Overlay.
+	Overlay string
+
 	// If CleanEnv is set, the invocation will run only with the environment
 	// in Env, not starting with os.Environ.
 	CleanEnv   bool
diff --git a/internal/gocommand/vendor.go b/internal/gocommand/vendor.go
index 5e75bd6..2d3d408 100644
--- a/internal/gocommand/vendor.go
+++ b/internal/gocommand/vendor.go
@@ -38,10 +38,10 @@
 // with the supplied context.Context and Invocation. The Invocation can contain pre-defined fields,
 // of which only Verb and Args are modified to run the appropriate Go command.
 // Inspired by setDefaultBuildMod in modload/init.go
-func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) {
+func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (bool, *ModuleJSON, error) {
 	mainMod, go114, err := getMainModuleAnd114(ctx, inv, r)
 	if err != nil {
-		return nil, false, err
+		return false, nil, err
 	}
 
 	// We check the GOFLAGS to see if there is anything overridden or not.
@@ -49,7 +49,7 @@
 	inv.Args = []string{"GOFLAGS"}
 	stdout, err := r.Run(ctx, inv)
 	if err != nil {
-		return nil, false, err
+		return false, nil, err
 	}
 	goflags := string(bytes.TrimSpace(stdout.Bytes()))
 	matches := modFlagRegexp.FindStringSubmatch(goflags)
@@ -57,25 +57,27 @@
 	if len(matches) != 0 {
 		modFlag = matches[1]
 	}
-	if modFlag != "" {
-		// Don't override an explicit '-mod=' argument.
-		return mainMod, modFlag == "vendor", nil
+	// Don't override an explicit '-mod=' argument.
+	if modFlag == "vendor" {
+		return true, mainMod, nil
+	} else if modFlag != "" {
+		return false, nil, nil
 	}
 	if mainMod == nil || !go114 {
-		return mainMod, false, nil
+		return false, nil, nil
 	}
 	// Check 1.14's automatic vendor mode.
 	if fi, err := os.Stat(filepath.Join(mainMod.Dir, "vendor")); err == nil && fi.IsDir() {
 		if mainMod.GoVersion != "" && semver.Compare("v"+mainMod.GoVersion, "v1.14") >= 0 {
 			// The Go version is at least 1.14, and a vendor directory exists.
 			// Set -mod=vendor by default.
-			return mainMod, true, nil
+			return true, mainMod, nil
 		}
 	}
-	return mainMod, false, nil
+	return false, nil, nil
 }
 
-// getMainModuleAnd114 gets the main module's information and whether the
+// getMainModuleAnd114 returns the information for one of the main modules, and whether the
 // go command in use is 1.14+. This is the information needed to figure out
 // if vendoring should be enabled.
 func getMainModuleAnd114(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) {
diff --git a/internal/imports/fix_test.go b/internal/imports/fix_test.go
index bfd3cfa..ef0f8ae 100644
--- a/internal/imports/fix_test.go
+++ b/internal/imports/fix_test.go
@@ -659,6 +659,37 @@
 `,
 	},
 
+	// Blank line can be added even when first import of group has comment with quote
+	{
+		name: "new_section_where_trailing_comment_has_quote",
+		in: `package main
+
+import (
+	"context"
+	bar "local.com/bar"
+	baz "local.com/baz"
+	buzz "local.com/buzz"
+	"github.com/golang/snappy" // this is a "typical" import
+)
+
+var _, _, _, _, _ = context.Background, bar.B, baz.B, buzz.B, snappy.ErrCorrupt
+`,
+		out: `package main
+
+import (
+	"context"
+
+	"github.com/golang/snappy" // this is a "typical" import
+
+	bar "local.com/bar"
+	baz "local.com/baz"
+	buzz "local.com/buzz"
+)
+
+var _, _, _, _, _ = context.Background, bar.B, baz.B, buzz.B, snappy.ErrCorrupt
+`,
+	},
+
 	// Non-idempotent comment formatting
 	// golang.org/issue/8035
 	{
diff --git a/internal/imports/imports.go b/internal/imports/imports.go
index 2815edc..2597398 100644
--- a/internal/imports/imports.go
+++ b/internal/imports/imports.go
@@ -306,7 +306,7 @@
 	return b.Bytes()
 }
 
-var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`)
+var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+?)"`)
 
 func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) {
 	var out bytes.Buffer
diff --git a/internal/imports/mkstdlib.go b/internal/imports/mkstdlib.go
index 8eb4e12..47714bf 100644
--- a/internal/imports/mkstdlib.go
+++ b/internal/imports/mkstdlib.go
@@ -70,6 +70,7 @@
 		mustOpen(api("go1.15.txt")),
 		mustOpen(api("go1.16.txt")),
 		mustOpen(api("go1.17.txt")),
+		mustOpen(api("go1.18.txt")),
 
 		// The API of the syscall/js package needs to be computed explicitly,
 		// because it's not included in the GOROOT/api/go1.*.txt files at this time.
diff --git a/internal/imports/mod.go b/internal/imports/mod.go
index dff6d55..2bcf41f 100644
--- a/internal/imports/mod.go
+++ b/internal/imports/mod.go
@@ -34,7 +34,8 @@
 	scannedRoots   map[gopathwalk.Root]bool
 
 	initialized   bool
-	main          *gocommand.ModuleJSON
+	mains         []*gocommand.ModuleJSON
+	mainByDir     map[string]*gocommand.ModuleJSON
 	modsByModPath []*gocommand.ModuleJSON // All modules, ordered by # of path components in module Path...
 	modsByDir     []*gocommand.ModuleJSON // ...or Dir.
 
@@ -69,21 +70,21 @@
 		Logf:       r.env.Logf,
 		WorkingDir: r.env.WorkingDir,
 	}
-	mainMod, vendorEnabled, err := gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner)
+	vendorEnabled, mainModVendor, err := gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner)
 	if err != nil {
 		return err
 	}
 
-	if mainMod != nil && vendorEnabled {
+	if mainModVendor != nil && vendorEnabled {
 		// Vendor mode is on, so all the non-Main modules are irrelevant,
 		// and we need to search /vendor for everything.
-		r.main = mainMod
+		r.mains = []*gocommand.ModuleJSON{mainModVendor}
 		r.dummyVendorMod = &gocommand.ModuleJSON{
 			Path: "",
-			Dir:  filepath.Join(mainMod.Dir, "vendor"),
+			Dir:  filepath.Join(mainModVendor.Dir, "vendor"),
 		}
-		r.modsByModPath = []*gocommand.ModuleJSON{mainMod, r.dummyVendorMod}
-		r.modsByDir = []*gocommand.ModuleJSON{mainMod, r.dummyVendorMod}
+		r.modsByModPath = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
+		r.modsByDir = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
 	} else {
 		// Vendor mode is off, so run go list -m ... to find everything.
 		err := r.initAllMods()
@@ -122,8 +123,10 @@
 	r.roots = []gopathwalk.Root{
 		{filepath.Join(goenv["GOROOT"], "/src"), gopathwalk.RootGOROOT},
 	}
-	if r.main != nil {
-		r.roots = append(r.roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
+	r.mainByDir = make(map[string]*gocommand.ModuleJSON)
+	for _, main := range r.mains {
+		r.roots = append(r.roots, gopathwalk.Root{main.Dir, gopathwalk.RootCurrentModule})
+		r.mainByDir[main.Dir] = main
 	}
 	if vendorEnabled {
 		r.roots = append(r.roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
@@ -189,7 +192,7 @@
 		r.modsByModPath = append(r.modsByModPath, mod)
 		r.modsByDir = append(r.modsByDir, mod)
 		if mod.Main {
-			r.main = mod
+			r.mains = append(r.mains, mod)
 		}
 	}
 	return nil
@@ -609,7 +612,7 @@
 	}
 	switch root.Type {
 	case gopathwalk.RootCurrentModule:
-		importPath = path.Join(r.main.Path, filepath.ToSlash(subdir))
+		importPath = path.Join(r.mainByDir[root.Path].Path, filepath.ToSlash(subdir))
 	case gopathwalk.RootModuleCache:
 		matches := modCacheRegexp.FindStringSubmatch(subdir)
 		if len(matches) == 0 {
diff --git a/internal/imports/mod_test.go b/internal/imports/mod_test.go
index 51bc967..5f71805 100644
--- a/internal/imports/mod_test.go
+++ b/internal/imports/mod_test.go
@@ -552,6 +552,336 @@
 	mt.assertModuleFoundInDir("example.com/vv", "v", `main/v12$`)
 }
 
+// Tests that go.work files are respected.
+func TestModWorkspace(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+
+	mt := setup(t, `
+-- go.work --
+go 1.18
+
+use (
+	./a
+	./b
+)
+-- a/go.mod --
+module example.com/a
+
+go 1.18
+-- a/a.go --
+package a
+-- b/go.mod --
+module example.com/b
+
+go 1.18
+-- b/b.go --
+package b
+`, "")
+	defer mt.cleanup()
+
+	mt.assertModuleFoundInDir("example.com/a", "a", `main/a$`)
+	mt.assertModuleFoundInDir("example.com/b", "b", `main/b$`)
+	mt.assertScanFinds("example.com/a", "a")
+	mt.assertScanFinds("example.com/b", "b")
+}
+
+// Tests replaces in workspaces. Uses the directory layout in the cmd/go
+// work_replace test. It tests both that replaces in go.work files are
+// respected and that a wildcard replace in go.work overrides a versioned replace
+// in go.mod.
+func TestModWorkspaceReplace(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+
+	mt := setup(t, `
+-- go.work --
+use m
+
+replace example.com/dep => ./dep
+replace example.com/other => ./other2
+
+-- m/go.mod --
+module example.com/m
+
+require example.com/dep v1.0.0
+require example.com/other v1.0.0
+
+replace example.com/other v1.0.0 => ./other
+-- m/m.go --
+package m
+
+import "example.com/dep"
+import "example.com/other"
+
+func F() {
+	dep.G()
+	other.H()
+}
+-- dep/go.mod --
+module example.com/dep
+-- dep/dep.go --
+package dep
+
+func G() {
+}
+-- other/go.mod --
+module example.com/other
+-- other/dep.go --
+package other
+
+func G() {
+}
+-- other2/go.mod --
+module example.com/other
+-- other2/dep.go --
+package other2
+
+func G() {
+}
+`, "")
+	defer mt.cleanup()
+
+	mt.assertScanFinds("example.com/m", "m")
+	mt.assertScanFinds("example.com/dep", "dep")
+	mt.assertModuleFoundInDir("example.com/other", "other2", "main/other2$")
+	mt.assertScanFinds("example.com/other", "other2")
+}
+
+// Tests a case where conflicting replaces are overridden by a replace
+// in the go.work file.
+func TestModWorkspaceReplaceOverride(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+
+	mt := setup(t, `-- go.work --
+use m
+use n
+replace example.com/dep => ./dep3
+-- m/go.mod --
+module example.com/m
+
+require example.com/dep v1.0.0
+replace example.com/dep => ./dep1
+-- m/m.go --
+package m
+
+import "example.com/dep"
+
+func F() {
+	dep.G()
+}
+-- n/go.mod --
+module example.com/n
+
+require example.com/dep v1.0.0
+replace example.com/dep => ./dep2
+-- n/n.go --
+package n
+
+import "example.com/dep"
+
+func F() {
+	dep.G()
+}
+-- dep1/go.mod --
+module example.com/dep
+-- dep1/dep.go --
+package dep
+
+func G() {
+}
+-- dep2/go.mod --
+module example.com/dep
+-- dep2/dep.go --
+package dep
+
+func G() {
+}
+-- dep3/go.mod --
+module example.com/dep
+-- dep3/dep.go --
+package dep
+
+func G() {
+}
+`, "")
+
+	mt.assertScanFinds("example.com/m", "m")
+	mt.assertScanFinds("example.com/n", "n")
+	mt.assertScanFinds("example.com/dep", "dep")
+	mt.assertModuleFoundInDir("example.com/dep", "dep", "main/dep3$")
+}
+
+// Tests that the correct versions of modules are found in
+// workspaces with module pruning. This is based on the
+// cmd/go mod_prune_all script test.
+func TestModWorkspacePrune(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+
+	mt := setup(t, `
+-- go.work --
+go 1.18
+
+use (
+	./a
+	./p
+)
+
+replace example.com/b v1.0.0 => ./b
+replace example.com/q v1.0.0 => ./q1_0_0
+replace example.com/q v1.0.5 => ./q1_0_5
+replace example.com/q v1.1.0 => ./q1_1_0
+replace example.com/r v1.0.0 => ./r
+replace example.com/w v1.0.0 => ./w
+replace example.com/x v1.0.0 => ./x
+replace example.com/y v1.0.0 => ./y
+replace example.com/z v1.0.0 => ./z1_0_0
+replace example.com/z v1.1.0 => ./z1_1_0
+
+-- a/go.mod --
+module example.com/a
+
+go 1.18
+
+require example.com/b v1.0.0
+require example.com/z v1.0.0
+-- a/foo.go --
+package main
+
+import "example.com/b"
+
+func main() {
+	b.B()
+}
+-- b/go.mod --
+module example.com/b
+
+go 1.18
+
+require example.com/q v1.1.0
+-- b/b.go --
+package b
+
+func B() {
+}
+-- p/go.mod --
+module example.com/p
+
+go 1.18
+
+require example.com/q v1.0.0
+
+replace example.com/q v1.0.0 => ../q1_0_0
+replace example.com/q v1.1.0 => ../q1_1_0
+-- p/main.go --
+package main
+
+import "example.com/q"
+
+func main() {
+	q.PrintVersion()
+}
+-- q1_0_0/go.mod --
+module example.com/q
+
+go 1.18
+-- q1_0_0/q.go --
+package q
+
+import "fmt"
+
+func PrintVersion() {
+	fmt.Println("version 1.0.0")
+}
+-- q1_0_5/go.mod --
+module example.com/q
+
+go 1.18
+
+require example.com/r v1.0.0
+-- q1_0_5/q.go --
+package q
+
+import _ "example.com/r"
+-- q1_1_0/go.mod --
+module example.com/q
+
+require example.com/w v1.0.0
+require example.com/z v1.1.0
+
+go 1.18
+-- q1_1_0/q.go --
+package q
+
+import _ "example.com/w"
+import _ "example.com/z"
+
+import "fmt"
+
+func PrintVersion() {
+	fmt.Println("version 1.1.0")
+}
+-- r/go.mod --
+module example.com/r
+
+go 1.18
+
+require example.com/r v1.0.0
+-- r/r.go --
+package r
+-- w/go.mod --
+module example.com/w
+
+go 1.18
+
+require example.com/x v1.0.0
+-- w/w.go --
+package w
+-- w/w_test.go --
+package w
+
+import _ "example.com/x"
+-- x/go.mod --
+module example.com/x
+
+go 1.18
+-- x/x.go --
+package x
+-- x/x_test.go --
+package x
+import _ "example.com/y"
+-- y/go.mod --
+module example.com/y
+
+go 1.18
+-- y/y.go --
+package y
+-- z1_0_0/go.mod --
+module example.com/z
+
+go 1.18
+
+require example.com/q v1.0.5
+-- z1_0_0/z.go --
+package z
+
+import _ "example.com/q"
+-- z1_1_0/go.mod --
+module example.com/z
+
+go 1.18
+-- z1_1_0/z.go --
+package z
+`, "")
+
+	mt.assertScanFinds("example.com/w", "w")
+	mt.assertScanFinds("example.com/q", "q")
+	mt.assertScanFinds("example.com/x", "x")
+	mt.assertScanFinds("example.com/z", "z")
+	mt.assertModuleFoundInDir("example.com/w", "w", "main/w$")
+	mt.assertModuleFoundInDir("example.com/q", "q", "main/q1_1_0$")
+	mt.assertModuleFoundInDir("example.com/x", "x", "main/x$")
+	mt.assertModuleFoundInDir("example.com/z", "z", "main/z1_1_0$")
+}
+
 // Tests that we handle GO111MODULE=on with no go.mod file. See #30855.
 func TestNoMainModule(t *testing.T) {
 	testenv.NeedsGo1Point(t, 12)
diff --git a/internal/imports/sortimports.go b/internal/imports/sortimports.go
index be8ffa2..dc52372 100644
--- a/internal/imports/sortimports.go
+++ b/internal/imports/sortimports.go
@@ -9,6 +9,7 @@
 import (
 	"go/ast"
 	"go/token"
+	"log"
 	"sort"
 	"strconv"
 )
@@ -60,6 +61,7 @@
 
 // mergeImports merges all the import declarations into the first one.
 // Taken from golang.org/x/tools/ast/astutil.
+// This does not adjust line numbers properly.
 func mergeImports(fset *token.FileSet, f *ast.File) {
 	if len(f.Decls) <= 1 {
 		return
@@ -237,8 +239,17 @@
 		p := s.Pos()
 		line := fset.File(p).Line(p)
 		for previousLine := line - 1; previousLine >= firstSpecLine; {
-			fset.File(p).MergeLine(previousLine)
-			previousLine--
+			// MergeLine can panic (golang/go#50329). Avoid the panic at the cost
+			// of not removing the blank line.
+			if previousLine > 0 && previousLine < fset.File(p).LineCount() {
+				fset.File(p).MergeLine(previousLine)
+			} else {
+				// Gather some data to diagnose how this could happen.
+				req := "Please report what the imports section of your go file looked like."
+				log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. %s",
+					firstSpecLine, line, previousLine, fset.File(p).LineCount(), req)
+			}
+			previousLine--
 		}
 	}
 	return specs
diff --git a/internal/imports/zstdlib.go b/internal/imports/zstdlib.go
index 7de2be9..437fbb7 100644
--- a/internal/imports/zstdlib.go
+++ b/internal/imports/zstdlib.go
@@ -88,6 +88,7 @@
 		"ContainsAny",
 		"ContainsRune",
 		"Count",
+		"Cut",
 		"Equal",
 		"EqualFold",
 		"ErrTooLarge",
@@ -711,6 +712,11 @@
 		"ValueConverter",
 		"Valuer",
 	},
+	"debug/buildinfo": []string{
+		"BuildInfo",
+		"Read",
+		"ReadFile",
+	},
 	"debug/dwarf": []string{
 		"AddrType",
 		"ArrayType",
@@ -1944,6 +1950,7 @@
 		"R_PPC64_REL24_NOTOC",
 		"R_PPC64_REL32",
 		"R_PPC64_REL64",
+		"R_PPC64_RELATIVE",
 		"R_PPC64_SECTOFF_DS",
 		"R_PPC64_SECTOFF_LO_DS",
 		"R_PPC64_TLS",
@@ -2547,6 +2554,7 @@
 		"Symbol",
 	},
 	"debug/plan9obj": []string{
+		"ErrNoSymbols",
 		"File",
 		"FileHeader",
 		"Magic386",
@@ -2906,6 +2914,7 @@
 		"Importer",
 		"IncDecStmt",
 		"IndexExpr",
+		"IndexListExpr",
 		"Inspect",
 		"InterfaceType",
 		"IsExported",
@@ -3179,6 +3188,7 @@
 		"SUB",
 		"SUB_ASSIGN",
 		"SWITCH",
+		"TILDE",
 		"TYPE",
 		"Token",
 		"UnaryPrec",
@@ -3187,6 +3197,7 @@
 		"XOR_ASSIGN",
 	},
 	"go/types": []string{
+		"ArgumentError",
 		"Array",
 		"AssertableTo",
 		"AssignableTo",
@@ -3205,6 +3216,7 @@
 		"Complex64",
 		"Config",
 		"Const",
+		"Context",
 		"ConvertibleTo",
 		"DefPredeclaredTestFuncs",
 		"Default",
@@ -3224,6 +3236,8 @@
 		"ImporterFrom",
 		"Info",
 		"Initializer",
+		"Instance",
+		"Instantiate",
 		"Int",
 		"Int16",
 		"Int32",
@@ -3254,6 +3268,7 @@
 		"NewChan",
 		"NewChecker",
 		"NewConst",
+		"NewContext",
 		"NewField",
 		"NewFunc",
 		"NewInterface",
@@ -3268,10 +3283,14 @@
 		"NewPointer",
 		"NewScope",
 		"NewSignature",
+		"NewSignatureType",
 		"NewSlice",
 		"NewStruct",
+		"NewTerm",
 		"NewTuple",
 		"NewTypeName",
+		"NewTypeParam",
+		"NewUnion",
 		"NewVar",
 		"Nil",
 		"Object",
@@ -3296,11 +3315,15 @@
 		"StdSizes",
 		"String",
 		"Struct",
+		"Term",
 		"Tuple",
 		"Typ",
 		"Type",
 		"TypeAndValue",
+		"TypeList",
 		"TypeName",
+		"TypeParam",
+		"TypeParamList",
 		"TypeString",
 		"Uint",
 		"Uint16",
@@ -3308,6 +3331,7 @@
 		"Uint64",
 		"Uint8",
 		"Uintptr",
+		"Union",
 		"Universe",
 		"Unsafe",
 		"UnsafePointer",
@@ -4080,9 +4104,11 @@
 		"SRV",
 		"SplitHostPort",
 		"TCPAddr",
+		"TCPAddrFromAddrPort",
 		"TCPConn",
 		"TCPListener",
 		"UDPAddr",
+		"UDPAddrFromAddrPort",
 		"UDPConn",
 		"UnixAddr",
 		"UnixConn",
@@ -4142,6 +4168,7 @@
 		"ListenAndServe",
 		"ListenAndServeTLS",
 		"LocalAddrContextKey",
+		"MaxBytesHandler",
 		"MaxBytesReader",
 		"MethodConnect",
 		"MethodDelete",
@@ -4338,6 +4365,25 @@
 		"ParseDate",
 		"ReadMessage",
 	},
+	"net/netip": []string{
+		"Addr",
+		"AddrFrom16",
+		"AddrFrom4",
+		"AddrFromSlice",
+		"AddrPort",
+		"AddrPortFrom",
+		"IPv4Unspecified",
+		"IPv6LinkLocalAllNodes",
+		"IPv6Unspecified",
+		"MustParseAddr",
+		"MustParseAddrPort",
+		"MustParsePrefix",
+		"ParseAddr",
+		"ParseAddrPort",
+		"ParsePrefix",
+		"Prefix",
+		"PrefixFrom",
+	},
 	"net/rpc": []string{
 		"Accept",
 		"Call",
@@ -4641,6 +4687,8 @@
 		"Method",
 		"New",
 		"NewAt",
+		"Pointer",
+		"PointerTo",
 		"Ptr",
 		"PtrTo",
 		"RecvDir",
@@ -4819,9 +4867,11 @@
 	},
 	"runtime/debug": []string{
 		"BuildInfo",
+		"BuildSetting",
 		"FreeOSMemory",
 		"GCStats",
 		"Module",
+		"ParseBuildInfo",
 		"PrintStack",
 		"ReadBuildInfo",
 		"ReadGCStats",
@@ -4939,11 +4989,13 @@
 	},
 	"strings": []string{
 		"Builder",
+		"Clone",
 		"Compare",
 		"Contains",
 		"ContainsAny",
 		"ContainsRune",
 		"Count",
+		"Cut",
 		"EqualFold",
 		"Fields",
 		"FieldsFunc",
@@ -9793,6 +9845,7 @@
 		"Syscall18",
 		"Syscall6",
 		"Syscall9",
+		"SyscallN",
 		"Sysctl",
 		"SysctlUint32",
 		"Sysctlnode",
@@ -10202,7 +10255,6 @@
 		"Value",
 		"ValueError",
 		"ValueOf",
-		"Wrapper",
 	},
 	"testing": []string{
 		"AllocsPerRun",
@@ -10213,9 +10265,11 @@
 		"CoverBlock",
 		"CoverMode",
 		"Coverage",
+		"F",
 		"Init",
 		"InternalBenchmark",
 		"InternalExample",
+		"InternalFuzzTarget",
 		"InternalTest",
 		"M",
 		"Main",
@@ -10313,9 +10367,11 @@
 		"ActionNode",
 		"BoolNode",
 		"BranchNode",
+		"BreakNode",
 		"ChainNode",
 		"CommandNode",
 		"CommentNode",
+		"ContinueNode",
 		"DotNode",
 		"FieldNode",
 		"IdentifierNode",
@@ -10329,9 +10385,11 @@
 		"Node",
 		"NodeAction",
 		"NodeBool",
+		"NodeBreak",
 		"NodeChain",
 		"NodeCommand",
 		"NodeComment",
+		"NodeContinue",
 		"NodeDot",
 		"NodeField",
 		"NodeIdentifier",
@@ -10727,6 +10785,7 @@
 		"IsSurrogate",
 	},
 	"unicode/utf8": []string{
+		"AppendRune",
 		"DecodeLastRune",
 		"DecodeLastRuneInString",
 		"DecodeRune",
diff --git a/internal/jsonrpc2/serve.go b/internal/jsonrpc2/serve.go
index b9e31a8..d587971 100644
--- a/internal/jsonrpc2/serve.go
+++ b/internal/jsonrpc2/serve.go
@@ -6,7 +6,6 @@
 
 import (
 	"context"
-	"fmt"
 	"io"
 	"net"
 	"os"
@@ -65,47 +64,69 @@
 // the provided server. If idleTimeout is non-zero, ListenAndServe exits after
 // there are no clients for this duration, otherwise it exits only on error.
 func Serve(ctx context.Context, ln net.Listener, server StreamServer, idleTimeout time.Duration) error {
-	ctx, cancel := context.WithCancel(ctx)
-	defer cancel()
-	// Max duration: ~290 years; surely that's long enough.
-	const forever = 1<<63 - 1
-	if idleTimeout <= 0 {
-		idleTimeout = forever
-	}
-	connTimer := time.NewTimer(idleTimeout)
-
 	newConns := make(chan net.Conn)
-	doneListening := make(chan error)
 	closedConns := make(chan error)
-
+	activeConns := 0
+	var acceptErr error
 	go func() {
+		defer close(newConns)
 		for {
-			nc, err := ln.Accept()
-			if err != nil {
-				select {
-				case doneListening <- fmt.Errorf("Accept(): %w", err):
-				case <-ctx.Done():
-				}
+			var nc net.Conn
+			nc, acceptErr = ln.Accept()
+			if acceptErr != nil {
 				return
 			}
 			newConns <- nc
 		}
 	}()
 
-	activeConns := 0
+	ctx, cancel := context.WithCancel(ctx)
+	defer func() {
+		// Signal the Accept goroutine to stop immediately
+		// and terminate all newly-accepted connections until it returns.
+		ln.Close()
+		for nc := range newConns {
+			nc.Close()
+		}
+		// Cancel pending ServeStream callbacks and wait for them to finish.
+		cancel()
+		for activeConns > 0 {
+			err := <-closedConns
+			if !isClosingError(err) {
+				event.Error(ctx, "closed a connection", err)
+			}
+			activeConns--
+		}
+	}()
+
+	// Max duration: ~290 years; surely that's long enough.
+	const forever = 1<<63 - 1
+	if idleTimeout <= 0 {
+		idleTimeout = forever
+	}
+	connTimer := time.NewTimer(idleTimeout)
+	defer connTimer.Stop()
+
 	for {
 		select {
-		case netConn := <-newConns:
+		case netConn, ok := <-newConns:
+			if !ok {
+				return acceptErr
+			}
+			if activeConns == 0 && !connTimer.Stop() {
+				// connTimer.C may receive a value even after Stop returns.
+				// (See https://golang.org/issue/37196.)
+				<-connTimer.C
+			}
 			activeConns++
-			connTimer.Stop()
 			stream := NewHeaderStream(netConn)
 			go func() {
 				conn := NewConn(stream)
-				closedConns <- server.ServeStream(ctx, conn)
+				err := server.ServeStream(ctx, conn)
 				stream.Close()
+				closedConns <- err
 			}()
-		case err := <-doneListening:
-			return err
+
 		case err := <-closedConns:
 			if !isClosingError(err) {
 				event.Error(ctx, "closed a connection", err)
@@ -114,10 +135,12 @@
 			if activeConns == 0 {
 				connTimer.Reset(idleTimeout)
 			}
+
 		case <-connTimer.C:
 			return ErrIdleTimeout
+
 		case <-ctx.Done():
-			return ctx.Err()
+			return nil
 		}
 	}
 }
diff --git a/internal/jsonrpc2_v2/conn.go b/internal/jsonrpc2_v2/conn.go
index 606c3f9..018175e 100644
--- a/internal/jsonrpc2_v2/conn.go
+++ b/internal/jsonrpc2_v2/conn.go
@@ -22,7 +22,8 @@
 // ConnectionOptions itself implements Binder returning itself unmodified, to
 // allow for the simple cases where no per connection information is needed.
 type Binder interface {
-	// Bind is invoked when creating a new connection.
+	// Bind returns the ConnectionOptions to use when establishing the passed-in
+	// Connection.
 	// The connection is not ready to use when Bind is called.
 	Bind(context.Context, *Connection) (ConnectionOptions, error)
 }
@@ -234,10 +235,10 @@
 	return json.Unmarshal(r.result, result)
 }
 
-// Respond deliverers a response to an incoming Call.
-// It is an error to not call this exactly once for any message for which a
-// handler has previously returned ErrAsyncResponse. It is also an error to
-// call this for any other message.
+// Respond delivers a response to an incoming Call.
+//
+// Respond must be called exactly once for any message for which a handler
+// returns ErrAsyncResponse. It must not be called for any other message.
 func (c *Connection) Respond(id ID, result interface{}, rerr error) error {
 	pending := <-c.incomingBox
 	defer func() { c.incomingBox <- pending }()
@@ -321,8 +322,8 @@
 			// cancelled by id
 			if msg.IsCall() {
 				pending := <-c.incomingBox
-				c.incomingBox <- pending
 				pending[msg.ID] = entry
+				c.incomingBox <- pending
 			}
 			// send the message to the incoming queue
 			toQueue <- entry
@@ -346,7 +347,7 @@
 	}
 }
 
-// manageQueue reads incoming requests, attempts to proccess them with the preempter, or queue them
+// manageQueue reads incoming requests, attempts to process them with the preempter, or queue them
 // up for normal handling.
 func (c *Connection) manageQueue(ctx context.Context, preempter Preempter, fromRead <-chan *incoming, toDeliver chan<- *incoming) {
 	defer close(toDeliver)
diff --git a/internal/jsonrpc2_v2/jsonrpc2.go b/internal/jsonrpc2_v2/jsonrpc2.go
index 271f42c..e685584 100644
--- a/internal/jsonrpc2_v2/jsonrpc2.go
+++ b/internal/jsonrpc2_v2/jsonrpc2.go
@@ -15,11 +15,19 @@
 var (
 	// ErrIdleTimeout is returned when serving timed out waiting for new connections.
 	ErrIdleTimeout = errors.New("timed out waiting for new connections")
-	// ErrNotHandled is returned from a handler to indicate it did not handle the
-	// message.
+
+	// ErrNotHandled is returned from a Handler or Preempter to indicate it did
+	// not handle the request.
+	//
+	// If a Handler returns ErrNotHandled, the server replies with
+	// ErrMethodNotFound.
 	ErrNotHandled = errors.New("JSON RPC not handled")
+
 	// ErrAsyncResponse is returned from a handler to indicate it will generate a
 	// response asynchronously.
+	//
+	// ErrAsyncResponse must not be returned for notifications,
+	// which do not receive responses.
 	ErrAsyncResponse = errors.New("JSON RPC asynchronous response")
 )
 
@@ -28,17 +36,33 @@
 // Primarily this is used for cancel handlers or notifications for which out of
 // order processing is not an issue.
 type Preempter interface {
-	// Preempt is invoked for each incoming request before it is queued.
-	// If the request is a call, it must return a value or an error for the reply.
-	// Preempt should not block or start any new messages on the connection.
-	Preempt(ctx context.Context, req *Request) (interface{}, error)
+	// Preempt is invoked for each incoming request before it is queued for handling.
+	//
+	// If Preempt returns ErrNotHandled, the request will be queued,
+	// and eventually passed to a Handle call.
+	//
+	// Otherwise, the result and error are processed as if returned by Handle.
+	//
+	// Preempt must not block. (The Context passed to it is for Values only.)
+	Preempt(ctx context.Context, req *Request) (result interface{}, err error)
 }
 
 // Handler handles messages on a connection.
 type Handler interface {
-	// Handle is invoked for each incoming request.
-	// If the request is a call, it must return a value or an error for the reply.
-	Handle(ctx context.Context, req *Request) (interface{}, error)
+	// Handle is invoked sequentially for each incoming request that has not
+	// already been handled by a Preempter.
+	//
+	// If the Request has a nil ID, Handle must return a nil result,
+	// and any error may be logged but will not be reported to the caller.
+	//
+	// If the Request has a non-nil ID, Handle must return either a
+	// non-nil, JSON-marshalable result, or a non-nil error.
+	//
+	// The Context passed to Handle will be canceled if the
+	// connection is broken or the request is canceled or completed.
+	// (If Handle returns ErrAsyncResponse, ctx will remain uncanceled
+	// until either Cancel or Respond is called for the request's ID.)
+	Handle(ctx context.Context, req *Request) (result interface{}, err error)
 }
 
 type defaultHandler struct{}
@@ -60,15 +84,15 @@
 // async is a small helper for operations with an asynchronous result that you
 // can wait for.
 type async struct {
-	ready  chan struct{} // signals that the operation has completed
-	errBox chan error    // guards the operation result
+	ready    chan struct{} // closed when done
+	firstErr chan error    // 1-buffered; contains either nil or the first non-nil error
 }
 
 func newAsync() *async {
 	var a async
 	a.ready = make(chan struct{})
-	a.errBox = make(chan error, 1)
-	a.errBox <- nil
+	a.firstErr = make(chan error, 1)
+	a.firstErr <- nil
 	return &a
 }
 
@@ -87,15 +111,15 @@
 
 func (a *async) wait() error {
 	<-a.ready
-	err := <-a.errBox
-	a.errBox <- err
+	err := <-a.firstErr
+	a.firstErr <- err
 	return err
 }
 
 func (a *async) setError(err error) {
-	storedErr := <-a.errBox
+	storedErr := <-a.firstErr
 	if storedErr == nil {
 		storedErr = err
 	}
-	a.errBox <- storedErr
+	a.firstErr <- storedErr
 }
diff --git a/internal/jsonrpc2_v2/jsonrpc2_test.go b/internal/jsonrpc2_v2/jsonrpc2_test.go
index 1157779..4f4b7d9 100644
--- a/internal/jsonrpc2_v2/jsonrpc2_test.go
+++ b/internal/jsonrpc2_v2/jsonrpc2_test.go
@@ -60,6 +60,14 @@
 		notify{"unblock", "a"},
 		collect{"a", true, false},
 	}},
+	sequence{"concurrent", []invoker{
+		async{"a", "fork", "a"},
+		notify{"unblock", "a"},
+		async{"b", "fork", "b"},
+		notify{"unblock", "b"},
+		collect{"a", true, false},
+		collect{"b", true, false},
+	}},
 }
 
 type binder struct {
diff --git a/internal/jsonrpc2_v2/net.go b/internal/jsonrpc2_v2/net.go
index 0b413d8..4f20825 100644
--- a/internal/jsonrpc2_v2/net.go
+++ b/internal/jsonrpc2_v2/net.go
@@ -21,7 +21,7 @@
 	NetDialer       net.Dialer
 }
 
-// NetListener returns a new Listener that listents on a socket using the net package.
+// NetListener returns a new Listener that listens on a socket using the net package.
 func NetListener(ctx context.Context, network, address string, options NetListenOptions) (Listener, error) {
 	ln, err := options.NetListenConfig.Listen(ctx, network, address)
 	if err != nil {
@@ -83,7 +83,7 @@
 // NetPipeListener returns a new Listener that listens using net.Pipe.
 // It is only possibly to connect to it using the Dialier returned by the
 // Dialer method, each call to that method will generate a new pipe the other
-// side of which will be returnd from the Accept call.
+// side of which will be returned from the Accept call.
 func NetPipeListener(ctx context.Context) (Listener, error) {
 	return &netPiper{
 		done:   make(chan struct{}),
diff --git a/internal/jsonrpc2_v2/serve.go b/internal/jsonrpc2_v2/serve.go
index 98e8894..fb35166 100644
--- a/internal/jsonrpc2_v2/serve.go
+++ b/internal/jsonrpc2_v2/serve.go
@@ -18,17 +18,17 @@
 
 // Listener is implemented by protocols to accept new inbound connections.
 type Listener interface {
-	// Accept an inbound connection to a server.
-	// It must block until an inbound connection is made, or the listener is
-	// shut down.
+	// Accept accepts an inbound connection to a server.
+	// It blocks until either an inbound connection is made, or the listener is closed.
 	Accept(context.Context) (io.ReadWriteCloser, error)
 
-	// Close is used to ask a listener to stop accepting new connections.
+	// Close closes the listener.
+	// Any blocked Accept or Dial operations will unblock and return errors.
 	Close() error
 
 	// Dialer returns a dialer that can be used to connect to this listener
 	// locally.
-	// If a listener does not implement this it will return a nil.
+	// If a listener does not implement this it will return nil.
 	Dialer() Dialer
 }
 
diff --git a/internal/jsonrpc2_v2/wire.go b/internal/jsonrpc2_v2/wire.go
index 97b1ae8..4da129a 100644
--- a/internal/jsonrpc2_v2/wire.go
+++ b/internal/jsonrpc2_v2/wire.go
@@ -12,8 +12,6 @@
 // see http://www.jsonrpc.org/specification for details
 
 var (
-	// ErrUnknown should be used for all non coded errors.
-	ErrUnknown = NewError(-32001, "JSON RPC unknown error")
 	// ErrParse is used when invalid JSON was received by the server.
 	ErrParse = NewError(-32700, "JSON RPC parse error")
 	// ErrInvalidRequest is used when the JSON sent is not a valid Request object.
@@ -28,11 +26,13 @@
 	ErrInternal = NewError(-32603, "JSON RPC internal error")
 
 	// The following errors are not part of the json specification, but
-	// compliant extensions specific to this implimentation.
+	// compliant extensions specific to this implementation.
 
 	// ErrServerOverloaded is returned when a message was refused due to a
 	// server being temporarily unable to accept any new messages.
 	ErrServerOverloaded = NewError(-32000, "JSON RPC overloaded")
+	// ErrUnknown should be used for all non-coded errors.
+	ErrUnknown = NewError(-32001, "JSON RPC unknown error")
 )
 
 const wireVersion = "2.0"
diff --git a/internal/lsp/analysis/fillreturns/fillreturns.go b/internal/lsp/analysis/fillreturns/fillreturns.go
index 94accef..4607f37 100644
--- a/internal/lsp/analysis/fillreturns/fillreturns.go
+++ b/internal/lsp/analysis/fillreturns/fillreturns.go
@@ -14,15 +14,15 @@
 	"go/format"
 	"go/types"
 	"regexp"
-	"strconv"
 	"strings"
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/ast/astutil"
 	"golang.org/x/tools/internal/analysisinternal"
+	"golang.org/x/tools/internal/typeparams"
 )
 
-const Doc = `suggested fixes for "wrong number of return values (want %d, got %d)"
+const Doc = `suggest fixes for errors due to an incorrect number of return values
 
 This checker provides suggested fixes for type errors of the
 type "wrong number of return values (want %d, got %d)". For example:
@@ -45,8 +45,6 @@
 	RunDespiteErrors: true,
 }
 
-var wrongReturnNumRegex = regexp.MustCompile(`wrong number of return values \(want (\d+), got (\d+)\)`)
-
 func run(pass *analysis.Pass) (interface{}, error) {
 	info := pass.TypesInfo
 	if info == nil {
@@ -57,7 +55,7 @@
 outer:
 	for _, typeErr := range errors {
 		// Filter out the errors that are not relevant to this analyzer.
-		if !FixesError(typeErr.Msg) {
+		if !FixesError(typeErr) {
 			continue
 		}
 		var file *ast.File
@@ -78,20 +76,32 @@
 		}
 		typeErrEndPos := analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), typeErr.Pos)
 
+		// TODO(rfindley): much of the error handling code below returns, when it
+		// should probably continue.
+
 		// Get the path for the relevant range.
 		path, _ := astutil.PathEnclosingInterval(file, typeErr.Pos, typeErrEndPos)
 		if len(path) == 0 {
 			return nil, nil
 		}
-		// Check to make sure the node of interest is a ReturnStmt.
-		ret, ok := path[0].(*ast.ReturnStmt)
-		if !ok {
+
+		// Find the enclosing return statement.
+		var ret *ast.ReturnStmt
+		var retIdx int
+		for i, n := range path {
+			if r, ok := n.(*ast.ReturnStmt); ok {
+				ret = r
+				retIdx = i
+				break
+			}
+		}
+		if ret == nil {
 			return nil, nil
 		}
 
 		// Get the function type that encloses the ReturnStmt.
 		var enclosingFunc *ast.FuncType
-		for _, n := range path {
+		for _, n := range path[retIdx+1:] {
 			switch node := n.(type) {
 			case *ast.FuncLit:
 				enclosingFunc = node.Type
@@ -106,6 +116,14 @@
 			continue
 		}
 
+		// Skip any generic enclosing functions, since type parameters don't
+		// have zero values.
+		// TODO(rfindley): We should be able to handle this if the return
+		// values are all concrete types.
+		if tparams := typeparams.ForFuncType(enclosingFunc); tparams != nil && tparams.NumFields() > 0 {
+			return nil, nil
+		}
+
 		// Find the function declaration that encloses the ReturnStmt.
 		var outer *ast.FuncDecl
 		for _, p := range path {
@@ -118,7 +136,8 @@
 			return nil, nil
 		}
 
-		// Skip any return statements that contain function calls with multiple return values.
+		// Skip any return statements that contain function calls with multiple
+		// return values.
 		for _, expr := range ret.Results {
 			e, ok := expr.(*ast.CallExpr)
 			if !ok {
@@ -235,16 +254,23 @@
 	return types.AssignableTo(want, got) || types.ConvertibleTo(want, got)
 }
 
-func FixesError(msg string) bool {
-	matches := wrongReturnNumRegex.FindStringSubmatch(strings.TrimSpace(msg))
-	if len(matches) < 3 {
-		return false
+// Error messages have changed across Go versions. These regexps capture recent
+// incarnations.
+//
+// TODO(rfindley): once error codes are exported and exposed via go/packages,
+// use error codes rather than string matching here.
+var wrongReturnNumRegexes = []*regexp.Regexp{
+	regexp.MustCompile(`wrong number of return values \(want (\d+), got (\d+)\)`),
+	regexp.MustCompile(`too many return values`),
+	regexp.MustCompile(`not enough return values`),
+}
+
+func FixesError(err types.Error) bool {
+	msg := strings.TrimSpace(err.Msg)
+	for _, rx := range wrongReturnNumRegexes {
+		if rx.MatchString(msg) {
+			return true
+		}
 	}
-	if _, err := strconv.Atoi(matches[1]); err != nil {
-		return false
-	}
-	if _, err := strconv.Atoi(matches[2]); err != nil {
-		return false
-	}
-	return true
+	return false
 }
diff --git a/internal/lsp/analysis/fillreturns/fillreturns_test.go b/internal/lsp/analysis/fillreturns/fillreturns_test.go
index d1ad656..7ef0d46 100644
--- a/internal/lsp/analysis/fillreturns/fillreturns_test.go
+++ b/internal/lsp/analysis/fillreturns/fillreturns_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/internal/lsp/analysis/fillreturns"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.RunWithSuggestedFixes(t, testdata, fillreturns.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.RunWithSuggestedFixes(t, testdata, fillreturns.Analyzer, tests...)
 }
diff --git a/internal/lsp/analysis/fillreturns/testdata/src/a/a.go b/internal/lsp/analysis/fillreturns/testdata/src/a/a.go
index 44cb25f..7ab0ff1 100644
--- a/internal/lsp/analysis/fillreturns/testdata/src/a/a.go
+++ b/internal/lsp/analysis/fillreturns/testdata/src/a/a.go
@@ -25,80 +25,82 @@
 	return errors.New("foo")
 }
 
+// The error messages below changed in 1.18; "return values" covers both forms.
+
 func b() (string, int, error) {
-	return "", errors.New("foo") // want "wrong number of return values \\(want 3, got 2\\)"
+	return "", errors.New("foo") // want "return values"
 }
 
 func c() (string, int, error) {
-	return 7, errors.New("foo") // want "wrong number of return values \\(want 3, got 2\\)"
+	return 7, errors.New("foo") // want "return values"
 }
 
 func d() (string, int, error) {
-	return "", 7 // want "wrong number of return values \\(want 3, got 2\\)"
+	return "", 7 // want "return values"
 }
 
 func e() (T, error, *bool) {
-	return (z(http.ListenAndServe))("", nil) // want "wrong number of return values \\(want 3, got 1\\)"
+	return (z(http.ListenAndServe))("", nil) // want "return values"
 }
 
 func preserveLeft() (int, int, error) {
-	return 1, errors.New("foo") // want "wrong number of return values \\(want 3, got 2\\)"
+	return 1, errors.New("foo") // want "return values"
 }
 
 func matchValues() (int, error, string) {
-	return errors.New("foo"), 3 // want "wrong number of return values \\(want 3, got 2\\)"
+	return errors.New("foo"), 3 // want "return values"
 }
 
 func preventDataOverwrite() (int, string) {
-	return errors.New("foo") // want "wrong number of return values \\(want 2, got 1\\)"
+	return errors.New("foo") // want "return values"
 }
 
 func closure() (string, error) {
 	_ = func() (int, error) {
-		return // want "wrong number of return values \\(want 2, got 0\\)"
+		return // want "return values"
 	}
-	return // want "wrong number of return values \\(want 2, got 0\\)"
+	return // want "return values"
 }
 
 func basic() (uint8, uint16, uint32, uint64, int8, int16, int32, int64, float32, float64, complex64, complex128, byte, rune, uint, int, uintptr, string, bool, error) {
-	return // want "wrong number of return values \\(want 20, got 0\\)"
+	return // want "return values"
 }
 
 func complex() (*int, []int, [2]int, map[int]int) {
-	return // want "wrong number of return values \\(want 4, got 0\\)"
+	return // want "return values"
 }
 
 func structsAndInterfaces() (T, url.URL, T1, I, I1, io.Reader, Client, ast2.Stmt) {
-	return // want "wrong number of return values \\(want 8, got 0\\)"
+	return // want "return values"
 }
 
 func m() (int, error) {
 	if 1 == 2 {
-		return // want "wrong number of return values \\(want 2, got 0\\)"
+		return // want "return values"
 	} else if 1 == 3 {
-		return errors.New("foo") // want "wrong number of return values \\(want 2, got 1\\)"
+		return errors.New("foo") // want "return values"
 	} else {
-		return 1 // want "wrong number of return values \\(want 2, got 1\\)"
+		return 1 // want "return values"
 	}
-	return // want "wrong number of return values \\(want 2, got 0\\)"
+	return // want "return values"
 }
 
 func convertibleTypes() (ast2.Expr, int) {
-	return &ast2.ArrayType{} // want "wrong number of return values \\(want 2, got 1\\)"
+	return &ast2.ArrayType{} // want "return values"
 }
 
 func assignableTypes() (map[string]int, int) {
 	type X map[string]int
 	var x X
-	return x // want "wrong number of return values \\(want 2, got 1\\)"
+	return x // want "return values"
 }
 
 func interfaceAndError() (I, int) {
-	return errors.New("foo") // want "wrong number of return values \\(want 2, got 1\\)"
+	return errors.New("foo") // want "return values"
 }
 
 func funcOneReturn() (string, error) {
-	return strconv.Itoa(1) // want "wrong number of return values \\(want 2, got 1\\)"
+	return strconv.Itoa(1) // want "return values"
 }
 
 func funcMultipleReturn() (int, error, string) {
@@ -110,16 +112,16 @@
 }
 
 func multipleUnused() (int, string, string, string) {
-	return 3, 4, 5 // want "wrong number of return values \\(want 4, got 3\\)"
+	return 3, 4, 5 // want "return values"
 }
 
 func gotTooMany() int {
 	if true {
-		return 0, "" // want "wrong number of return values \\(want 1, got 2\\)"
+		return 0, "" // want "return values"
 	} else {
-		return 1, 0, nil // want "wrong number of return values \\(want 1, got 3\\)"
+		return 1, 0, nil // want "return values"
 	}
-	return 0, 5, false // want "wrong number of return values \\(want 1, got 3\\)"
+	return 0, 5, false // want "return values"
 }
 
 func fillVars() (int, string, ast.Node, bool, error) {
@@ -128,10 +130,10 @@
 	var t bool
 	if true {
 		err := errors.New("fail")
-		return // want "wrong number of return values \\(want 5, got 0\\)"
+		return // want "return values"
 	}
 	n := ast.NewIdent("ident")
 	int := 3
 	var b bool
-	return "" // want "wrong number of return values \\(want 5, got 1\\)"
+	return "" // want "return values"
 }
diff --git a/internal/lsp/analysis/fillreturns/testdata/src/a/a.go.golden b/internal/lsp/analysis/fillreturns/testdata/src/a/a.go.golden
index 1435ea0..f007a5f 100644
--- a/internal/lsp/analysis/fillreturns/testdata/src/a/a.go.golden
+++ b/internal/lsp/analysis/fillreturns/testdata/src/a/a.go.golden
@@ -25,80 +25,82 @@
 	return errors.New("foo")
 }
 
+// The error messages below changed in 1.18; "return values" covers both forms.
+
 func b() (string, int, error) {
-	return "", 0, errors.New("foo") // want "wrong number of return values \\(want 3, got 2\\)"
+	return "", 0, errors.New("foo") // want "return values"
 }
 
 func c() (string, int, error) {
-	return "", 7, errors.New("foo") // want "wrong number of return values \\(want 3, got 2\\)"
+	return "", 7, errors.New("foo") // want "return values"
 }
 
 func d() (string, int, error) {
-	return "", 7, nil // want "wrong number of return values \\(want 3, got 2\\)"
+	return "", 7, nil // want "return values"
 }
 
 func e() (T, error, *bool) {
-	return T{}, (z(http.ListenAndServe))("", nil), nil // want "wrong number of return values \\(want 3, got 1\\)"
+	return T{}, (z(http.ListenAndServe))("", nil), nil // want "return values"
 }
 
 func preserveLeft() (int, int, error) {
-	return 1, 0, errors.New("foo") // want "wrong number of return values \\(want 3, got 2\\)"
+	return 1, 0, errors.New("foo") // want "return values"
 }
 
 func matchValues() (int, error, string) {
-	return 3, errors.New("foo"), "" // want "wrong number of return values \\(want 3, got 2\\)"
+	return 3, errors.New("foo"), "" // want "return values"
 }
 
 func preventDataOverwrite() (int, string) {
-	return 0, "", errors.New("foo") // want "wrong number of return values \\(want 2, got 1\\)"
+	return 0, "", errors.New("foo") // want "return values"
 }
 
 func closure() (string, error) {
 	_ = func() (int, error) {
-		return 0, nil // want "wrong number of return values \\(want 2, got 0\\)"
+		return 0, nil // want "return values"
 	}
-	return "", nil // want "wrong number of return values \\(want 2, got 0\\)"
+	return "", nil // want "return values"
 }
 
 func basic() (uint8, uint16, uint32, uint64, int8, int16, int32, int64, float32, float64, complex64, complex128, byte, rune, uint, int, uintptr, string, bool, error) {
-	return 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, "", false, nil // want "wrong number of return values \\(want 20, got 0\\)"
+	return 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, "", false, nil // want "return values"
 }
 
 func complex() (*int, []int, [2]int, map[int]int) {
-	return nil, nil, nil, nil // want "wrong number of return values \\(want 4, got 0\\)"
+	return nil, nil, nil, nil // want "return values"
 }
 
 func structsAndInterfaces() (T, url.URL, T1, I, I1, io.Reader, Client, ast2.Stmt) {
-	return T{}, url.URL{}, T{}, nil, nil, nil, Client{}, nil // want "wrong number of return values \\(want 8, got 0\\)"
+	return T{}, url.URL{}, T{}, nil, nil, nil, Client{}, nil // want "return values"
 }
 
 func m() (int, error) {
 	if 1 == 2 {
-		return 0, nil // want "wrong number of return values \\(want 2, got 0\\)"
+		return 0, nil // want "return values"
 	} else if 1 == 3 {
-		return 0, errors.New("foo") // want "wrong number of return values \\(want 2, got 1\\)"
+		return 0, errors.New("foo") // want "return values"
 	} else {
-		return 1, nil // want "wrong number of return values \\(want 2, got 1\\)"
+		return 1, nil // want "return values"
 	}
-	return 0, nil // want "wrong number of return values \\(want 2, got 0\\)"
+	return 0, nil // want "return values"
 }
 
 func convertibleTypes() (ast2.Expr, int) {
-	return &ast2.ArrayType{}, 0 // want "wrong number of return values \\(want 2, got 1\\)"
+	return &ast2.ArrayType{}, 0 // want "return values"
 }
 
 func assignableTypes() (map[string]int, int) {
 	type X map[string]int
 	var x X
-	return x, 0 // want "wrong number of return values \\(want 2, got 1\\)"
+	return x, 0 // want "return values"
 }
 
 func interfaceAndError() (I, int) {
-	return errors.New("foo"), 0 // want "wrong number of return values \\(want 2, got 1\\)"
+	return errors.New("foo"), 0 // want "return values"
 }
 
 func funcOneReturn() (string, error) {
-	return strconv.Itoa(1), nil // want "wrong number of return values \\(want 2, got 1\\)"
+	return strconv.Itoa(1), nil // want "return values"
 }
 
 func funcMultipleReturn() (int, error, string) {
@@ -110,16 +112,16 @@
 }
 
 func multipleUnused() (int, string, string, string) {
-	return 3, "", "", "", 4, 5 // want "wrong number of return values \\(want 4, got 3\\)"
+	return 3, "", "", "", 4, 5 // want "return values"
 }
 
 func gotTooMany() int {
 	if true {
-		return 0 // want "wrong number of return values \\(want 1, got 2\\)"
+		return 0 // want "return values"
 	} else {
-		return 1 // want "wrong number of return values \\(want 1, got 3\\)"
+		return 1 // want "return values"
 	}
-	return 5 // want "wrong number of return values \\(want 1, got 3\\)"
+	return 5 // want "return values"
 }
 
 func fillVars() (int, string, ast.Node, bool, error) {
@@ -128,10 +130,10 @@
 	var t bool
 	if true {
 		err := errors.New("fail")
-		return eint, s, nil, false, err // want "wrong number of return values \\(want 5, got 0\\)"
+		return eint, s, nil, false, err // want "return values"
 	}
 	n := ast.NewIdent("ident")
 	int := 3
 	var b bool
-	return int, "", n, b, nil // want "wrong number of return values \\(want 5, got 1\\)"
+	return int, "", n, b, nil // want "return values"
 }
diff --git a/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go b/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go
new file mode 100644
index 0000000..8454bd2
--- /dev/null
+++ b/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go
@@ -0,0 +1,5 @@
+package fillreturns
+
+func hello[T any]() int {
+	return
+}
diff --git a/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go.golden b/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go.golden
new file mode 100644
index 0000000..8454bd2
--- /dev/null
+++ b/internal/lsp/analysis/fillreturns/testdata/src/a/typeparams/a.go.golden
@@ -0,0 +1,5 @@
+package fillreturns
+
+func hello[T any]() int {
+	return
+}
diff --git a/internal/lsp/analysis/fillstruct/fillstruct.go b/internal/lsp/analysis/fillstruct/fillstruct.go
index 36a63a1..a4dd8cc 100644
--- a/internal/lsp/analysis/fillstruct/fillstruct.go
+++ b/internal/lsp/analysis/fillstruct/fillstruct.go
@@ -13,6 +13,7 @@
 	"go/format"
 	"go/token"
 	"go/types"
+	"strings"
 	"unicode"
 
 	"golang.org/x/tools/go/analysis"
@@ -21,6 +22,7 @@
 	"golang.org/x/tools/go/ast/inspector"
 	"golang.org/x/tools/internal/analysisinternal"
 	"golang.org/x/tools/internal/span"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 const Doc = `note incomplete struct initializations
@@ -65,6 +67,14 @@
 			return
 		}
 
+		// Ignore types that have type parameters for now.
+		// TODO: support type params.
+		if typ, ok := typ.(*types.Named); ok {
+			if tparams := typeparams.ForNamed(typ); tparams != nil && tparams.Len() > 0 {
+				return
+			}
+		}
+
 		// Find reference to the type declaration of the struct being initialized.
 		for {
 			p, ok := typ.Underlying().(*types.Pointer)
@@ -87,13 +97,25 @@
 		}
 
 		var fillable bool
+		var fillableFields []string
 		for i := 0; i < fieldCount; i++ {
 			field := obj.Field(i)
 			// Ignore fields that are not accessible in the current package.
 			if field.Pkg() != nil && field.Pkg() != pass.Pkg && !field.Exported() {
 				continue
 			}
+			// Ignore structs containing fields that have type parameters for now.
+			// TODO: support type params.
+			if typ, ok := field.Type().(*types.Named); ok {
+				if tparams := typeparams.ForNamed(typ); tparams != nil && tparams.Len() > 0 {
+					return
+				}
+			}
+			if _, ok := field.Type().(*typeparams.TypeParam); ok {
+				return
+			}
 			fillable = true
+			fillableFields = append(fillableFields, fmt.Sprintf("%s: %s", field.Name(), field.Type().String()))
 		}
 		if !fillable {
 			return
@@ -105,7 +127,21 @@
 		case *ast.SelectorExpr:
 			name = fmt.Sprintf("%s.%s", typ.X, typ.Sel.Name)
 		default:
-			name = "anonymous struct"
+			totalFields := len(fillableFields)
+			maxLen := 20
+			// Find the index to cut off printing of fields.
+			var i, fieldLen int
+			for i = range fillableFields {
+				if fieldLen > maxLen {
+					break
+				}
+				fieldLen += len(fillableFields[i])
+			}
+			fillableFields = fillableFields[:i]
+			if i < totalFields {
+				fillableFields = append(fillableFields, "...")
+			}
+			name = fmt.Sprintf("anonymous struct { %s }", strings.Join(fillableFields, ", "))
 		}
 		pass.Report(analysis.Diagnostic{
 			Message: fmt.Sprintf("Fill %s", name),
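The hunk above builds a readable label for anonymous structs by concatenating "name: type" descriptions until a length budget is exceeded and then appending "...". Below is a minimal standalone sketch of that cutoff logic; the helper name (anonymousStructLabel) and the sample field list are invented for the illustration and are not part of the patch.

package main

import (
	"fmt"
	"strings"
)

// anonymousStructLabel mirrors the cutoff loop above: accumulate field
// descriptions until their combined length exceeds maxLen, drop the rest,
// and append "..." when anything was omitted.
func anonymousStructLabel(fields []string, maxLen int) string {
	var i, fieldLen int
	for i = range fields {
		if fieldLen > maxLen {
			break
		}
		fieldLen += len(fields[i])
	}
	kept := fields[:i]
	if i < len(fields) {
		kept = append(kept, "...")
	}
	return fmt.Sprintf("anonymous struct { %s }", strings.Join(kept, ", "))
}

func main() {
	fields := []string{"host: string", "port: int", "timeout: time.Duration", "retries: int"}
	fmt.Println(anonymousStructLabel(fields, 20))
	// Prints: anonymous struct { host: string, port: int, ... }
}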
diff --git a/internal/lsp/analysis/fillstruct/fillstruct_test.go b/internal/lsp/analysis/fillstruct/fillstruct_test.go
index 34c9923..51a516c 100644
--- a/internal/lsp/analysis/fillstruct/fillstruct_test.go
+++ b/internal/lsp/analysis/fillstruct/fillstruct_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/internal/lsp/analysis/fillstruct"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, fillstruct.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.Run(t, testdata, fillstruct.Analyzer, tests...)
 }
diff --git a/internal/lsp/analysis/fillstruct/testdata/src/typeparams/typeparams.go b/internal/lsp/analysis/fillstruct/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..9029061
--- /dev/null
+++ b/internal/lsp/analysis/fillstruct/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,41 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package fillstruct
+
+type emptyStruct[A any] struct{}
+
+var _ = emptyStruct[int]{}
+
+type basicStruct[T any] struct {
+	foo T
+}
+
+var _ = basicStruct[int]{}
+
+type fooType[T any] T
+
+type twoArgStruct[F, B any] struct {
+	foo fooType[F]
+	bar fooType[B]
+}
+
+var _ = twoArgStruct[string, int]{}
+
+var _ = twoArgStruct[int, string]{
+	bar: "bar",
+}
+
+type nestedStruct struct {
+	bar   string
+	basic basicStruct[int]
+}
+
+var _ = nestedStruct{}
+
+func _[T any]() {
+	type S struct{ t T }
+	x := S{}
+	_ = x
+}
diff --git a/internal/lsp/analysis/infertypeargs/infertypeargs.go b/internal/lsp/analysis/infertypeargs/infertypeargs.go
new file mode 100644
index 0000000..119de50
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/infertypeargs.go
@@ -0,0 +1,31 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package infertypeargs defines an analyzer that checks for explicit type
+// arguments that could be inferred.
+package infertypeargs
+
+import (
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+)
+
+const Doc = `check for unnecessary type arguments in call expressions
+
+Explicit type arguments may be omitted from call expressions if they can be
+inferred from function arguments, or from other type arguments:
+
+	func f[T any](T) {}
+	
+	func _() {
+		f[string]("foo") // string could be inferred
+	}
+`
+
+var Analyzer = &analysis.Analyzer{
+	Name:     "infertypeargs",
+	Doc:      Doc,
+	Requires: []*analysis.Analyzer{inspect.Analyzer},
+	Run:      run,
+}
diff --git a/internal/lsp/analysis/infertypeargs/infertypeargs_test.go b/internal/lsp/analysis/infertypeargs/infertypeargs_test.go
new file mode 100644
index 0000000..2957f46
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/infertypeargs_test.go
@@ -0,0 +1,23 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package infertypeargs_test
+
+import (
+	"testing"
+
+	"golang.org/x/tools/go/analysis/analysistest"
+	"golang.org/x/tools/internal/lsp/analysis/infertypeargs"
+	"golang.org/x/tools/internal/testenv"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func Test(t *testing.T) {
+	testenv.NeedsGo1Point(t, 13)
+	if !typeparams.Enabled {
+		t.Skip("type params are not enabled")
+	}
+	testdata := analysistest.TestData()
+	analysistest.RunWithSuggestedFixes(t, testdata, infertypeargs.Analyzer, "a")
+}
diff --git a/internal/lsp/analysis/infertypeargs/run_go117.go b/internal/lsp/analysis/infertypeargs/run_go117.go
new file mode 100644
index 0000000..bc5c29b
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/run_go117.go
@@ -0,0 +1,16 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package infertypeargs
+
+import "golang.org/x/tools/go/analysis"
+
+// This analyzer is only relevant for Go 1.18 and later (it also uses the
+// types.CheckExpr API, which was added in Go 1.13), so run is a no-op here.
+func run(pass *analysis.Pass) (interface{}, error) {
+	return nil, nil
+}
diff --git a/internal/lsp/analysis/infertypeargs/run_go118.go b/internal/lsp/analysis/infertypeargs/run_go118.go
new file mode 100644
index 0000000..6645742
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/run_go118.go
@@ -0,0 +1,111 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package infertypeargs
+
+import (
+	"go/ast"
+	"go/token"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+	nodeFilter := []ast.Node{
+		(*ast.CallExpr)(nil),
+	}
+
+	inspect.Preorder(nodeFilter, func(node ast.Node) {
+		call := node.(*ast.CallExpr)
+		x, lbrack, indices, rbrack := typeparams.UnpackIndexExpr(call.Fun)
+		ident := calledIdent(x)
+		if ident == nil || len(indices) == 0 {
+			return // no explicit args, nothing to do
+		}
+
+		// Confirm that instantiation actually occurred at this ident.
+		idata, ok := typeparams.GetInstances(pass.TypesInfo)[ident]
+		if !ok {
+			return // something went wrong, but fail open
+		}
+		instance := idata.Type
+
+		// Start removing type argument expressions from the right, and check if
+		// we can still infer the call expression.
+		required := len(indices) // number of type expressions that are required
+		for i := len(indices) - 1; i >= 0; i-- {
+			var fun ast.Expr
+			if i == 0 {
+				// No longer an index expression: just use the parameterized operand.
+				fun = x
+			} else {
+				fun = typeparams.PackIndexExpr(x, lbrack, indices[:i], indices[i-1].End())
+			}
+			newCall := &ast.CallExpr{
+				Fun:      fun,
+				Lparen:   call.Lparen,
+				Args:     call.Args,
+				Ellipsis: call.Ellipsis,
+				Rparen:   call.Rparen,
+			}
+			info := new(types.Info)
+			typeparams.InitInstanceInfo(info)
+			if err := types.CheckExpr(pass.Fset, pass.Pkg, call.Pos(), newCall, info); err != nil {
+				// Most likely inference failed.
+				break
+			}
+			newIData := typeparams.GetInstances(info)[ident]
+			newInstance := newIData.Type
+			if !types.Identical(instance, newInstance) {
+				// The inferred instantiated type does not match the original, so
+				// this simplification is not valid.
+				break
+			}
+			required = i
+		}
+		if required < len(indices) {
+			var start, end token.Pos
+			var edit analysis.TextEdit
+			if required == 0 {
+				start, end = lbrack, rbrack+1 // erase the entire index
+				edit = analysis.TextEdit{Pos: start, End: end}
+			} else {
+				start = indices[required].Pos()
+				end = rbrack
+				// erase from the end of the last kept argument so the trailing comma and whitespace are removed too
+				edit = analysis.TextEdit{Pos: indices[required-1].End(), End: end}
+			}
+			pass.Report(analysis.Diagnostic{
+				Pos:     start,
+				End:     end,
+				Message: "unnecessary type arguments",
+				SuggestedFixes: []analysis.SuggestedFix{{
+					Message:   "simplify type arguments",
+					TextEdits: []analysis.TextEdit{edit},
+				}},
+			})
+		}
+	})
+
+	return nil, nil
+}
+
+func calledIdent(x ast.Expr) *ast.Ident {
+	switch x := x.(type) {
+	case *ast.Ident:
+		return x
+	case *ast.SelectorExpr:
+		return x.Sel
+	}
+	return nil
+}
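The loop above peels explicit type arguments off from the right and re-typechecks the rewritten call with types.CheckExpr, accepting the simplification only if inference still yields an identical instantiation. A simplified, self-contained illustration of that re-check idea follows; it is not the gopls code path, it needs a Go 1.18+ toolchain, and the source string and names ("p.go", id) are invented for the example.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

func main() {
	const src = `package p

func id[T any](t T) T { return t }

var x = id[int](1)
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{Types: map[ast.Expr]types.TypeAndValue{}}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{f}, info)
	if err != nil {
		panic(err)
	}

	// Re-check the call with the explicit type argument removed. If this
	// type-checks and yields the same type, the argument was unnecessary.
	simplified, err := parser.ParseExpr("id(1)")
	if err != nil {
		panic(err)
	}
	checkInfo := &types.Info{Types: map[ast.Expr]types.TypeAndValue{}}
	if err := types.CheckExpr(fset, pkg, token.NoPos, simplified, checkInfo); err != nil {
		fmt.Println("inference failed; keep the type argument:", err)
		return
	}
	fmt.Println("inferred type:", checkInfo.Types[simplified].Type) // int
}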
diff --git a/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go b/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go
new file mode 100644
index 0000000..1c3d88b
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go
@@ -0,0 +1,20 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the infertypeargs checker.
+
+package a
+
+func f[T any](T) {}
+
+func g[T any]() T { var x T; return x }
+
+func h[P interface{ ~*T }, T any]() {}
+
+func _() {
+	f[string]("hello") // want "unnecessary type arguments"
+	f[int](2)          // want "unnecessary type arguments"
+	_ = g[int]()
+	h[*int, int]() // want "unnecessary type arguments"
+}
diff --git a/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go.golden b/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go.golden
new file mode 100644
index 0000000..72348ff
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/testdata/src/a/basic.go.golden
@@ -0,0 +1,20 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the infertypeargs checker.
+
+package a
+
+func f[T any](T) {}
+
+func g[T any]() T { var x T; return x }
+
+func h[P interface{ ~*T }, T any]() {}
+
+func _() {
+	f("hello") // want "unnecessary type arguments"
+	f(2)       // want "unnecessary type arguments"
+	_ = g[int]()
+	h[*int]() // want "unnecessary type arguments"
+}
diff --git a/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go b/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go
new file mode 100644
index 0000000..fc1f763
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package a
+
+import "a/imported"
+
+func _() {
+	var x int
+	imported.F[int](x) // want "unnecessary type arguments"
+}
diff --git a/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go.golden b/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go.golden
new file mode 100644
index 0000000..6099545
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/testdata/src/a/imported.go.golden
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package a
+
+import "a/imported"
+
+func _() {
+	var x int
+	imported.F(x) // want "unnecessary type arguments"
+}
diff --git a/internal/lsp/analysis/infertypeargs/testdata/src/a/imported/imported.go b/internal/lsp/analysis/infertypeargs/testdata/src/a/imported/imported.go
new file mode 100644
index 0000000..f0610a8
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/testdata/src/a/imported/imported.go
@@ -0,0 +1,7 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imported
+
+func F[T any](T) {}
diff --git a/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go b/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go
new file mode 100644
index 0000000..c304f1d
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go
@@ -0,0 +1,26 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// We should not suggest removing type arguments if doing so would change the
+// resulting type.
+
+package a
+
+func id[T any](t T) T { return t }
+
+var _ = id[int](1)        // want "unnecessary type arguments"
+var _ = id[string]("foo") // want "unnecessary type arguments"
+var _ = id[int64](2)
+
+func pair[T any](t T) (T, T) { return t, t }
+
+var _, _ = pair[int](3) // want "unnecessary type arguments"
+var _, _ = pair[int64](3)
+
+func noreturn[T any](t T) {}
+
+func _() {
+	noreturn[int64](4)
+	noreturn[int](4) // want "unnecessary type arguments"
+}
diff --git a/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go.golden b/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go.golden
new file mode 100644
index 0000000..93c6f70
--- /dev/null
+++ b/internal/lsp/analysis/infertypeargs/testdata/src/a/notypechange.go.golden
@@ -0,0 +1,26 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// We should not suggest removing type arguments if doing so would change the
+// resulting type.
+
+package a
+
+func id[T any](t T) T { return t }
+
+var _ = id(1)     // want "unnecessary type arguments"
+var _ = id("foo") // want "unnecessary type arguments"
+var _ = id[int64](2)
+
+func pair[T any](t T) (T, T) { return t, t }
+
+var _, _ = pair(3) // want "unnecessary type arguments"
+var _, _ = pair[int64](3)
+
+func noreturn[T any](t T) {}
+
+func _() {
+	noreturn[int64](4)
+	noreturn(4) // want "unnecessary type arguments"
+}
diff --git a/internal/lsp/analysis/nonewvars/nonewvars_test.go b/internal/lsp/analysis/nonewvars/nonewvars_test.go
index 3983bc5..dc58ab0 100644
--- a/internal/lsp/analysis/nonewvars/nonewvars_test.go
+++ b/internal/lsp/analysis/nonewvars/nonewvars_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/internal/lsp/analysis/nonewvars"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.RunWithSuggestedFixes(t, testdata, nonewvars.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.RunWithSuggestedFixes(t, testdata, nonewvars.Analyzer, tests...)
 }
diff --git a/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go b/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go
new file mode 100644
index 0000000..b381c9c
--- /dev/null
+++ b/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go
@@ -0,0 +1,6 @@
+package nonewvars
+
+func hello[T any]() int {
+	var z T
+	z := 1 // want "no new variables on left side of :="
+}
diff --git a/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go.golden b/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go.golden
new file mode 100644
index 0000000..3a51173
--- /dev/null
+++ b/internal/lsp/analysis/nonewvars/testdata/src/typeparams/a.go.golden
@@ -0,0 +1,6 @@
+package nonewvars
+
+func hello[T any]() int {
+	var z T
+	z = 1 // want "no new variables on left side of :="
+}
diff --git a/internal/lsp/analysis/noresultvalues/noresultvalues.go b/internal/lsp/analysis/noresultvalues/noresultvalues.go
index 0e6b26f..b9f21f3 100644
--- a/internal/lsp/analysis/noresultvalues/noresultvalues.go
+++ b/internal/lsp/analysis/noresultvalues/noresultvalues.go
@@ -10,6 +10,7 @@
 	"bytes"
 	"go/ast"
 	"go/format"
+	"strings"
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/passes/inspect"
@@ -17,10 +18,11 @@
 	"golang.org/x/tools/internal/analysisinternal"
 )
 
-const Doc = `suggested fixes for "no result values expected"
+const Doc = `suggested fixes for unexpected return values
 
 This checker provides suggested fixes for type errors of the
-type "no result values expected". For example:
+type "no result values expected" or "too many return values".
+For example:
 	func z() { return nil }
 will turn into
 	func z() { return }
@@ -83,5 +85,6 @@
 }
 
 func FixesError(msg string) bool {
-	return msg == "no result values expected"
+	return msg == "no result values expected" ||
+		strings.HasPrefix(msg, "too many return values") && strings.Contains(msg, "want ()")
 }
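A small standalone check of the widened predicate above. The pre-1.18 message is exact; the 1.18-style messages shown here only approximate the compiler's wording and are included to illustrate the prefix and "want ()" matching.

package main

import (
	"fmt"
	"strings"
)

// fixesError reproduces the predicate from the hunk above.
func fixesError(msg string) bool {
	return msg == "no result values expected" ||
		strings.HasPrefix(msg, "too many return values") && strings.Contains(msg, "want ()")
}

func main() {
	fmt.Println(fixesError("no result values expected"))                         // true (pre-1.18 form)
	fmt.Println(fixesError("too many return values\n\thave (nil)\n\twant ()"))   // true (approximate 1.18 form)
	fmt.Println(fixesError("too many return values\n\thave (nil)\n\twant (int)")) // false: the function declares results
}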
diff --git a/internal/lsp/analysis/noresultvalues/noresultvalues_test.go b/internal/lsp/analysis/noresultvalues/noresultvalues_test.go
index 6b9451b..12198a1 100644
--- a/internal/lsp/analysis/noresultvalues/noresultvalues_test.go
+++ b/internal/lsp/analysis/noresultvalues/noresultvalues_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/internal/lsp/analysis/noresultvalues"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.RunWithSuggestedFixes(t, testdata, noresultvalues.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.RunWithSuggestedFixes(t, testdata, noresultvalues.Analyzer, tests...)
 }
diff --git a/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go b/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go
index 30265a4..3daa7f7 100644
--- a/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go
+++ b/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go
@@ -4,6 +4,6 @@
 
 package noresultvalues
 
-func x() { return nil } // want "no result values expected"
+func x() { return nil } // want `no result values expected|too many return values`
 
-func y() { return nil, "hello" } // want "no result values expected"
+func y() { return nil, "hello" } // want `no result values expected|too many return values`
diff --git a/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go.golden b/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go.golden
index 6b29cef..5e93aa4 100644
--- a/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go.golden
+++ b/internal/lsp/analysis/noresultvalues/testdata/src/a/a.go.golden
@@ -4,6 +4,6 @@
 
 package noresultvalues
 
-func x() { return } // want "no result values expected"
+func x() { return } // want `no result values expected|too many return values`
 
-func y() { return } // want "no result values expected"
+func y() { return } // want `no result values expected|too many return values`
diff --git a/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go b/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go
new file mode 100644
index 0000000..f8aa436
--- /dev/null
+++ b/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go
@@ -0,0 +1,6 @@
+package noresult
+
+func hello[T any]() {
+	var z T
+	return z // want `no result values expected|too many return values`
+}
diff --git a/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go.golden b/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go.golden
new file mode 100644
index 0000000..963e3f4
--- /dev/null
+++ b/internal/lsp/analysis/noresultvalues/testdata/src/typeparams/a.go.golden
@@ -0,0 +1,6 @@
+package noresult
+
+func hello[T any]() {
+	var z T
+	return // want `no result values expected|too many return values`
+}
diff --git a/internal/lsp/analysis/simplifyslice/simplifyslice_test.go b/internal/lsp/analysis/simplifyslice/simplifyslice_test.go
index 91db76a..cff6267 100644
--- a/internal/lsp/analysis/simplifyslice/simplifyslice_test.go
+++ b/internal/lsp/analysis/simplifyslice/simplifyslice_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/internal/lsp/analysis/simplifyslice"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.RunWithSuggestedFixes(t, testdata, simplifyslice.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.RunWithSuggestedFixes(t, testdata, simplifyslice.Analyzer, tests...)
 }
diff --git a/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go b/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..69db310
--- /dev/null
+++ b/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,39 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+//go:build go1.18
+// +build go1.18
+
+package testdata
+
+type List[E any] []E
+
+// TODO(suzmue): add a test for generic slice expressions when https://github.com/golang/go/issues/48618 is closed.
+// type S interface{ ~[]int }
+
+var (
+	a [10]byte
+	b [20]float32
+	p List[int]
+
+	_ = p[0:]
+	_ = p[1:10]
+	_ = p[2:len(p)] // want "unneeded: len\\(p\\)"
+	_ = p[3:(len(p))]
+	_ = p[len(a) : len(p)-1]
+	_ = p[0:len(b)]
+	_ = p[2:len(p):len(p)]
+
+	_ = p[:]
+	_ = p[:10]
+	_ = p[:len(p)] // want "unneeded: len\\(p\\)"
+	_ = p[:(len(p))]
+	_ = p[:len(p)-1]
+	_ = p[:len(b)]
+	_ = p[:len(p):len(p)]
+)
+
+func foo[E any](a List[E]) {
+	_ = a[0:len(a)] // want "unneeded: len\\(a\\)"
+}
diff --git a/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden b/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden
new file mode 100644
index 0000000..99ca9e4
--- /dev/null
+++ b/internal/lsp/analysis/simplifyslice/testdata/src/typeparams/typeparams.go.golden
@@ -0,0 +1,39 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+//go:build go1.18
+// +build go1.18
+
+package testdata
+
+type List[E any] []E
+
+// TODO(suzmue): add a test for generic slice expressions when https://github.com/golang/go/issues/48618 is closed.
+// type S interface{ ~[]int }
+
+var (
+	a [10]byte
+	b [20]float32
+	p List[int]
+
+	_ = p[0:]
+	_ = p[1:10]
+	_ = p[2:] // want "unneeded: len\\(p\\)"
+	_ = p[3:(len(p))]
+	_ = p[len(a) : len(p)-1]
+	_ = p[0:len(b)]
+	_ = p[2:len(p):len(p)]
+
+	_ = p[:]
+	_ = p[:10]
+	_ = p[:] // want "unneeded: len\\(p\\)"
+	_ = p[:(len(p))]
+	_ = p[:len(p)-1]
+	_ = p[:len(b)]
+	_ = p[:len(p):len(p)]
+)
+
+func foo[E any](a List[E]) {
+	_ = a[0:] // want "unneeded: len\\(a\\)"
+}
diff --git a/internal/lsp/analysis/stubmethods/stubmethods.go b/internal/lsp/analysis/stubmethods/stubmethods.go
new file mode 100644
index 0000000..c2a4138
--- /dev/null
+++ b/internal/lsp/analysis/stubmethods/stubmethods.go
@@ -0,0 +1,351 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package stubmethods
+
+import (
+	"bytes"
+	"fmt"
+	"go/ast"
+	"go/format"
+	"go/token"
+	"go/types"
+	"strconv"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/internal/analysisinternal"
+	"golang.org/x/tools/internal/typesinternal"
+)
+
+const Doc = `stub methods analyzer
+
+This analyzer generates method stubs for concrete types
+in order to implement a target interface`
+
+var Analyzer = &analysis.Analyzer{
+	Name:             "stubmethods",
+	Doc:              Doc,
+	Requires:         []*analysis.Analyzer{inspect.Analyzer},
+	Run:              run,
+	RunDespiteErrors: true,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	for _, err := range analysisinternal.GetTypeErrors(pass) {
+		ifaceErr := strings.Contains(err.Msg, "missing method") || strings.HasPrefix(err.Msg, "cannot convert")
+		if !ifaceErr {
+			continue
+		}
+		var file *ast.File
+		for _, f := range pass.Files {
+			if f.Pos() <= err.Pos && err.Pos < f.End() {
+				file = f
+				break
+			}
+		}
+		if file == nil {
+			continue
+		}
+		// Get the end position of the error.
+		_, _, endPos, ok := typesinternal.ReadGo116ErrorData(err)
+		if !ok {
+			var buf bytes.Buffer
+			if err := format.Node(&buf, pass.Fset, file); err != nil {
+				continue
+			}
+			endPos = analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos)
+		}
+		path, _ := astutil.PathEnclosingInterval(file, err.Pos, endPos)
+		si := GetStubInfo(pass.TypesInfo, path, err.Pos)
+		if si == nil {
+			continue
+		}
+		qf := RelativeToFiles(si.Concrete.Obj().Pkg(), file, nil, nil)
+		pass.Report(analysis.Diagnostic{
+			Pos:     err.Pos,
+			End:     endPos,
+			Message: fmt.Sprintf("Implement %s", types.TypeString(si.Interface.Type(), qf)),
+		})
+	}
+	return nil, nil
+}
+
+// StubInfo represents a concrete type
+// that wants to stub out an interface type
+type StubInfo struct {
+	// Interface is the interface that the client wants to implement.
+	// When the interface is defined, the underlying object will be a TypeName.
+	// Note that we keep track of types.Object instead of types.Type in order
+	// to keep a reference to the declaring object's package and the ast file
+	// in the case where the concrete type file requires a new import that happens to be renamed
+	// in the interface file.
+	// TODO(marwan-at-work): implement interface literals.
+	Interface types.Object
+	Concrete  *types.Named
+	Pointer   bool
+}
+
+// GetStubInfo determines whether the "missing method" error
+// can be used to deduce what the concrete and interface types are.
+func GetStubInfo(ti *types.Info, path []ast.Node, pos token.Pos) *StubInfo {
+	for _, n := range path {
+		switch n := n.(type) {
+		case *ast.ValueSpec:
+			return fromValueSpec(ti, n, pos)
+		case *ast.ReturnStmt:
+			// An error here may not indicate a real error the user should know about, but it may.
+			// Therefore, it would be best to log it out for debugging/reporting purposes instead of ignoring
+			// it. However, event.Log takes a context which is not passed via the analysis package.
+			// TODO(marwan-at-work): properly log this error.
+			si, _ := fromReturnStmt(ti, pos, path, n)
+			return si
+		case *ast.AssignStmt:
+			return fromAssignStmt(ti, n, pos)
+		}
+	}
+	return nil
+}
+
+// fromReturnStmt analyzes a "return" statement to extract
+// a concrete type that is trying to be returned as an interface type.
+//
+// For example, func() io.Writer { return myType{} }
+// would return StubInfo with the interface being io.Writer and the concrete type being myType{}.
+func fromReturnStmt(ti *types.Info, pos token.Pos, path []ast.Node, rs *ast.ReturnStmt) (*StubInfo, error) {
+	returnIdx := -1
+	for i, r := range rs.Results {
+		if pos >= r.Pos() && pos <= r.End() {
+			returnIdx = i
+		}
+	}
+	if returnIdx == -1 {
+		return nil, fmt.Errorf("pos %d not within return statement bounds: [%d-%d]", pos, rs.Pos(), rs.End())
+	}
+	concObj, pointer := concreteType(rs.Results[returnIdx], ti)
+	if concObj == nil || concObj.Obj().Pkg() == nil {
+		return nil, nil
+	}
+	ef := enclosingFunction(path, ti)
+	if ef == nil {
+		return nil, fmt.Errorf("could not find the enclosing function of the return statement")
+	}
+	iface := ifaceType(ef.Results.List[returnIdx].Type, ti)
+	if iface == nil {
+		return nil, nil
+	}
+	return &StubInfo{
+		Concrete:  concObj,
+		Pointer:   pointer,
+		Interface: iface,
+	}, nil
+}
+
+// fromValueSpec returns *StubInfo from a variable declaration such as
+// var x io.Writer = &T{}
+func fromValueSpec(ti *types.Info, vs *ast.ValueSpec, pos token.Pos) *StubInfo {
+	var idx int
+	for i, vs := range vs.Values {
+		if pos >= vs.Pos() && pos <= vs.End() {
+			idx = i
+			break
+		}
+	}
+
+	valueNode := vs.Values[idx]
+	ifaceNode := vs.Type
+	callExp, ok := valueNode.(*ast.CallExpr)
+	// if the ValueSpec is `var _ = myInterface(...)`
+	// as opposed to `var _ myInterface = ...`
+	if ifaceNode == nil && ok && len(callExp.Args) == 1 {
+		ifaceNode = callExp.Fun
+		valueNode = callExp.Args[0]
+	}
+	concObj, pointer := concreteType(valueNode, ti)
+	if concObj == nil || concObj.Obj().Pkg() == nil {
+		return nil
+	}
+	ifaceObj := ifaceType(ifaceNode, ti)
+	if ifaceObj == nil {
+		return nil
+	}
+	return &StubInfo{
+		Concrete:  concObj,
+		Interface: ifaceObj,
+		Pointer:   pointer,
+	}
+}
+
+// fromAssignStmt returns *StubInfo from a variable re-assignment such as
+// var x io.Writer
+// x = &T{}
+func fromAssignStmt(ti *types.Info, as *ast.AssignStmt, pos token.Pos) *StubInfo {
+	idx := -1
+	var lhs, rhs ast.Expr
+	// Given a re-assignment interface conversion error,
+	// the compiler error shows up on the right hand side of the expression.
+	// For example, x = &T{} where x is io.Writer highlights the error
+	// under "&T{}" and not "x".
+	for i, hs := range as.Rhs {
+		if pos >= hs.Pos() && pos <= hs.End() {
+			idx = i
+			break
+		}
+	}
+	if idx == -1 {
+		return nil
+	}
+	// Technically, this should never happen as
+	// we would get a "cannot assign N values to M variables"
+	// before we get an interface conversion error. Nonetheless,
+	// guard against out of range index errors.
+	if idx >= len(as.Lhs) {
+		return nil
+	}
+	lhs, rhs = as.Lhs[idx], as.Rhs[idx]
+	ifaceObj := ifaceType(lhs, ti)
+	if ifaceObj == nil {
+		return nil
+	}
+	concType, pointer := concreteType(rhs, ti)
+	if concType == nil || concType.Obj().Pkg() == nil {
+		return nil
+	}
+	return &StubInfo{
+		Concrete:  concType,
+		Interface: ifaceObj,
+		Pointer:   pointer,
+	}
+}
+
+// RelativeToFiles returns a types.Qualifier that formats package names
+// according to the files where the concrete and interface types are defined.
+//
+// This is similar to types.RelativeTo, except that if a file imports the package under a different name,
+// that renamed import is used; and if the file imports the package but the import is ignored ("." or "_"),
+// the original package name is returned. It also prefers package names in ifaceFile in case
+// an import is missing from concFile but is present in ifaceFile.
+//
+// Additionally, if missingImport is not nil, the function will be called whenever the concFile
+// is presented with a package that is not imported. This is useful so that as types.TypeString is
+// formatting a function signature, it is identifying packages that will need to be imported when
+// stubbing an interface.
+func RelativeToFiles(concPkg *types.Package, concFile, ifaceFile *ast.File, missingImport func(name, path string)) types.Qualifier {
+	return func(other *types.Package) string {
+		if other == concPkg {
+			return ""
+		}
+
+		// Check if the concrete file already has the given import,
+		// if so return the default package name or the renamed import statement.
+		for _, imp := range concFile.Imports {
+			impPath, _ := strconv.Unquote(imp.Path.Value)
+			isIgnored := imp.Name != nil && (imp.Name.Name == "." || imp.Name.Name == "_")
+			if impPath == other.Path() && !isIgnored {
+				importName := other.Name()
+				if imp.Name != nil {
+					importName = imp.Name.Name
+				}
+				return importName
+			}
+		}
+
+		// If the concrete file does not have the import, check if the package
+		// is renamed in the interface file and prefer that.
+		var importName string
+		if ifaceFile != nil {
+			for _, imp := range ifaceFile.Imports {
+				impPath, _ := strconv.Unquote(imp.Path.Value)
+				isIgnored := imp.Name != nil && (imp.Name.Name == "." || imp.Name.Name == "_")
+				if impPath == other.Path() && !isIgnored {
+					if imp.Name != nil && imp.Name.Name != concPkg.Name() {
+						importName = imp.Name.Name
+					}
+					break
+				}
+			}
+		}
+
+		if missingImport != nil {
+			missingImport(importName, other.Path())
+		}
+
+		// Up until this point, importName must stay empty when calling missingImport,
+		// otherwise we'd end up with `import time "time"` which doesn't look idiomatic.
+		if importName == "" {
+			importName = other.Name()
+		}
+		return importName
+	}
+}
+
+// ifaceType will try to extract the types.Object that defines
+// the interface given the ast.Expr where the "missing method"
+// or "conversion" errors happen.
+func ifaceType(n ast.Expr, ti *types.Info) types.Object {
+	tv, ok := ti.Types[n]
+	if !ok {
+		return nil
+	}
+	typ := tv.Type
+	named, ok := typ.(*types.Named)
+	if !ok {
+		return nil
+	}
+	_, ok = named.Underlying().(*types.Interface)
+	if !ok {
+		return nil
+	}
+	// Interfaces defined in the "builtin" package return a nil Pkg().
+	// But they are still real interfaces that we need to make a special case for.
+	// Therefore, protect gopls from panicking if a new interface type was added in the future.
+	if named.Obj().Pkg() == nil && named.Obj().Name() != "error" {
+		return nil
+	}
+	return named.Obj()
+}
+
+// concreteType tries to extract the *types.Named that defines
+// the concrete type given the ast.Expr where the "missing method"
+// or "conversion" errors happened. If the concrete type is something
+// that cannot have methods defined on it (such as basic types), this
+// method will return a nil *types.Named. The second return parameter
+// is a boolean that indicates whether the concreteType was defined as a
+// pointer or value.
+func concreteType(n ast.Expr, ti *types.Info) (*types.Named, bool) {
+	tv, ok := ti.Types[n]
+	if !ok {
+		return nil, false
+	}
+	typ := tv.Type
+	ptr, isPtr := typ.(*types.Pointer)
+	if isPtr {
+		typ = ptr.Elem()
+	}
+	named, ok := typ.(*types.Named)
+	if !ok {
+		return nil, false
+	}
+	return named, isPtr
+}
+
+// enclosingFunction returns the signature and type of the function
+// enclosing the given position.
+func enclosingFunction(path []ast.Node, info *types.Info) *ast.FuncType {
+	for _, node := range path {
+		switch t := node.(type) {
+		case *ast.FuncDecl:
+			if _, ok := info.Defs[t.Name]; ok {
+				return t.Type
+			}
+		case *ast.FuncLit:
+			if _, ok := info.Types[t]; ok {
+				return t.Type
+			}
+		}
+	}
+	return nil
+}
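RelativeToFiles above returns a types.Qualifier, the hook that types.TypeString consults when deciding how to print package names. A toy illustration of that mechanism follows; the packages and the "myio" rename are invented, and this is not the gopls qualifier itself.

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	ioPkg := types.NewPackage("io", "io")
	writer := types.NewNamed(
		types.NewTypeName(token.NoPos, ioPkg, "Writer", nil),
		types.NewInterfaceType(nil, nil),
		nil,
	)

	// Nil qualifier: TypeString falls back to the full package path.
	fmt.Println(types.TypeString(writer, nil)) // io.Writer

	// A custom qualifier, in the spirit of RelativeToFiles: pretend the
	// concrete type's file imported the package as `myio "io"`.
	qf := func(p *types.Package) string {
		if p.Path() == "io" {
			return "myio"
		}
		return p.Name()
	}
	fmt.Println(types.TypeString(writer, qf)) // myio.Writer
}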
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/channels.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/channels.go
new file mode 100644
index 0000000..ecf00ec
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/channels.go
@@ -0,0 +1,13 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+func channels(s string) {
+	undefinedChannels(c()) // want "undeclared name: undefinedChannels"
+}
+
+func c() (<-chan string, chan string) {
+	return make(<-chan string), make(chan string)
+}
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/consecutive_params.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/consecutive_params.go
new file mode 100644
index 0000000..ab7b2ba
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/consecutive_params.go
@@ -0,0 +1,10 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+func consecutiveParams() {
+	var s string
+	undefinedConsecutiveParams(s, s) // want "undeclared name: undefinedConsecutiveParams"
+}
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/error_param.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/error_param.go
new file mode 100644
index 0000000..341a9d2
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/error_param.go
@@ -0,0 +1,10 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+func errorParam() {
+	var err error
+	undefinedErrorParam(err) // want "undeclared name: undefinedErrorParam"
+}
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/literals.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/literals.go
new file mode 100644
index 0000000..ab82463
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/literals.go
@@ -0,0 +1,11 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+type T struct{}
+
+func literals() {
+	undefinedLiterals("hey compiler", T{}, &T{}) // want "undeclared name: undefinedLiterals"
+}
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/operation.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/operation.go
new file mode 100644
index 0000000..9a54382
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/operation.go
@@ -0,0 +1,11 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+import "time"
+
+func operation() {
+	undefinedOperation(10 * time.Second) // want "undeclared name: undefinedOperation"
+}
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/selector.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/selector.go
new file mode 100644
index 0000000..9ed09a2
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/selector.go
@@ -0,0 +1,10 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+func selector() {
+	m := map[int]bool{}
+	undefinedSelector(m[1]) // want "undeclared name: undefinedSelector"
+}
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/slice.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/slice.go
new file mode 100644
index 0000000..d741c68
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/slice.go
@@ -0,0 +1,9 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+func slice() {
+	undefinedSlice([]int{1, 2}) // want "undeclared name: undefinedSlice"
+}
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/tuple.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/tuple.go
new file mode 100644
index 0000000..3148e8f
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/tuple.go
@@ -0,0 +1,13 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+func tuple() {
+	undefinedTuple(b()) // want "undeclared name: undefinedTuple"
+}
+
+func b() (string, error) {
+	return "", nil
+}
diff --git a/internal/lsp/analysis/undeclaredname/testdata/src/a/unique_params.go b/internal/lsp/analysis/undeclaredname/testdata/src/a/unique_params.go
new file mode 100644
index 0000000..98f77a4
--- /dev/null
+++ b/internal/lsp/analysis/undeclaredname/testdata/src/a/unique_params.go
@@ -0,0 +1,11 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package undeclared
+
+func uniqueArguments() {
+	var s string
+	var i int
+	undefinedUniqueArguments(s, i, s) // want "undeclared name: undefinedUniqueArguments"
+}
diff --git a/internal/lsp/analysis/undeclaredname/undeclared.go b/internal/lsp/analysis/undeclaredname/undeclared.go
index df24d1d..22b552c 100644
--- a/internal/lsp/analysis/undeclaredname/undeclared.go
+++ b/internal/lsp/analysis/undeclaredname/undeclared.go
@@ -10,9 +10,11 @@
 	"bytes"
 	"fmt"
 	"go/ast"
+	"go/format"
 	"go/token"
 	"go/types"
 	"strings"
+	"unicode"
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/ast/astutil"
@@ -23,8 +25,17 @@
 const Doc = `suggested fixes for "undeclared name: <>"
 
 This checker provides suggested fixes for type errors of the
-type "undeclared name: <>". It will insert a new statement:
-"<> := ".`
+type "undeclared name: <>". It will either insert a new statement,
+such as:
+
+"<> := "
+
+or a new function declaration, such as:
+
+func <>(inferred parameters) {
+	panic("implement me!")
+}
+`
 
 var Analyzer = &analysis.Analyzer{
 	Name:             string(analysisinternal.UndeclaredName),
@@ -38,69 +49,94 @@
 
 func run(pass *analysis.Pass) (interface{}, error) {
 	for _, err := range analysisinternal.GetTypeErrors(pass) {
-		if !FixesError(err.Msg) {
-			continue
-		}
-		name := strings.TrimPrefix(err.Msg, undeclaredNamePrefix)
-		var file *ast.File
-		for _, f := range pass.Files {
-			if f.Pos() <= err.Pos && err.Pos < f.End() {
-				file = f
-				break
-			}
-		}
-		if file == nil {
-			continue
-		}
-
-		// Get the path for the relevant range.
-		path, _ := astutil.PathEnclosingInterval(file, err.Pos, err.Pos)
-		if len(path) < 2 {
-			continue
-		}
-		ident, ok := path[0].(*ast.Ident)
-		if !ok || ident.Name != name {
-			continue
-		}
-		// Skip selector expressions because it might be too complex
-		// to try and provide a suggested fix for fields and methods.
-		if _, ok := path[1].(*ast.SelectorExpr); ok {
-			continue
-		}
-		// TODO(golang.org/issue/34644): Handle call expressions with suggested
-		// fixes to create a function.
-		if _, ok := path[1].(*ast.CallExpr); ok {
-			continue
-		}
-		tok := pass.Fset.File(file.Pos())
-		if tok == nil {
-			continue
-		}
-		offset := pass.Fset.Position(err.Pos).Offset
-		end := tok.Pos(offset + len(name))
-		pass.Report(analysis.Diagnostic{
-			Pos:     err.Pos,
-			End:     end,
-			Message: err.Msg,
-		})
+		runForError(pass, err)
 	}
 	return nil, nil
 }
 
-func SuggestedFix(fset *token.FileSet, rng span.Range, content []byte, file *ast.File, _ *types.Package, _ *types.Info) (*analysis.SuggestedFix, error) {
+func runForError(pass *analysis.Pass, err types.Error) {
+	if !strings.HasPrefix(err.Msg, undeclaredNamePrefix) {
+		return
+	}
+	name := strings.TrimPrefix(err.Msg, undeclaredNamePrefix)
+	var file *ast.File
+	for _, f := range pass.Files {
+		if f.Pos() <= err.Pos && err.Pos < f.End() {
+			file = f
+			break
+		}
+	}
+	if file == nil {
+		return
+	}
+
+	// Get the path for the relevant range.
+	path, _ := astutil.PathEnclosingInterval(file, err.Pos, err.Pos)
+	if len(path) < 2 {
+		return
+	}
+	ident, ok := path[0].(*ast.Ident)
+	if !ok || ident.Name != name {
+		return
+	}
+
+	// Undeclared quick fixes only work in function bodies.
+	inFunc := false
+	for i := range path {
+		if _, inFunc = path[i].(*ast.FuncDecl); inFunc {
+			if i == 0 {
+				return
+			}
+			if _, isBody := path[i-1].(*ast.BlockStmt); !isBody {
+				return
+			}
+			break
+		}
+	}
+	if !inFunc {
+		return
+	}
+	// Skip selector expressions because it might be too complex
+	// to try and provide a suggested fix for fields and methods.
+	if _, ok := path[1].(*ast.SelectorExpr); ok {
+		return
+	}
+	tok := pass.Fset.File(file.Pos())
+	if tok == nil {
+		return
+	}
+	offset := pass.Fset.Position(err.Pos).Offset
+	end := tok.Pos(offset + len(name))
+	pass.Report(analysis.Diagnostic{
+		Pos:     err.Pos,
+		End:     end,
+		Message: err.Msg,
+	})
+}
+
+func SuggestedFix(fset *token.FileSet, rng span.Range, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
 	pos := rng.Start // don't use the end
 	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
 	if len(path) < 2 {
-		return nil, fmt.Errorf("")
+		return nil, fmt.Errorf("no expression found")
 	}
 	ident, ok := path[0].(*ast.Ident)
 	if !ok {
-		return nil, fmt.Errorf("")
+		return nil, fmt.Errorf("no identifier found")
 	}
+
+	// Check for a possible call expression, in which case we should add a
+	// new function declaration.
+	if len(path) > 1 {
+		if _, ok := path[1].(*ast.CallExpr); ok {
+			return newFunctionDeclaration(path, file, pkg, info, fset)
+		}
+	}
+
 	// Get the place to insert the new statement.
 	insertBeforeStmt := analysisinternal.StmtToInsertVarBefore(path)
 	if insertBeforeStmt == nil {
-		return nil, fmt.Errorf("")
+		return nil, fmt.Errorf("could not locate insertion point")
 	}
 
 	insertBefore := fset.Position(insertBeforeStmt.Pos()).Offset
@@ -111,6 +147,7 @@
 	if nl := bytes.LastIndex(contentBeforeStmt, []byte("\n")); nl != -1 {
 		indent = string(contentBeforeStmt[nl:])
 	}
+
 	// Create the new local variable statement.
 	newStmt := fmt.Sprintf("%s := %s", ident.Name, indent)
 	return &analysis.SuggestedFix{
@@ -123,6 +160,181 @@
 	}, nil
 }
 
-func FixesError(msg string) bool {
-	return strings.HasPrefix(msg, undeclaredNamePrefix)
+func newFunctionDeclaration(path []ast.Node, file *ast.File, pkg *types.Package, info *types.Info, fset *token.FileSet) (*analysis.SuggestedFix, error) {
+	if len(path) < 3 {
+		return nil, fmt.Errorf("unexpected set of enclosing nodes: %v", path)
+	}
+	ident, ok := path[0].(*ast.Ident)
+	if !ok {
+		return nil, fmt.Errorf("no name for function declaration %v (%T)", path[0], path[0])
+	}
+	call, ok := path[1].(*ast.CallExpr)
+	if !ok {
+		return nil, fmt.Errorf("no call expression found %v (%T)", path[1], path[1])
+	}
+
+	// Find the enclosing function, so that we can add the new declaration
+	// below.
+	var enclosing *ast.FuncDecl
+	for _, n := range path {
+		if n, ok := n.(*ast.FuncDecl); ok {
+			enclosing = n
+			break
+		}
+	}
+	// TODO(rstambler): Support the situation when there is no enclosing
+	// function.
+	if enclosing == nil {
+		return nil, fmt.Errorf("no enclosing function found: %v", path)
+	}
+
+	pos := enclosing.End()
+
+	var paramNames []string
+	var paramTypes []types.Type
+	// keep track of all param names to later ensure uniqueness
+	nameCounts := map[string]int{}
+	for _, arg := range call.Args {
+		typ := info.TypeOf(arg)
+		if typ == nil {
+			return nil, fmt.Errorf("unable to determine type for %s", arg)
+		}
+
+		switch t := typ.(type) {
+		// this is the case where another function call returning multiple
+		// results is used as an argument
+		case *types.Tuple:
+			n := t.Len()
+			for i := 0; i < n; i++ {
+				name := typeToArgName(t.At(i).Type())
+				nameCounts[name]++
+
+				paramNames = append(paramNames, name)
+				paramTypes = append(paramTypes, types.Default(t.At(i).Type()))
+			}
+
+		default:
+			// does the argument have a name we can reuse?
+			// only happens in case of a *ast.Ident
+			var name string
+			if ident, ok := arg.(*ast.Ident); ok {
+				name = ident.Name
+			}
+
+			if name == "" {
+				name = typeToArgName(typ)
+			}
+
+			nameCounts[name]++
+
+			paramNames = append(paramNames, name)
+			paramTypes = append(paramTypes, types.Default(typ))
+		}
+	}
+
+	for n, c := range nameCounts {
+		// Any names we saw more than once will need a unique suffix added
+		// on. Reset the count to 1 to act as the suffix for the first
+		// occurrence of that name.
+		if c >= 2 {
+			nameCounts[n] = 1
+		} else {
+			delete(nameCounts, n)
+		}
+	}
+
+	params := &ast.FieldList{}
+
+	for i, name := range paramNames {
+		if suffix, repeats := nameCounts[name]; repeats {
+			nameCounts[name]++
+			name = fmt.Sprintf("%s%d", name, suffix)
+		}
+
+		// only worth checking after previous param in the list
+		if i > 0 {
+			// if type of parameter at hand is the same as the previous one,
+			// add it to the previous param list of identifiers so to have:
+			//  (s1, s2 string)
+			// and not
+			//  (s1 string, s2 string)
+			if paramTypes[i] == paramTypes[i-1] {
+				params.List[len(params.List)-1].Names = append(params.List[len(params.List)-1].Names, ast.NewIdent(name))
+				continue
+			}
+		}
+
+		params.List = append(params.List, &ast.Field{
+			Names: []*ast.Ident{
+				ast.NewIdent(name),
+			},
+			Type: analysisinternal.TypeExpr(fset, file, pkg, paramTypes[i]),
+		})
+	}
+
+	decl := &ast.FuncDecl{
+		Name: ast.NewIdent(ident.Name),
+		Type: &ast.FuncType{
+			Params: params,
+			// TODO(rstambler): Also handle result parameters here.
+		},
+		Body: &ast.BlockStmt{
+			List: []ast.Stmt{
+				&ast.ExprStmt{
+					X: &ast.CallExpr{
+						Fun: ast.NewIdent("panic"),
+						Args: []ast.Expr{
+							&ast.BasicLit{
+								Value: `"unimplemented"`,
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	b := bytes.NewBufferString("\n\n")
+	if err := format.Node(b, fset, decl); err != nil {
+		return nil, err
+	}
+	return &analysis.SuggestedFix{
+		Message: fmt.Sprintf("Create function \"%s\"", ident.Name),
+		TextEdits: []analysis.TextEdit{{
+			Pos:     pos,
+			End:     pos,
+			NewText: b.Bytes(),
+		}},
+	}, nil
+}
+func typeToArgName(ty types.Type) string {
+	s := types.Default(ty).String()
+
+	switch t := ty.(type) {
+	case *types.Basic:
+		// use first letter in type name for basic types
+		return s[0:1]
+	case *types.Slice:
+		// use element type to decide var name for slices
+		return typeToArgName(t.Elem())
+	case *types.Array:
+		// use element type to decide var name for arrays
+		return typeToArgName(t.Elem())
+	case *types.Chan:
+		return "ch"
+	}
+
+	s = strings.TrimFunc(s, func(r rune) bool {
+		return !unicode.IsLetter(r)
+	})
+
+	if s == "error" {
+		return "err"
+	}
+
+	// remove package (if present)
+	// and make first letter lowercase
+	a := []rune(s[strings.LastIndexByte(s, '.')+1:])
+	a[0] = unicode.ToLower(a[0])
+	return string(a)
 }
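newFunctionDeclaration above synthesizes an *ast.FuncDecl in memory and renders it with format.Node to produce the text that gets inserted. Below is a minimal sketch of that build-and-print step; the function name and parameters are hard-coded for the illustration rather than inferred as in the patch.

package main

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/format"
	"go/token"
)

func main() {
	decl := &ast.FuncDecl{
		Name: ast.NewIdent("undefinedTuple"),
		Type: &ast.FuncType{
			Params: &ast.FieldList{List: []*ast.Field{
				{Names: []*ast.Ident{ast.NewIdent("s")}, Type: ast.NewIdent("string")},
				{Names: []*ast.Ident{ast.NewIdent("err")}, Type: ast.NewIdent("error")},
			}},
		},
		Body: &ast.BlockStmt{List: []ast.Stmt{
			&ast.ExprStmt{X: &ast.CallExpr{
				Fun:  ast.NewIdent("panic"),
				Args: []ast.Expr{&ast.BasicLit{Kind: token.STRING, Value: `"unimplemented"`}},
			}},
		}},
	}

	var buf bytes.Buffer
	if err := format.Node(&buf, token.NewFileSet(), decl); err != nil {
		panic(err)
	}
	fmt.Println(buf.String())
	// func undefinedTuple(s string, err error) {
	// 	panic("unimplemented")
	// }
}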
diff --git a/internal/lsp/analysis/unusedparams/testdata/src/a/a.go b/internal/lsp/analysis/unusedparams/testdata/src/a/a.go
index 248ecfc..23e4122 100644
--- a/internal/lsp/analysis/unusedparams/testdata/src/a/a.go
+++ b/internal/lsp/analysis/unusedparams/testdata/src/a/a.go
@@ -1,4 +1,4 @@
-// Copyright 2020 The Go Authors. All rights reserved.
+// Copyright 2022 The Go Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
diff --git a/internal/lsp/analysis/unusedparams/testdata/src/a/a.go.golden b/internal/lsp/analysis/unusedparams/testdata/src/a/a.go.golden
new file mode 100644
index 0000000..e28a6bd
--- /dev/null
+++ b/internal/lsp/analysis/unusedparams/testdata/src/a/a.go.golden
@@ -0,0 +1,55 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package a
+
+import (
+	"bytes"
+	"fmt"
+	"net/http"
+)
+
+type parent interface {
+	n(f bool)
+}
+
+type yuh struct {
+	a int
+}
+
+func (y *yuh) n(f bool) {
+	for i := 0; i < 10; i++ {
+		fmt.Println(i)
+	}
+}
+
+func a(i1 int, _ int, i3 int) int { // want "potentially unused parameter: 'i2'"
+	i3 += i1
+	_ = func(_ int) int { // want "potentially unused parameter: 'z'"
+		_ = 1
+		return 1
+	}
+	return i3
+}
+
+func b(_ bytes.Buffer) { // want "potentially unused parameter: 'c'"
+	_ = 1
+}
+
+func z(_ http.ResponseWriter, _ *http.Request) { // want "potentially unused parameter: 'h'"
+	fmt.Println("Before")
+}
+
+func l(h http.Handler) http.Handler {
+	return http.HandlerFunc(z)
+}
+
+func mult(a, _ int) int { // want "potentially unused parameter: 'b'"
+	a += 1
+	return a
+}
+
+func y(a int) {
+	panic("yo")
+}
diff --git a/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go b/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go
new file mode 100644
index 0000000..93af268
--- /dev/null
+++ b/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go
@@ -0,0 +1,55 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+	"bytes"
+	"fmt"
+	"net/http"
+)
+
+type parent[T any] interface {
+	n(f T)
+}
+
+type yuh[T any] struct {
+	a T
+}
+
+func (y *yuh[int]) n(f bool) {
+	for i := 0; i < 10; i++ {
+		fmt.Println(i)
+	}
+}
+
+func a[T comparable](i1 int, i2 T, i3 int) int { // want "potentially unused parameter: 'i2'"
+	i3 += i1
+	_ = func(z int) int { // want "potentially unused parameter: 'z'"
+		_ = 1
+		return 1
+	}
+	return i3
+}
+
+func b[T any](c bytes.Buffer) { // want "potentially unused parameter: 'c'"
+	_ = 1
+}
+
+func z[T http.ResponseWriter](h T, _ *http.Request) { // want "potentially unused parameter: 'h'"
+	fmt.Println("Before")
+}
+
+func l(h http.Handler) http.Handler {
+	return http.HandlerFunc(z[http.ResponseWriter])
+}
+
+func mult(a, b int) int { // want "potentially unused parameter: 'b'"
+	a += 1
+	return a
+}
+
+func y[T any](a T) {
+	panic("yo")
+}
diff --git a/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden b/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden
new file mode 100644
index 0000000..c86bf28
--- /dev/null
+++ b/internal/lsp/analysis/unusedparams/testdata/src/typeparams/typeparams.go.golden
@@ -0,0 +1,55 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+	"bytes"
+	"fmt"
+	"net/http"
+)
+
+type parent[T any] interface {
+	n(f T)
+}
+
+type yuh[T any] struct {
+	a T
+}
+
+func (y *yuh[int]) n(f bool) {
+	for i := 0; i < 10; i++ {
+		fmt.Println(i)
+	}
+}
+
+func a[T comparable](i1 int, _ T, i3 int) int { // want "potentially unused parameter: 'i2'"
+	i3 += i1
+	_ = func(_ int) int { // want "potentially unused parameter: 'z'"
+		_ = 1
+		return 1
+	}
+	return i3
+}
+
+func b[T any](_ bytes.Buffer) { // want "potentially unused parameter: 'c'"
+	_ = 1
+}
+
+func z[T http.ResponseWriter](_ T, _ *http.Request) { // want "potentially unused parameter: 'h'"
+	fmt.Println("Before")
+}
+
+func l(h http.Handler) http.Handler {
+	return http.HandlerFunc(z[http.ResponseWriter])
+}
+
+func mult(a, _ int) int { // want "potentially unused parameter: 'b'"
+	a += 1
+	return a
+}
+
+func y[T any](a T) {
+	panic("yo")
+}
diff --git a/internal/lsp/analysis/unusedparams/unusedparams.go b/internal/lsp/analysis/unusedparams/unusedparams.go
index e6f2274..4c933c8 100644
--- a/internal/lsp/analysis/unusedparams/unusedparams.go
+++ b/internal/lsp/analysis/unusedparams/unusedparams.go
@@ -131,13 +131,20 @@
 				start, end = u.ident.Pos(), u.ident.End()
 			}
 			// TODO(golang/go#36602): Add suggested fixes to automatically
-			// remove the unused parameter. To start, just remove it from the
-			// function declaration. Later, remove it from every use of this
+			// remove the unused parameter from every use of this
 			// function.
 			pass.Report(analysis.Diagnostic{
 				Pos:     start,
 				End:     end,
 				Message: fmt.Sprintf("potentially unused parameter: '%s'", u.ident.Name),
+				SuggestedFixes: []analysis.SuggestedFix{{
+					Message: `Replace with "_"`,
+					TextEdits: []analysis.TextEdit{{
+						Pos:     u.ident.Pos(),
+						End:     u.ident.End(),
+						NewText: []byte("_"),
+					}},
+				}},
 			})
 		}
 	})
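
Note: the SuggestedFix added above carries a single TextEdit that rewrites the parameter's identifier to "_". The standalone sketch below (not part of gopls, which routes edits through its protocol layer) shows how a client could apply such an edit to raw source bytes; the package name and applyEdit helper are illustrative assumptions.

package fixdemo

import (
	"go/token"

	"golang.org/x/tools/go/analysis"
)

// applyEdit applies one analysis.TextEdit, such as the parameter-to-"_"
// replacement produced above, to the source bytes of the file containing
// edit.Pos. It is a minimal sketch with no overlap or bounds handling.
func applyEdit(fset *token.FileSet, src []byte, edit analysis.TextEdit) []byte {
	f := fset.File(edit.Pos)
	start, end := f.Offset(edit.Pos), f.Offset(edit.End)
	out := append([]byte(nil), src[:start]...)
	out = append(out, edit.NewText...)
	return append(out, src[end:]...)
}
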
diff --git a/internal/lsp/analysis/unusedparams/unusedparams_test.go b/internal/lsp/analysis/unusedparams/unusedparams_test.go
index 907f71c..dff17c9 100644
--- a/internal/lsp/analysis/unusedparams/unusedparams_test.go
+++ b/internal/lsp/analysis/unusedparams/unusedparams_test.go
@@ -9,9 +9,14 @@
 
 	"golang.org/x/tools/go/analysis/analysistest"
 	"golang.org/x/tools/internal/lsp/analysis/unusedparams"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 func Test(t *testing.T) {
 	testdata := analysistest.TestData()
-	analysistest.Run(t, testdata, unusedparams.Analyzer, "a")
+	tests := []string{"a"}
+	if typeparams.Enabled {
+		tests = append(tests, "typeparams")
+	}
+	analysistest.RunWithSuggestedFixes(t, testdata, unusedparams.Analyzer, tests...)
 }
diff --git a/internal/lsp/analysis/useany/testdata/src/a/a.go b/internal/lsp/analysis/useany/testdata/src/a/a.go
new file mode 100644
index 0000000..22d6931
--- /dev/null
+++ b/internal/lsp/analysis/useany/testdata/src/a/a.go
@@ -0,0 +1,25 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the useany checker.
+
+package a
+
+type Any interface{}
+
+func _[T interface{}]()                    {} // want "could use \"any\" for this empty interface"
+func _[X any, T interface{}]()             {} // want "could use \"any\" for this empty interface"
+func _[any interface{}]()                  {} // want "could use \"any\" for this empty interface"
+func _[T Any]()                            {} // want "could use \"any\" for this empty interface"
+func _[T interface{ int | interface{} }]() {} // want "could use \"any\" for this empty interface"
+func _[T interface{ int | Any }]()         {} // want "could use \"any\" for this empty interface"
+func _[T any]()                            {}
+
+type _[T interface{}] int                    // want "could use \"any\" for this empty interface"
+type _[X any, T interface{}] int             // want "could use \"any\" for this empty interface"
+type _[any interface{}] int                  // want "could use \"any\" for this empty interface"
+type _[T Any] int                            // want "could use \"any\" for this empty interface"
+type _[T interface{ int | interface{} }] int // want "could use \"any\" for this empty interface"
+type _[T interface{ int | Any }] int         // want "could use \"any\" for this empty interface"
+type _[T any] int
diff --git a/internal/lsp/analysis/useany/testdata/src/a/a.go.golden b/internal/lsp/analysis/useany/testdata/src/a/a.go.golden
new file mode 100644
index 0000000..efd8fd6
--- /dev/null
+++ b/internal/lsp/analysis/useany/testdata/src/a/a.go.golden
@@ -0,0 +1,25 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the useany checker.
+
+package a
+
+type Any interface{}
+
+func _[T any]()           {} // want "could use \"any\" for this empty interface"
+func _[X any, T any]()    {} // want "could use \"any\" for this empty interface"
+func _[any interface{}]() {} // want "could use \"any\" for this empty interface"
+func _[T any]()           {} // want "could use \"any\" for this empty interface"
+func _[T any]()           {} // want "could use \"any\" for this empty interface"
+func _[T any]()           {} // want "could use \"any\" for this empty interface"
+func _[T any]()           {}
+
+type _[T any] int           // want "could use \"any\" for this empty interface"
+type _[X any, T any] int    // want "could use \"any\" for this empty interface"
+type _[any interface{}] int // want "could use \"any\" for this empty interface"
+type _[T any] int           // want "could use \"any\" for this empty interface"
+type _[T any] int           // want "could use \"any\" for this empty interface"
+type _[T any] int           // want "could use \"any\" for this empty interface"
+type _[T any] int
diff --git a/internal/lsp/analysis/useany/useany.go b/internal/lsp/analysis/useany/useany.go
new file mode 100644
index 0000000..73e2f76
--- /dev/null
+++ b/internal/lsp/analysis/useany/useany.go
@@ -0,0 +1,102 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package useany defines an Analyzer that checks for usage of interface{} in
+// constraints, rather than the predeclared any.
+package useany
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+const Doc = `check for constraints that could be simplified to "any"`
+
+var Analyzer = &analysis.Analyzer{
+	Name:     "useany",
+	Doc:      Doc,
+	Requires: []*analysis.Analyzer{inspect.Analyzer},
+	Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+	universeAny := types.Universe.Lookup("any")
+	if universeAny == nil {
+		// Go <= 1.17. Nothing to check.
+		return nil, nil
+	}
+
+	nodeFilter := []ast.Node{
+		(*ast.TypeSpec)(nil),
+		(*ast.FuncType)(nil),
+	}
+
+	inspect.Preorder(nodeFilter, func(node ast.Node) {
+		var tparams *ast.FieldList
+		switch node := node.(type) {
+		case *ast.TypeSpec:
+			tparams = typeparams.ForTypeSpec(node)
+		case *ast.FuncType:
+			tparams = typeparams.ForFuncType(node)
+		default:
+			panic(fmt.Sprintf("unexpected node type %T", node))
+		}
+		if tparams.NumFields() == 0 {
+			return
+		}
+
+		for _, field := range tparams.List {
+			typ := pass.TypesInfo.Types[field.Type].Type
+			if typ == nil {
+				continue // something is wrong, but not our concern
+			}
+			iface, ok := typ.Underlying().(*types.Interface)
+			if !ok {
+				continue // invalid constraint
+			}
+
+			// If the constraint is the empty interface, offer a fix to use 'any'
+			// instead.
+			if iface.Empty() {
+				id, _ := field.Type.(*ast.Ident)
+				if id != nil && pass.TypesInfo.Uses[id] == universeAny {
+					continue
+				}
+
+				diag := analysis.Diagnostic{
+					Pos:     field.Type.Pos(),
+					End:     field.Type.End(),
+					Message: `could use "any" for this empty interface`,
+				}
+
+				// Only suggest a fix to 'any' if we actually resolve the predeclared
+				// any in this scope.
+				if scope := pass.TypesInfo.Scopes[node]; scope != nil {
+					if _, any := scope.LookupParent("any", token.NoPos); any == universeAny {
+						diag.SuggestedFixes = []analysis.SuggestedFix{{
+							Message: `use "any"`,
+							TextEdits: []analysis.TextEdit{{
+								Pos:     field.Type.Pos(),
+								End:     field.Type.End(),
+								NewText: []byte("any"),
+							}},
+						}}
+					}
+				}
+
+				pass.Report(diag)
+			}
+		}
+	})
+	return nil, nil
+}
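
For context, a gopls-independent way to run an analyzer like the one above is through the singlechecker driver. The import path below is illustrative only: useany lives under internal/ and cannot actually be imported from outside x/tools.

package main

import (
	"golang.org/x/tools/go/analysis/singlechecker"

	"golang.org/x/tools/internal/lsp/analysis/useany" // illustrative: internal packages are not importable externally
)

// Running this hypothetical driver as `useany ./...` would report empty
// interface constraints and offer the `use "any"` fix shown above.
func main() { singlechecker.Main(useany.Analyzer) }
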
diff --git a/internal/lsp/analysis/useany/useany_test.go b/internal/lsp/analysis/useany/useany_test.go
new file mode 100644
index 0000000..535d915
--- /dev/null
+++ b/internal/lsp/analysis/useany/useany_test.go
@@ -0,0 +1,21 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package useany_test
+
+import (
+	"testing"
+
+	"golang.org/x/tools/go/analysis/analysistest"
+	"golang.org/x/tools/internal/lsp/analysis/useany"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func Test(t *testing.T) {
+	if !typeparams.Enabled {
+		t.Skip("type params are not enabled")
+	}
+	testdata := analysistest.TestData()
+	analysistest.RunWithSuggestedFixes(t, testdata, useany.Analyzer, "a")
+}
diff --git a/internal/lsp/cache/analysis.go b/internal/lsp/cache/analysis.go
index baaad5a..d66a3ed 100644
--- a/internal/lsp/cache/analysis.go
+++ b/internal/lsp/cache/analysis.go
@@ -46,7 +46,9 @@
 	for _, ah := range roots {
 		diagnostics, _, err := ah.analyze(ctx, s)
 		if err != nil {
-			return nil, err
+			// Keep going if a single analyzer failed.
+			event.Error(ctx, fmt.Sprintf("analyzer %q failed", ah.analyzer.Name), err)
+			continue
 		}
 		results = append(results, diagnostics...)
 	}
@@ -388,7 +390,7 @@
 func factType(fact analysis.Fact) reflect.Type {
 	t := reflect.TypeOf(fact)
 	if t.Kind() != reflect.Ptr {
-		panic(fmt.Sprintf("invalid Fact type: got %T, want pointer", t))
+		panic(fmt.Sprintf("invalid Fact type: got %T, want pointer", fact))
 	}
 	return t
 }
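
The factType change above fixes a formatting pitfall: passing a reflect.Type to %T describes the reflect.Type value itself rather than the fact. A small sketch of the difference (exact output may vary by Go version):

package main

import (
	"fmt"
	"reflect"
)

type myFact struct{ Seen bool }

func main() {
	var fact interface{} = &myFact{}
	t := reflect.TypeOf(fact)
	// %T of the reflect.Type reports its implementation type
	// (e.g. *reflect.rtype), which is not informative in an error message.
	fmt.Printf("got %T\n", t)
	// %T of the fact itself reports the concrete type, as intended.
	fmt.Printf("got %T\n", fact) // got *main.myFact
}
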
diff --git a/internal/lsp/cache/cache.go b/internal/lsp/cache/cache.go
index be03e63..ac670b5 100644
--- a/internal/lsp/cache/cache.go
+++ b/internal/lsp/cache/cache.go
@@ -168,10 +168,6 @@
 	return h.uri
 }
 
-func (h *fileHandle) Kind() source.FileKind {
-	return source.DetectLanguage("", h.uri.Filename())
-}
-
 func (h *fileHandle) Hash() string {
 	return h.hash
 }
@@ -180,7 +176,6 @@
 	return source.FileIdentity{
 		URI:  h.uri,
 		Hash: h.hash,
-		Kind: h.Kind(),
 	}
 }
 
diff --git a/internal/lsp/cache/check.go b/internal/lsp/cache/check.go
index 2eb2d1e..f166863 100644
--- a/internal/lsp/cache/check.go
+++ b/internal/lsp/cache/check.go
@@ -12,6 +12,7 @@
 	"go/types"
 	"path"
 	"path/filepath"
+	"regexp"
 	"sort"
 	"strings"
 	"sync"
@@ -193,7 +194,7 @@
 		depKeys = append(depKeys, depHandle.key)
 	}
 	experimentalKey := s.View().Options().ExperimentalPackageCacheKey
-	ph.key = checkPackageKey(ph.m.ID, compiledGoFiles, m.Config, depKeys, mode, experimentalKey)
+	ph.key = checkPackageKey(ph.m.ID, compiledGoFiles, m, depKeys, mode, experimentalKey)
 	return ph, deps, nil
 }
 
@@ -213,15 +214,18 @@
 	return source.ParseExported
 }
 
-func checkPackageKey(id PackageID, pghs []*parseGoHandle, cfg *packages.Config, deps []packageHandleKey, mode source.ParseMode, experimentalKey bool) packageHandleKey {
+func checkPackageKey(id PackageID, pghs []*parseGoHandle, m *KnownMetadata, deps []packageHandleKey, mode source.ParseMode, experimentalKey bool) packageHandleKey {
 	b := bytes.NewBuffer(nil)
 	b.WriteString(string(id))
+	if m.Module != nil {
+		b.WriteString(m.Module.GoVersion) // go version affects type check errors.
+	}
 	if !experimentalKey {
 		// cfg was used to produce the other hashed inputs (package ID, parsed Go
 		// files, and deps). It should not otherwise affect the inputs to the type
 		// checker, so this experiment omits it. This should increase cache hits on
 		// the daemon as cfg contains the environment and working directory.
-		b.WriteString(hashConfig(cfg))
+		b.WriteString(hashConfig(m.Config))
 	}
 	b.WriteByte(byte(mode))
 	for _, dep := range deps {
@@ -420,6 +424,8 @@
 	return pkg, nil
 }
 
+var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)
+
 func doTypeCheck(ctx context.Context, snapshot *snapshot, m *Metadata, mode source.ParseMode, deps map[PackagePath]*packageHandle, astFilter *unexportedFilter) (*pkg, error) {
 	ctx, done := event.Start(ctx, "cache.typeCheck", tag.Package.Of(string(m.ID)))
 	defer done()
@@ -439,7 +445,7 @@
 		},
 		typesSizes: m.TypesSizes,
 	}
-	typeparams.InitInferred(pkg.typesInfo)
+	typeparams.InitInstanceInfo(pkg.typesInfo)
 
 	for _, gf := range pkg.m.GoFiles {
 		// In the presence of line directives, we may need to report errors in
@@ -515,6 +521,15 @@
 			return depPkg.types, nil
 		}),
 	}
+	if pkg.m.Module != nil && pkg.m.Module.GoVersion != "" {
+		goVersion := "go" + pkg.m.Module.GoVersion
+		// types.NewChecker panics if GoVersion is invalid. An unparsable mod
+		// file should probably stop us before we get here, but double check
+		// just in case.
+		if goVersionRx.MatchString(goVersion) {
+			typesinternal.SetGoVersion(cfg, goVersion)
+		}
+	}
 
 	if mode != source.ParseFull {
 		cfg.DisableUnusedImportCheck = true
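
The goVersionRx guard above only forwards versions that types.NewChecker will accept. A quick illustration of which strings it admits (results shown as comments):

package main

import (
	"fmt"
	"regexp"
)

var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)

func main() {
	for _, v := range []string{"go1.18", "go1.0", "go1", "go1.18.1", "go1.x"} {
		fmt.Printf("%-9s %v\n", v, goVersionRx.MatchString(v))
	}
	// go1.18    true
	// go1.0     true
	// go1       false
	// go1.18.1  false
	// go1.x     false
}
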
diff --git a/internal/lsp/cache/errors.go b/internal/lsp/cache/errors.go
index 3f58d67..e9a86de 100644
--- a/internal/lsp/cache/errors.go
+++ b/internal/lsp/cache/errors.go
@@ -101,6 +101,7 @@
 }
 
 var importErrorRe = regexp.MustCompile(`could not import ([^\s]+)`)
+var unsupportedFeatureRe = regexp.MustCompile(`.*require.* go(\d+\.\d+) or later`)
 
 func typeErrorDiagnostics(snapshot *snapshot, pkg *pkg, e extendedError) ([]*source.Diagnostic, error) {
 	code, spn, err := typeErrorData(snapshot.FileSet(), pkg, e.primary)
@@ -145,6 +146,12 @@
 			return nil, err
 		}
 	}
+	if match := unsupportedFeatureRe.FindStringSubmatch(e.primary.Msg); match != nil {
+		diag.SuggestedFixes, err = editGoDirectiveQuickFix(snapshot, spn.URI(), match[1])
+		if err != nil {
+			return nil, err
+		}
+	}
 	return []*source.Diagnostic{diag}, nil
 }
 
@@ -165,6 +172,22 @@
 	return []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, nil
 }
 
+func editGoDirectiveQuickFix(snapshot *snapshot, uri span.URI, version string) ([]source.SuggestedFix, error) {
+	// Go mod edit only supports module mode.
+	if snapshot.workspaceMode()&moduleMode == 0 {
+		return nil, nil
+	}
+	title := fmt.Sprintf("go mod edit -go=%s", version)
+	cmd, err := command.NewEditGoDirectiveCommand(title, command.EditGoDirectiveArgs{
+		URI:     protocol.URIFromSpanURI(uri),
+		Version: version,
+	})
+	if err != nil {
+		return nil, err
+	}
+	return []source.SuggestedFix{source.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, nil
+}
+
 func analysisDiagnosticDiagnostics(snapshot *snapshot, pkg *pkg, a *analysis.Analyzer, e *analysis.Diagnostic) ([]*source.Diagnostic, error) {
 	var srcAnalyzer *source.Analyzer
 	// Find the analyzer that generated this diagnostic.
@@ -233,6 +256,9 @@
 // onlyDeletions returns true if all of the suggested fixes are deletions.
 func onlyDeletions(fixes []source.SuggestedFix) bool {
 	for _, fix := range fixes {
+		if fix.Command != nil {
+			return false
+		}
 		for _, edits := range fix.Edits {
 			for _, edit := range edits {
 				if edit.NewText != "" {
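
The unsupportedFeatureRe hook above turns "requires go1.N or later" type errors into a go mod edit quick fix. A minimal sketch of the matching step; the error text is a representative example, not a quotation of go/types output:

package main

import (
	"fmt"
	"regexp"
)

var unsupportedFeatureRe = regexp.MustCompile(`.*require.* go(\d+\.\d+) or later`)

func main() {
	msg := "type parameters require go1.18 or later" // representative message shape
	if m := unsupportedFeatureRe.FindStringSubmatch(msg); m != nil {
		// The captured version becomes the quick fix title and argument.
		fmt.Printf("go mod edit -go=%s\n", m[1]) // go mod edit -go=1.18
	}
}
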
diff --git a/internal/lsp/cache/imports.go b/internal/lsp/cache/imports.go
index ed9919f..01a2468 100644
--- a/internal/lsp/cache/imports.go
+++ b/internal/lsp/cache/imports.go
@@ -39,7 +39,12 @@
 	// is slightly wasteful, since we'll drop caches a little too often, but
 	// the mod file shouldn't be changing while people are autocompleting.
 	var modFileHash string
-	if snapshot.workspaceMode()&usesWorkspaceModule == 0 {
+	// If we are using 'legacyWorkspace' mode, we can just read the modfile from
+	// the snapshot. Otherwise, we need to get the synthetic workspace mod file.
+	//
+	// TODO(rfindley): we should be able to just always use the synthetic
+	// workspace module, or alternatively use the go.work file.
+	if snapshot.workspace.moduleSource == legacyWorkspace {
 		for m := range snapshot.workspace.getActiveModFiles() { // range to access the only element
 			modFH, err := snapshot.GetFile(ctx, m)
 			if err != nil {
@@ -138,7 +143,7 @@
 
 	// Take an extra reference to the snapshot so that its workspace directory
 	// (if any) isn't destroyed while we're using it.
-	release := snapshot.generation.Acquire(ctx)
+	release := snapshot.generation.Acquire()
 	_, inv, cleanupInvocation, err := snapshot.goCommandInvocation(ctx, source.LoadWorkspace, &gocommand.Invocation{
 		WorkingDir: snapshot.view.rootURI.Filename(),
 	})
diff --git a/internal/lsp/cache/load.go b/internal/lsp/cache/load.go
index c5b5a3d..17b7aca 100644
--- a/internal/lsp/cache/load.go
+++ b/internal/lsp/cache/load.go
@@ -57,7 +57,7 @@
 			uri := span.URI(scope)
 			// Don't try to load a file that doesn't exist.
 			fh := s.FindFile(uri)
-			if fh == nil || fh.Kind() != source.Go {
+			if fh == nil || s.View().FileKind(fh) != source.Go {
 				continue
 			}
 			query = append(query, fmt.Sprintf("file=%s", uri.Filename()))
@@ -264,7 +264,7 @@
 	for _, fh := range files {
 		// Place the diagnostics on the package or module declarations.
 		var rng protocol.Range
-		switch fh.Kind() {
+		switch s.view.FileKind(fh) {
 		case source.Go:
 			if pgf, err := s.ParseGo(ctx, fh, source.ParseHeader); err == nil {
 				pkgDecl := span.NewRange(s.FileSet(), pgf.File.Package, pgf.File.Name.End())
diff --git a/internal/lsp/cache/metadata.go b/internal/lsp/cache/metadata.go
index bef7bf8..618578d 100644
--- a/internal/lsp/cache/metadata.go
+++ b/internal/lsp/cache/metadata.go
@@ -56,6 +56,11 @@
 	return string(m.PkgPath)
 }
 
+// ModuleInfo implements the source.Metadata interface.
+func (m *Metadata) ModuleInfo() *packages.Module {
+	return m.Module
+}
+
 // KnownMetadata is a wrapper around metadata that tracks its validity.
 type KnownMetadata struct {
 	*Metadata
diff --git a/internal/lsp/cache/mod.go b/internal/lsp/cache/mod.go
index a915d05..8a2d42a 100644
--- a/internal/lsp/cache/mod.go
+++ b/internal/lsp/cache/mod.go
@@ -73,13 +73,13 @@
 				if err != nil {
 					return &parseModData{err: err}
 				}
-				parseErrors = []*source.Diagnostic{{
+				parseErrors = append(parseErrors, &source.Diagnostic{
 					URI:      modFH.URI(),
 					Range:    rng,
 					Severity: protocol.SeverityError,
 					Source:   source.ParseError,
 					Message:  mfErr.Err.Error(),
-				}}
+				})
 			}
 		}
 		return &parseModData{
@@ -101,6 +101,84 @@
 	return pmh.parse(ctx, s)
 }
 
+type parseWorkHandle struct {
+	handle *memoize.Handle
+}
+
+type parseWorkData struct {
+	parsed *source.ParsedWorkFile
+
+	// err is any error encountered while parsing the file.
+	err error
+}
+
+func (mh *parseWorkHandle) parse(ctx context.Context, snapshot *snapshot) (*source.ParsedWorkFile, error) {
+	v, err := mh.handle.Get(ctx, snapshot.generation, snapshot)
+	if err != nil {
+		return nil, err
+	}
+	data := v.(*parseWorkData)
+	return data.parsed, data.err
+}
+
+func (s *snapshot) ParseWork(ctx context.Context, modFH source.FileHandle) (*source.ParsedWorkFile, error) {
+	if handle := s.getParseWorkHandle(modFH.URI()); handle != nil {
+		return handle.parse(ctx, s)
+	}
+	h := s.generation.Bind(modFH.FileIdentity(), func(ctx context.Context, _ memoize.Arg) interface{} {
+		_, done := event.Start(ctx, "cache.ParseWorkHandle", tag.URI.Of(modFH.URI()))
+		defer done()
+
+		contents, err := modFH.Read()
+		if err != nil {
+			return &parseWorkData{err: err}
+		}
+		m := &protocol.ColumnMapper{
+			URI:       modFH.URI(),
+			Converter: span.NewContentConverter(modFH.URI().Filename(), contents),
+			Content:   contents,
+		}
+		file, parseErr := modfile.ParseWork(modFH.URI().Filename(), contents, nil)
+		// Attempt to convert the error to a standardized parse error.
+		var parseErrors []*source.Diagnostic
+		if parseErr != nil {
+			mfErrList, ok := parseErr.(modfile.ErrorList)
+			if !ok {
+				return &parseWorkData{err: fmt.Errorf("unexpected parse error type %v", parseErr)}
+			}
+			for _, mfErr := range mfErrList {
+				rng, err := rangeFromPositions(m, mfErr.Pos, mfErr.Pos)
+				if err != nil {
+					return &parseWorkData{err: err}
+				}
+				parseErrors = append(parseErrors, &source.Diagnostic{
+					URI:      modFH.URI(),
+					Range:    rng,
+					Severity: protocol.SeverityError,
+					Source:   source.ParseError,
+					Message:  mfErr.Err.Error(),
+				})
+			}
+		}
+		return &parseWorkData{
+			parsed: &source.ParsedWorkFile{
+				URI:         modFH.URI(),
+				Mapper:      m,
+				File:        file,
+				ParseErrors: parseErrors,
+			},
+			err: parseErr,
+		}
+	}, nil)
+
+	pwh := &parseWorkHandle{handle: h}
+	s.mu.Lock()
+	s.parseWorkHandles[modFH.URI()] = pwh
+	s.mu.Unlock()
+
+	return pwh.parse(ctx, s)
+}
+
 // goSum reads the go.sum file for the go.mod file at modURI, if it exists. If
 // it doesn't exist, it returns nil.
 func (s *snapshot) goSum(ctx context.Context, modURI span.URI) []byte {
@@ -164,7 +242,7 @@
 }
 
 func (s *snapshot) ModWhy(ctx context.Context, fh source.FileHandle) (map[string]string, error) {
-	if fh.Kind() != source.Mod {
+	if s.View().FileKind(fh) != source.Mod {
 		return nil, fmt.Errorf("%s is not a go.mod file", fh.URI())
 	}
 	if handle := s.getModWhyHandle(fh.URI()); handle != nil {
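
ParseWork above delegates the actual parsing to x/mod's modfile.ParseWork and converts any modfile.ErrorList into diagnostics. A minimal standalone sketch of the underlying call, with hypothetical use directives:

package main

import (
	"fmt"

	"golang.org/x/mod/modfile"
)

func main() {
	src := []byte("go 1.18\n\nuse ./tools\nuse ./gopls\n") // hypothetical go.work contents
	wf, err := modfile.ParseWork("go.work", src, nil)
	if err != nil {
		panic(err) // gopls instead converts a modfile.ErrorList into diagnostics
	}
	fmt.Println(wf.Go.Version) // 1.18
	for _, u := range wf.Use {
		fmt.Println(u.Path) // ./tools, ./gopls
	}
}
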
diff --git a/internal/lsp/cache/mod_tidy.go b/internal/lsp/cache/mod_tidy.go
index 7c92746..e85f651 100644
--- a/internal/lsp/cache/mod_tidy.go
+++ b/internal/lsp/cache/mod_tidy.go
@@ -152,21 +152,6 @@
 	return mth.tidy(ctx, s)
 }
 
-func (s *snapshot) uriToModDecl(ctx context.Context, uri span.URI) (protocol.Range, error) {
-	fh, err := s.GetFile(ctx, uri)
-	if err != nil {
-		return protocol.Range{}, nil
-	}
-	pmf, err := s.ParseMod(ctx, fh)
-	if err != nil {
-		return protocol.Range{}, nil
-	}
-	if pmf.File.Module == nil || pmf.File.Module.Syntax == nil {
-		return protocol.Range{}, nil
-	}
-	return rangeFromPositions(pmf.Mapper, pmf.File.Module.Syntax.Start, pmf.File.Module.Syntax.End)
-}
-
 func (s *snapshot) hashImports(ctx context.Context, wsPackages []*packageHandle) (string, error) {
 	seen := map[string]struct{}{}
 	var imports []string
@@ -212,7 +197,11 @@
 		// vice versa.
 		srcDiag, err := directnessDiagnostic(pm.Mapper, req, snapshot.View().Options().ComputeEdits)
 		if err != nil {
-			return nil, err
+			// We're probably in a bad state if we can't compute a
+			// directnessDiagnostic, but try to keep going so as to not suppress
+			// other, valid diagnostics.
+			event.Error(ctx, "computing directness diagnostic", err)
+			continue
 		}
 		diagnostics = append(diagnostics, srcDiag)
 	}
@@ -428,7 +417,14 @@
 	// Change the directness in the matching require statement. To avoid
 	// reordering the require statements, rewrite all of them.
 	var requires []*modfile.Require
+	seenVersions := make(map[string]string)
 	for _, r := range copied.Require {
+		if seen := seenVersions[r.Mod.Path]; seen != "" && seen != r.Mod.Version {
+			// Avoid a panic in SetRequire below, which panics on conflicting
+			// versions.
+			return nil, fmt.Errorf("%q has conflicting versions: %q and %q", r.Mod.Path, seen, r.Mod.Version)
+		}
+		seenVersions[r.Mod.Path] = r.Mod.Version
 		if r.Mod.Path == req.Mod.Path {
 			requires = append(requires, &modfile.Require{
 				Mod:      r.Mod,
diff --git a/internal/lsp/cache/parse.go b/internal/lsp/cache/parse.go
index 742f48f..e761373 100644
--- a/internal/lsp/cache/parse.go
+++ b/internal/lsp/cache/parse.go
@@ -13,6 +13,7 @@
 	"go/scanner"
 	"go/token"
 	"go/types"
+	"path/filepath"
 	"reflect"
 	"strconv"
 	"strings"
@@ -246,7 +247,8 @@
 	ctx, done := event.Start(ctx, "cache.parseGo", tag.File.Of(fh.URI().Filename()))
 	defer done()
 
-	if fh.Kind() != source.Go {
+	ext := filepath.Ext(fh.URI().Filename())
+	if ext != ".go" && ext != "" { // files generated by cgo have no extension
 		return &parseGoData{err: errors.Errorf("cannot parse non-Go file %s", fh.URI())}
 	}
 	src, err := fh.Read()
@@ -672,6 +674,9 @@
 
 // fixAST inspects the AST and potentially modifies any *ast.BadStmts so that it can be
 // type-checked more effectively.
+//
+// If fixAST returns true, the resulting AST is considered "fixed", meaning
+// positions have been mangled, and type checker errors may not make sense.
 func fixAST(ctx context.Context, n ast.Node, tok *token.File, src []byte) (fixed bool) {
 	var err error
 	walkASTWithParent(n, func(n, parent ast.Node) bool {
@@ -782,7 +787,10 @@
 	// If the "{" is already in the source code, there isn't anything to
 	// fix since we aren't missing curlies.
 	if b.Lbrace.IsValid() {
-		braceOffset := tok.Offset(b.Lbrace)
+		braceOffset, err := source.Offset(tok, b.Lbrace)
+		if err != nil {
+			return nil
+		}
 		if braceOffset < len(src) && src[braceOffset] == '{' {
 			return nil
 		}
@@ -834,7 +842,11 @@
 
 	var buf bytes.Buffer
 	buf.Grow(len(src) + 3)
-	buf.Write(src[:tok.Offset(insertPos)])
+	offset, err := source.Offset(tok, insertPos)
+	if err != nil {
+		return nil
+	}
+	buf.Write(src[:offset])
 
 	// Detect if we need to insert a semicolon to fix "for" loop situations like:
 	//
@@ -854,7 +866,7 @@
 	// Insert "{}" at insertPos.
 	buf.WriteByte('{')
 	buf.WriteByte('}')
-	buf.Write(src[tok.Offset(insertPos):])
+	buf.Write(src[offset:])
 	return buf.Bytes()
 }
 
@@ -888,7 +900,10 @@
 
 	// If the right brace is actually in the source code at the
 	// specified position, don't mess with it.
-	braceOffset := tok.Offset(body.Rbrace)
+	braceOffset, err := source.Offset(tok, body.Rbrace)
+	if err != nil {
+		return
+	}
 	if braceOffset < len(src) && src[braceOffset] == '}' {
 		return
 	}
@@ -923,8 +938,12 @@
 		return nil
 	}
 
+	insertOffset, err := source.Offset(tok, s.X.End())
+	if err != nil {
+		return nil
+	}
 	// Insert directly after the selector's ".".
-	insertOffset := tok.Offset(s.X.End()) + 1
+	insertOffset++
 	if src[insertOffset-1] != '.' {
 		return nil
 	}
@@ -944,6 +963,8 @@
 // foo.var
 //
 // yields a "_" selector instead of "var" since "var" is a keyword.
+//
+// TODO(rfindley): should this constitute an ast 'fix'?
 func fixPhantomSelector(sel *ast.SelectorExpr, tok *token.File, src []byte) {
 	if !isPhantomUnderscore(sel.Sel, tok, src) {
 		return
@@ -980,7 +1001,10 @@
 
 	// Phantom underscore means the underscore is not actually in the
 	// program text.
-	offset := tok.Offset(id.Pos())
+	offset, err := source.Offset(tok, id.Pos())
+	if err != nil {
+		return false
+	}
 	return len(src) <= offset || src[offset] != '_'
 }
 
@@ -989,13 +1013,23 @@
 // like "if i := 0" the user hasn't typed the semicolon yet so the
 // parser is looking for the conditional expression. However, "i := 0"
 // are not valid expressions, so we get a BadExpr.
+//
+// fixInitStmt returns valid AST for the original source.
 func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) {
 	if !bad.Pos().IsValid() || !bad.End().IsValid() {
 		return
 	}
 
 	// Try to extract a statement from the BadExpr.
-	stmtBytes := src[tok.Offset(bad.Pos()) : tok.Offset(bad.End()-1)+1]
+	start, err := source.Offset(tok, bad.Pos())
+	if err != nil {
+		return
+	}
+	end, err := source.Offset(tok, bad.End()-1)
+	if err != nil {
+		return
+	}
+	stmtBytes := src[start : end+1]
 	stmt, err := parseStmt(bad.Pos(), stmtBytes)
 	if err != nil {
 		return
@@ -1035,7 +1069,11 @@
 // readKeyword reads the keyword starting at pos, if any.
 func readKeyword(pos token.Pos, tok *token.File, src []byte) string {
 	var kwBytes []byte
-	for i := tok.Offset(pos); i < len(src); i++ {
+	offset, err := source.Offset(tok, pos)
+	if err != nil {
+		return ""
+	}
+	for i := offset; i < len(src); i++ {
 		// Use a simplified identifier check since keywords are always lowercase ASCII.
 		if src[i] < 'a' || src[i] > 'z' {
 			break
@@ -1072,15 +1110,15 @@
 	// Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF.
 	// It also panics if the position is not in the range of the file, and
 	// badExprs may not necessarily have good positions, so check first.
-	if !source.InRange(tok, from) {
+	fromOffset, err := source.Offset(tok, from)
+	if err != nil {
 		return false
 	}
-	if !source.InRange(tok, to-1) {
+	toOffset, err := source.Offset(tok, to-1)
+	if err != nil {
 		return false
 	}
-	fromOffset := tok.Offset(from)
-	toOffset := tok.Offset(to-1) + 1
-	exprBytes = append(exprBytes, src[fromOffset:toOffset]...)
+	exprBytes = append(exprBytes, src[fromOffset:toOffset+1]...)
 	exprBytes = bytes.TrimSpace(exprBytes)
 
 	// If our expression ends in "]" (e.g. "[]"), add a phantom selector
@@ -1233,18 +1271,26 @@
 		}
 	}
 
-	if !from.IsValid() || tok.Offset(from) >= len(src) {
+	fromOffset, err := source.Offset(tok, from)
+	if err != nil {
+		return false
+	}
+	if !from.IsValid() || fromOffset >= len(src) {
 		return false
 	}
 
-	if !to.IsValid() || tok.Offset(to) >= len(src) {
+	toOffset, err := source.Offset(tok, to)
+	if err != nil {
+		return false
+	}
+	if !to.IsValid() || toOffset >= len(src) {
 		return false
 	}
 
 	// Insert any phantom selectors needed to prevent dangling "." from messing
 	// up the AST.
 	exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors))
-	for i, b := range src[tok.Offset(from):tok.Offset(to)] {
+	for i, b := range src[fromOffset:toOffset] {
 		if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] {
 			exprBytes = append(exprBytes, '_')
 			phantomSelectors = phantomSelectors[1:]
@@ -1354,6 +1400,11 @@
 					continue
 				}
 
+				// Don't offset invalid positions: they should stay invalid.
+				if !token.Pos(f.Int()).IsValid() {
+					continue
+				}
+
 				f.SetInt(f.Int() + int64(offset))
 			}
 		}
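
The repeated rewrites above replace direct (*token.File).Offset calls, which panicked on out-of-range positions in the Go versions targeted here, with the error-returning source.Offset helper. The following is an illustrative sketch of that pattern, not the gopls implementation:

package offsetdemo

import (
	"fmt"
	"go/token"
)

// safeOffset bounds-checks pos against the file before converting it to a
// byte offset, so malformed or fixed-up ASTs cannot trigger a panic.
func safeOffset(tf *token.File, pos token.Pos) (int, error) {
	if !pos.IsValid() {
		return 0, fmt.Errorf("invalid position")
	}
	if int(pos) < tf.Base() || int(pos) > tf.Base()+tf.Size() {
		return 0, fmt.Errorf("position %d is outside of file [%d, %d]", pos, tf.Base(), tf.Base()+tf.Size())
	}
	return tf.Offset(pos), nil
}
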
diff --git a/internal/lsp/cache/pkg.go b/internal/lsp/cache/pkg.go
index 88ea886..0c7bf74 100644
--- a/internal/lsp/cache/pkg.go
+++ b/internal/lsp/cache/pkg.go
@@ -29,7 +29,7 @@
 	types           *types.Package
 	typesInfo       *types.Info
 	typesSizes      types.Sizes
-	hasFixedFiles   bool
+	hasFixedFiles   bool // if true, AST was sufficiently mangled that we should hide type errors
 }
 
 // Declare explicit types for files and directories to distinguish between the two.
diff --git a/internal/lsp/cache/session.go b/internal/lsp/cache/session.go
index bcb799a..e86ed25 100644
--- a/internal/lsp/cache/session.go
+++ b/internal/lsp/cache/session.go
@@ -62,7 +62,6 @@
 	return source.FileIdentity{
 		URI:  o.uri,
 		Hash: o.hash,
-		Kind: o.kind,
 	}
 }
 
@@ -156,10 +155,15 @@
 	return s.cache
 }
 
-func (s *Session) NewView(ctx context.Context, name string, folder, tempWorkspace span.URI, options *source.Options) (source.View, source.Snapshot, func(), error) {
+func (s *Session) NewView(ctx context.Context, name string, folder span.URI, options *source.Options) (source.View, source.Snapshot, func(), error) {
 	s.viewMu.Lock()
 	defer s.viewMu.Unlock()
-	view, snapshot, release, err := s.createView(ctx, name, folder, tempWorkspace, options, 0)
+	for _, view := range s.views {
+		if span.CompareURI(view.folder, folder) == 0 {
+			return nil, nil, nil, source.ErrViewExists
+		}
+	}
+	view, snapshot, release, err := s.createView(ctx, name, folder, options, 0)
 	if err != nil {
 		return nil, nil, func() {}, err
 	}
@@ -169,7 +173,7 @@
 	return view, snapshot, release, nil
 }
 
-func (s *Session) createView(ctx context.Context, name string, folder, tempWorkspace span.URI, options *source.Options, snapshotID uint64) (*View, *snapshot, func(), error) {
+func (s *Session) createView(ctx context.Context, name string, folder span.URI, options *source.Options, snapshotID uint64) (*View, *snapshot, func(), error) {
 	index := atomic.AddInt64(&viewIndex, 1)
 
 	if s.cache.options != nil {
@@ -214,7 +218,6 @@
 		filesByBase:          map[string][]*fileBase{},
 		rootURI:              root,
 		workspaceInformation: *ws,
-		tempWorkspace:        tempWorkspace,
 	}
 	v.importsState = &importsState{
 		ctx: backgroundCtx,
@@ -240,6 +243,7 @@
 		workspacePackages: make(map[PackageID]PackagePath),
 		unloadableFiles:   make(map[span.URI]struct{}),
 		parseModHandles:   make(map[span.URI]*parseModHandle),
+		parseWorkHandles:  make(map[span.URI]*parseWorkHandle),
 		modTidyHandles:    make(map[span.URI]*modTidyHandle),
 		modWhyHandles:     make(map[span.URI]*modWhyHandle),
 		workspace:         workspace,
@@ -249,17 +253,12 @@
 	initCtx, initCancel := context.WithCancel(xcontext.Detach(ctx))
 	v.initCancelFirstAttempt = initCancel
 	snapshot := v.snapshot
-	release := snapshot.generation.Acquire(initCtx)
+	release := snapshot.generation.Acquire()
 	go func() {
 		defer release()
 		snapshot.initialize(initCtx, true)
-		// Ensure that the view workspace is written at least once following
-		// initialization.
-		if err := v.updateWorkspace(initCtx); err != nil {
-			event.Error(ctx, "copying workspace dir", err)
-		}
 	}()
-	return v, snapshot, snapshot.generation.Acquire(ctx), nil
+	return v, snapshot, snapshot.generation.Acquire(), nil
 }
 
 // View returns the view by name.
@@ -362,16 +361,24 @@
 func (s *Session) updateView(ctx context.Context, view *View, options *source.Options) (*View, error) {
 	s.viewMu.Lock()
 	defer s.viewMu.Unlock()
+
+	// Preserve the snapshot ID if we are recreating the view.
+	view.snapshotMu.Lock()
+	if view.snapshot == nil {
+		view.snapshotMu.Unlock()
+		panic("updateView called after View was already shut down")
+	}
+	snapshotID := view.snapshot.id
+	view.snapshotMu.Unlock()
+
 	i, err := s.dropView(ctx, view)
 	if err != nil {
 		return nil, err
 	}
-	// Preserve the snapshot ID if we are recreating the view.
-	view.snapshotMu.Lock()
-	snapshotID := view.snapshot.id
-	view.snapshotMu.Unlock()
-	v, _, release, err := s.createView(ctx, view.name, view.folder, view.tempWorkspace, options, snapshotID)
+
+	v, _, release, err := s.createView(ctx, view.name, view.folder, options, snapshotID)
 	release()
+
 	if err != nil {
 		// we have dropped the old view, but could not create the new one
 		// this should not happen and is very bad, but we still need to clean
@@ -525,7 +532,7 @@
 	defer s.viewMu.RUnlock()
 	var snapshots []*snapshot
 	for _, v := range s.views {
-		snapshot, release := v.getSnapshot(ctx)
+		snapshot, release := v.getSnapshot()
 		defer release()
 		snapshots = append(snapshots, snapshot)
 	}
@@ -604,16 +611,13 @@
 		var kind source.FileKind
 		switch c.Action {
 		case source.Open:
-			kind = source.DetectLanguage(c.LanguageID, c.URI.Filename())
+			kind = source.FileKindForLang(c.LanguageID)
 		default:
 			if !ok {
 				return nil, errors.Errorf("updateOverlays: modifying unopened overlay %v", c.URI)
 			}
 			kind = o.kind
 		}
-		if kind == source.UnknownKind {
-			return nil, errors.Errorf("updateOverlays: unknown file kind for %s", c.URI)
-		}
 
 		// Closing a file just deletes its overlay.
 		if c.Action == source.Close {
@@ -667,6 +671,18 @@
 			hash:    hash,
 			saved:   sameContentOnDisk,
 		}
+
+		// When opening files, ensure that we actually have a well-defined view and file kind.
+		if c.Action == source.Open {
+			view, err := s.ViewOf(o.uri)
+			if err != nil {
+				return nil, errors.Errorf("updateOverlays: finding view for %s: %v", o.uri, err)
+			}
+			if kind := view.FileKind(o); kind == source.UnknownKind {
+				return nil, errors.Errorf("updateOverlays: unknown file kind for %s", o.uri)
+			}
+		}
+
 		s.overlays[c.URI] = o
 	}
 
@@ -715,7 +731,7 @@
 	defer s.viewMu.RUnlock()
 	patterns := map[string]struct{}{}
 	for _, view := range s.views {
-		snapshot, release := view.getSnapshot(ctx)
+		snapshot, release := view.getSnapshot()
 		for k, v := range snapshot.fileWatchingGlobPatterns(ctx) {
 			patterns[k] = v
 		}
diff --git a/internal/lsp/cache/snapshot.go b/internal/lsp/cache/snapshot.go
index d5f230a..900f13f 100644
--- a/internal/lsp/cache/snapshot.go
+++ b/internal/lsp/cache/snapshot.go
@@ -102,10 +102,14 @@
 	// unloadableFiles keeps track of files that we've failed to load.
 	unloadableFiles map[span.URI]struct{}
 
-	// parseModHandles keeps track of any ParseModHandles for the snapshot.
+	// parseModHandles keeps track of any parseModHandles for the snapshot.
 	// The handles need not refer to only the view's go.mod file.
 	parseModHandles map[span.URI]*parseModHandle
 
+	// parseWorkHandles keeps track of any parseWorkHandles for the snapshot.
+	// The handles need not refer to only the view's go.work file.
+	parseWorkHandles map[span.URI]*parseWorkHandle
+
 	// Preserve go.mod-related handles to avoid garbage-collecting the results
 	// of various calls to the go command. The handles need not refer to only
 	// the view's go.mod file.
@@ -158,17 +162,21 @@
 	return uris
 }
 
+func (s *snapshot) WorkFile() span.URI {
+	return s.workspace.workFile
+}
+
 func (s *snapshot) Templates() map[span.URI]source.VersionedFileHandle {
-	if !s.view.options.ExperimentalTemplateSupport {
-		return nil
-	}
-	ans := map[span.URI]source.VersionedFileHandle{}
-	for k, x := range s.files {
-		if strings.HasSuffix(filepath.Ext(k.Filename()), "tmpl") {
-			ans[k] = x
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	tmpls := map[span.URI]source.VersionedFileHandle{}
+	for k, fh := range s.files {
+		if s.view.FileKind(fh) == source.Tmpl {
+			tmpls[k] = fh
 		}
 	}
-	return ans
+	return tmpls
 }
 
 func (s *snapshot) ValidBuildConfiguration() bool {
@@ -198,18 +206,6 @@
 	if options.TempModfile && s.view.workspaceInformation.goversion >= 14 {
 		mode |= tempModfile
 	}
-	// If the user is intentionally limiting their workspace scope, don't
-	// enable multi-module workspace mode.
-	// TODO(rstambler): This should only change the calculation of the root,
-	// not the mode.
-	if !options.ExpandWorkspaceToModule {
-		return mode
-	}
-	// The workspace module has been disabled by the user.
-	if !options.ExperimentalWorkspaceModule {
-		return mode
-	}
-	mode |= usesWorkspaceModule
 	return mode
 }
 
@@ -334,17 +330,41 @@
 		inv.Env = append(inv.Env, "GOPROXY=off")
 	}
 
+	// What follows is rather complicated logic for how to actually run the go
+	// command. A word of warning: this is the result of various incremental
+	// features added to gopls, and varying behavior of the Go command across Go
+	// versions. It can surely be cleaned up significantly, but tread carefully.
+	//
+	// Roughly speaking we need to resolve four things:
+	//  - the working directory.
+	//  - the -mod flag
+	//  - the -modfile flag
+	//  - the -workfile flag
+	//
+	// These are dependent on a number of factors: whether we need to run in a
+	// synthetic workspace, whether flags are supported at the current go
+	// version, and what we're actually trying to achieve (the
+	// source.InvocationFlags).
+
 	var modURI span.URI
 	// Select the module context to use.
 	// If we're type checking, we need to use the workspace context, meaning
 	// the main (workspace) module. Otherwise, we should use the module for
 	// the passed-in working dir.
 	if mode == source.LoadWorkspace {
-		if s.workspaceMode()&usesWorkspaceModule == 0 {
+		switch s.workspace.moduleSource {
+		case legacyWorkspace:
 			for m := range s.workspace.getActiveModFiles() { // range to access the only element
 				modURI = m
 			}
-		} else {
+		case goWorkWorkspace:
+			if s.view.goversion >= 18 {
+				break
+			}
+			// Before go 1.18, the Go command did not natively support go.work files,
+			// so we 'fake' them with a workspace module.
+			fallthrough
+		case fileSystemWorkspace, goplsModWorkspace:
 			var tmpDir span.URI
 			var err error
 			tmpDir, err = s.getWorkspaceDir(ctx)
@@ -375,9 +395,9 @@
 		return "", nil, cleanup, err
 	}
 
+	mutableModFlag := ""
 	// If the mod flag isn't set, populate it based on the mode and workspace.
 	if inv.ModFlag == "" {
-		mutableModFlag := ""
 		if s.view.goversion >= 16 {
 			mutableModFlag = "mod"
 		}
@@ -391,19 +411,28 @@
 			} else {
 				inv.ModFlag = mutableModFlag
 			}
-		case source.UpdateUserModFile, source.WriteTemporaryModFile:
+		case source.WriteTemporaryModFile:
 			inv.ModFlag = mutableModFlag
+			// -mod must be readonly when using go.work files - see issue #48941
+			inv.Env = append(inv.Env, "GOWORK=off")
 		}
 	}
 
-	wantTempMod := mode != source.UpdateUserModFile
-	needTempMod := mode == source.WriteTemporaryModFile
-	tempMod := wantTempMod && s.workspaceMode()&tempModfile != 0
-	if needTempMod && !tempMod {
+	// Only use a temp mod file if the modfile can actually be mutated.
+	needTempMod := inv.ModFlag == mutableModFlag
+	useTempMod := s.workspaceMode()&tempModfile != 0
+	if needTempMod && !useTempMod {
 		return "", nil, cleanup, source.ErrTmpModfileUnsupported
 	}
 
-	if tempMod {
+	// We should use -workfile if:
+	//  1. We're not actively trying to mutate a modfile.
+	//  2. We have an active go.work file.
+	//  3. We're using at least Go 1.18.
+	useWorkFile := !needTempMod && s.workspace.moduleSource == goWorkWorkspace && s.view.goversion >= 18
+	if useWorkFile {
+		// TODO(#51215): build a temp workfile and set GOWORK in the environment.
+	} else if useTempMod {
 		if modURI == "" {
 			return "", nil, cleanup, fmt.Errorf("no go.mod file found in %s", inv.WorkingDir)
 		}
@@ -453,10 +482,10 @@
 	return hashContents([]byte(strings.Join(unsaved, "")))
 }
 
-func (s *snapshot) PackagesForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode) ([]source.Package, error) {
+func (s *snapshot) PackagesForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode, includeTestVariants bool) ([]source.Package, error) {
 	ctx = event.Label(ctx, tag.URI.Of(uri))
 
-	phs, err := s.packageHandlesForFile(ctx, uri, mode)
+	phs, err := s.packageHandlesForFile(ctx, uri, mode, includeTestVariants)
 	if err != nil {
 		return nil, err
 	}
@@ -474,7 +503,7 @@
 func (s *snapshot) PackageForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode, pkgPolicy source.PackageFilter) (source.Package, error) {
 	ctx = event.Label(ctx, tag.URI.Of(uri))
 
-	phs, err := s.packageHandlesForFile(ctx, uri, mode)
+	phs, err := s.packageHandlesForFile(ctx, uri, mode, false)
 	if err != nil {
 		return nil, err
 	}
@@ -503,7 +532,7 @@
 	return ph.check(ctx, s)
 }
 
-func (s *snapshot) packageHandlesForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode) ([]*packageHandle, error) {
+func (s *snapshot) packageHandlesForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode, includeTestVariants bool) ([]*packageHandle, error) {
 	// Check if we should reload metadata for the file. We don't invalidate IDs
 	// (though we should), so the IDs will be a better source of truth than the
 	// metadata. If there are no IDs for the file, then we should also reload.
@@ -511,8 +540,8 @@
 	if err != nil {
 		return nil, err
 	}
-	if fh.Kind() != source.Go {
-		return nil, fmt.Errorf("no packages for non-Go file %s", uri)
+	if kind := s.view.FileKind(fh); kind != source.Go {
+		return nil, fmt.Errorf("no packages for non-Go file %s (%v)", uri, kind)
 	}
 	knownIDs, err := s.getOrLoadIDsForURI(ctx, uri)
 	if err != nil {
@@ -523,7 +552,7 @@
 	for _, id := range knownIDs {
 		// Filter out any intermediate test variants. We typically aren't
 		// interested in these packages for file= style queries.
-		if m := s.getMetadata(id); m != nil && m.IsIntermediateTestVariant {
+		if m := s.getMetadata(id); m != nil && m.IsIntermediateTestVariant && !includeTestVariants {
 			continue
 		}
 		var parseModes []source.ParseMode
@@ -658,6 +687,12 @@
 	return s.parseModHandles[uri]
 }
 
+func (s *snapshot) getParseWorkHandle(uri span.URI) *parseWorkHandle {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	return s.parseWorkHandles[uri]
+}
+
 func (s *snapshot) getModWhyHandle(uri span.URI) *modWhyHandle {
 	s.mu.Lock()
 	defer s.mu.Unlock()
@@ -768,22 +803,18 @@
 	return false
 }
 
-func (s *snapshot) getWorkspacePkgPath(id PackageID) PackagePath {
-	s.mu.Lock()
-	defer s.mu.Unlock()
-
-	return s.workspacePackages[id]
-}
-
-const fileExtensions = "go,mod,sum,work,tmpl"
+const fileExtensions = "go,mod,sum,work"
 
 func (s *snapshot) fileWatchingGlobPatterns(ctx context.Context) map[string]struct{} {
+	extensions := fileExtensions
+	for _, ext := range s.View().Options().TemplateExtensions {
+		extensions += "," + ext
+	}
 	// Work-around microsoft/vscode#100870 by making sure that we are,
 	// at least, watching the user's entire workspace. This will still be
 	// applied to every folder in the workspace.
 	patterns := map[string]struct{}{
-		fmt.Sprintf("**/*.{%s}", fileExtensions): {},
-		"**/*.*tmpl":                             {},
+		fmt.Sprintf("**/*.{%s}", extensions): {},
 	}
 	dirs := s.workspace.dirs(ctx, s)
 	for _, dir := range dirs {
@@ -797,7 +828,7 @@
 		// TODO(rstambler): If microsoft/vscode#3025 is resolved before
 		// microsoft/vscode#101042, we will need a work-around for Windows
 		// drive letter casing.
-		patterns[fmt.Sprintf("%s/**/*.{%s}", dirName, fileExtensions)] = struct{}{}
+		patterns[fmt.Sprintf("%s/**/*.{%s}", dirName, extensions)] = struct{}{}
 	}
 
 	// Some clients do not send notifications for changes to directories that
@@ -963,15 +994,25 @@
 
 func (s *snapshot) Symbols(ctx context.Context) (map[span.URI][]source.Symbol, error) {
 	result := make(map[span.URI][]source.Symbol)
+
+	// Keep going on errors, but log the first failure. Partial symbol results
+	// are better than no symbol results.
+	var firstErr error
 	for uri, f := range s.files {
 		sh := s.buildSymbolHandle(ctx, f)
 		v, err := sh.handle.Get(ctx, s.generation, s)
 		if err != nil {
-			return nil, err
+			if firstErr == nil {
+				firstErr = err
+			}
+			continue
 		}
 		data := v.(*symbolData)
 		result[uri] = data.symbols
 	}
+	if firstErr != nil {
+		event.Error(ctx, "getting snapshot symbols", firstErr)
+	}
 	return result, nil
 }
 
@@ -983,7 +1024,11 @@
 	var mds []source.Metadata
 	for _, id := range knownIDs {
 		md := s.getMetadata(id)
-		mds = append(mds, md)
+		// TODO(rfindley): knownIDs and metadata should be in sync, but existing
+		// code is defensive of nil metadata.
+		if md != nil {
+			mds = append(mds, md)
+		}
 	}
 	return mds, nil
 }
@@ -1566,7 +1611,7 @@
 	var files []source.VersionedFileHandle
 	for uri, fh := range s.files {
 		// Don't try to reload metadata for go.mod files.
-		if fh.Kind() != source.Go {
+		if s.view.FileKind(fh) != source.Go {
 			continue
 		}
 		// If the URI doesn't belong to this view, then it's not in a workspace
@@ -1652,7 +1697,7 @@
 	return ac.originalSnapshot.GetFile(ctx, uri)
 }
 
-func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileChange, forceReloadMetadata bool) (*snapshot, bool) {
+func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileChange, forceReloadMetadata bool) *snapshot {
 	var vendorChanged bool
 	newWorkspace, workspaceChanged, workspaceReload := s.workspace.invalidate(ctx, changes, &unappliedChanges{
 		originalSnapshot: s,
@@ -1686,6 +1731,7 @@
 		workspacePackages: make(map[PackageID]PackagePath, len(s.workspacePackages)),
 		unloadableFiles:   make(map[span.URI]struct{}, len(s.unloadableFiles)),
 		parseModHandles:   make(map[span.URI]*parseModHandle, len(s.parseModHandles)),
+		parseWorkHandles:  make(map[span.URI]*parseWorkHandle, len(s.parseWorkHandles)),
 		modTidyHandles:    make(map[span.URI]*modTidyHandle, len(s.modTidyHandles)),
 		modWhyHandles:     make(map[span.URI]*modWhyHandle, len(s.modWhyHandles)),
 		knownSubdirs:      make(map[span.URI]struct{}, len(s.knownSubdirs)),
@@ -1720,6 +1766,10 @@
 	for k, v := range s.parseModHandles {
 		result.parseModHandles[k] = v
 	}
+	// Copy all of the parseWorkHandles.
+	for k, v := range s.parseWorkHandles {
+		result.parseWorkHandles[k] = v
+	}
 
 	for k, v := range s.goFiles {
 		if _, ok := changes[k.file.URI]; ok {
@@ -1810,9 +1860,8 @@
 				delete(result.modWhyHandles, k)
 			}
 		}
-		if isGoMod(uri) {
-			delete(result.parseModHandles, uri)
-		}
+		delete(result.parseModHandles, uri)
+		delete(result.parseWorkHandles, uri)
 		// Handle the invalidated file; it may have new contents or not exist.
 		if !change.exists {
 			delete(result.files, uri)
@@ -2017,6 +2066,9 @@
 	for _, v := range result.parseModHandles {
 		newGen.Inherit(v.handle)
 	}
+	for _, v := range result.parseWorkHandles {
+		newGen.Inherit(v.handle)
+	}
 	// Don't bother copying the importedBy graph,
 	// as it changes each time we update metadata.
 
@@ -2032,7 +2084,7 @@
 			result.initializeOnce = &sync.Once{}
 		}
 	}
-	return result, workspaceChanged
+	return result
 }
 
 // guessPackageIDsForURI returns all packages related to uri. If we haven't
@@ -2282,7 +2334,8 @@
 		if file == nil || parsed.Module == nil {
 			return nil, fmt.Errorf("no module declaration for %s", modURI)
 		}
-		if parsed.Go != nil && semver.Compare(goVersion, parsed.Go.Version) < 0 {
+		// Prepend "v" to go versions to make them valid semver.
+		if parsed.Go != nil && semver.Compare("v"+goVersion, "v"+parsed.Go.Version) < 0 {
 			goVersion = parsed.Go.Version
 		}
 		path := parsed.Module.Mod.Path
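
The "v" prefix added above matters because x/mod's semver package treats unprefixed strings as invalid, and all invalid versions compare as equal. A small demonstration:

package main

import (
	"fmt"

	"golang.org/x/mod/semver"
)

func main() {
	// Unprefixed go directives ("1.17", "1.18") are not valid semver, and
	// invalid versions compare equal, so this comparison is useless.
	fmt.Println(semver.Compare("1.17", "1.18")) // 0
	// With the "v" prefix both are valid and order as expected.
	fmt.Println(semver.Compare("v1.17", "v1.18")) // -1
}
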
diff --git a/internal/lsp/cache/symbols.go b/internal/lsp/cache/symbols.go
index d1ecf2a..8310172 100644
--- a/internal/lsp/cache/symbols.go
+++ b/internal/lsp/cache/symbols.go
@@ -69,7 +69,6 @@
 
 type symbolWalker struct {
 	curFile    *source.ParsedGoFile
-	pkgName    string
 	curURI     protocol.DocumentURI
 	symbols    []source.Symbol
 	firstError error
diff --git a/internal/lsp/cache/view.go b/internal/lsp/cache/view.go
index b54210e..b34807c 100644
--- a/internal/lsp/cache/view.go
+++ b/internal/lsp/cache/view.go
@@ -29,7 +29,6 @@
 	"golang.org/x/tools/internal/imports"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
-	"golang.org/x/tools/internal/memoize"
 	"golang.org/x/tools/internal/span"
 	"golang.org/x/tools/internal/xcontext"
 	errors "golang.org/x/xerrors"
@@ -74,7 +73,7 @@
 	initCancelFirstAttempt context.CancelFunc
 
 	snapshotMu sync.Mutex
-	snapshot   *snapshot
+	snapshot   *snapshot // nil after shutdown has been called
 
 	// initialWorkspaceLoad is closed when the first workspace initialization has
 	// completed. If we failed to load, we only retry if the go.mod file changes,
@@ -93,10 +92,6 @@
 	// workspaceInformation tracks various details about this view's
 	// environment variables, go version, and use of modules.
 	workspaceInformation
-
-	// tempWorkspace is a temporary directory dedicated to holding the latest
-	// version of the workspace go.mod file. (TODO: also go.sum file)
-	tempWorkspace span.URI
 }
 
 type workspaceInformation struct {
@@ -141,16 +136,8 @@
 
 	// tempModfile indicates whether or not the -modfile flag should be used.
 	tempModfile
-
-	// usesWorkspaceModule indicates support for the experimental workspace module
-	// feature.
-	usesWorkspaceModule
 )
 
-type builtinPackageHandle struct {
-	handle *memoize.Handle
-}
-
 // fileBase holds the common functionality for all files.
 // It is intended to be embedded in the file implementations
 type fileBase struct {
@@ -231,16 +218,39 @@
 	return v.folder
 }
 
-func (v *View) TempWorkspace() span.URI {
-	return v.tempWorkspace
-}
-
 func (v *View) Options() *source.Options {
 	v.optionsMu.Lock()
 	defer v.optionsMu.Unlock()
 	return v.options
 }
 
+func (v *View) FileKind(fh source.FileHandle) source.FileKind {
+	if o, ok := fh.(source.Overlay); ok {
+		if o.Kind() != source.UnknownKind {
+			return o.Kind()
+		}
+	}
+	fext := filepath.Ext(fh.URI().Filename())
+	switch fext {
+	case ".go":
+		return source.Go
+	case ".mod":
+		return source.Mod
+	case ".sum":
+		return source.Sum
+	case ".work":
+		return source.Work
+	}
+	exts := v.Options().TemplateExtensions
+	for _, ext := range exts {
+		if fext == ext || fext == "."+ext {
+			return source.Tmpl
+		}
+	}
+	// and now what? This should never happen, but it does for cgo before go1.15
+	return source.Go
+}
+
 func minorOptionsChange(a, b *source.Options) bool {
 	// Check if any of the settings that modify our understanding of files have been changed
 	if !reflect.DeepEqual(a.Env, b.Env) {
@@ -332,25 +342,49 @@
 	return s.view.importsState.runProcessEnvFunc(ctx, s, fn)
 }
 
+// separated out from its sole use in locateTemplateFiles for testability
+func fileHasExtension(path string, suffixes []string) bool {
+	ext := filepath.Ext(path)
+	if ext != "" && ext[0] == '.' {
+		ext = ext[1:]
+	}
+	for _, s := range suffixes {
+		if s != "" && ext == s {
+			return true
+		}
+	}
+	return false
+}
+
 func (s *snapshot) locateTemplateFiles(ctx context.Context) {
-	if !s.view.Options().ExperimentalTemplateSupport {
+	if len(s.view.Options().TemplateExtensions) == 0 {
 		return
 	}
+	suffixes := s.view.Options().TemplateExtensions
+
+	// The workspace root may have been expanded to a module, but we should apply
+	// directory filters based on the configured workspace folder.
+	//
+	// TODO(rfindley): we should be more principled about paths outside of the
+	// workspace folder: do we even consider them? Do we support absolute
+	// exclusions? Relative exclusions starting with ..?
 	dir := s.workspace.root.Filename()
+	relativeTo := s.view.folder.Filename()
+
 	searched := 0
 	// Change to WalkDir when we move up to 1.16
 	err := filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
 		if err != nil {
 			return err
 		}
-		if strings.HasSuffix(filepath.Ext(path), "tmpl") && !pathExcludedByFilter(path, dir, s.view.gomodcache, s.view.options) &&
-			!fi.IsDir() {
+		relpath := strings.TrimPrefix(path, relativeTo)
+		excluded := pathExcludedByFilter(relpath, dir, s.view.gomodcache, s.view.options)
+		if fileHasExtension(path, suffixes) && !excluded && !fi.IsDir() {
 			k := span.URIFromPath(path)
-			fh, err := s.GetVersionedFile(ctx, k)
+			_, err := s.GetVersionedFile(ctx, k)
 			if err != nil {
 				return nil
 			}
-			s.files[k] = fh
 		}
 		searched++
 		if fileLimit > 0 && searched > fileLimit {
@@ -490,7 +524,10 @@
 	}
 	v.mu.Unlock()
 	v.snapshotMu.Lock()
-	go v.snapshot.generation.Destroy()
+	if v.snapshot != nil {
+		go v.snapshot.generation.Destroy("View.shutdown")
+		v.snapshot = nil
+	}
 	v.snapshotMu.Unlock()
 	v.importsState.destroy()
 }
@@ -536,13 +573,16 @@
 }
 
 func (v *View) Snapshot(ctx context.Context) (source.Snapshot, func()) {
-	return v.getSnapshot(ctx)
+	return v.getSnapshot()
 }
 
-func (v *View) getSnapshot(ctx context.Context) (*snapshot, func()) {
+func (v *View) getSnapshot() (*snapshot, func()) {
 	v.snapshotMu.Lock()
 	defer v.snapshotMu.Unlock()
-	return v.snapshot, v.snapshot.generation.Acquire(ctx)
+	if v.snapshot == nil {
+		panic("getSnapshot called after shutdown")
+	}
+	return v.snapshot, v.snapshot.generation.Acquire()
 }
 
 func (s *snapshot) initialize(ctx context.Context, firstAttempt bool) {
@@ -655,6 +695,9 @@
 
 // invalidateContent invalidates the content of a Go file,
 // including any position and type information that depends on it.
+//
+// invalidateContent returns a non-nil snapshot for the new content, along with
+// a callback which the caller must invoke to release that snapshot.
 func (v *View) invalidateContent(ctx context.Context, changes map[span.URI]*fileChange, forceReloadMetadata bool) (*snapshot, func()) {
 	// Detach the context so that content invalidation cannot be canceled.
 	ctx = xcontext.Detach(ctx)
@@ -663,6 +706,10 @@
 	v.snapshotMu.Lock()
 	defer v.snapshotMu.Unlock()
 
+	if v.snapshot == nil {
+		panic("invalidateContent called after shutdown")
+	}
+
 	// Cancel all still-running previous requests, since they would be
 	// operating on stale data.
 	v.snapshot.cancel()
@@ -672,57 +719,10 @@
 
 	oldSnapshot := v.snapshot
 
-	var workspaceChanged bool
-	v.snapshot, workspaceChanged = oldSnapshot.clone(ctx, v.baseCtx, changes, forceReloadMetadata)
-	if workspaceChanged {
-		if err := v.updateWorkspaceLocked(ctx); err != nil {
-			event.Error(ctx, "copying workspace dir", err)
-		}
-	}
-	go oldSnapshot.generation.Destroy()
+	v.snapshot = oldSnapshot.clone(ctx, v.baseCtx, changes, forceReloadMetadata)
+	go oldSnapshot.generation.Destroy("View.invalidateContent")
 
-	return v.snapshot, v.snapshot.generation.Acquire(ctx)
-}
-
-func (v *View) updateWorkspace(ctx context.Context) error {
-	if v.tempWorkspace == "" {
-		return nil
-	}
-	v.snapshotMu.Lock()
-	defer v.snapshotMu.Unlock()
-	return v.updateWorkspaceLocked(ctx)
-}
-
-// updateWorkspaceLocked should only be called when v.snapshotMu is held. It
-// guarantees that workspace module content will be copied to v.tempWorkace at
-// some point in the future. We do not guarantee that the temp workspace sees
-// all changes to the workspace module, only that it is eventually consistent
-// with the workspace module of the latest snapshot.
-func (v *View) updateWorkspaceLocked(ctx context.Context) error {
-	release := v.snapshot.generation.Acquire(ctx)
-	defer release()
-	src, err := v.snapshot.getWorkspaceDir(ctx)
-	if err != nil {
-		return err
-	}
-	for _, name := range []string{"go.mod", "go.sum"} {
-		srcname := filepath.Join(src.Filename(), name)
-		srcf, err := os.Open(srcname)
-		if err != nil {
-			return errors.Errorf("opening snapshot %s: %w", name, err)
-		}
-		defer srcf.Close()
-		dstname := filepath.Join(v.tempWorkspace.Filename(), name)
-		dstf, err := os.Create(dstname)
-		if err != nil {
-			return errors.Errorf("truncating view %s: %w", name, err)
-		}
-		defer dstf.Close()
-		if _, err := io.Copy(dstf, srcf); err != nil {
-			return errors.Errorf("copying %s: %w", name, err)
-		}
-	}
-	return nil
+	return v.snapshot, v.snapshot.generation.Acquire()
 }
 
 func (s *Session) getWorkspaceInformation(ctx context.Context, folder span.URI, options *source.Options) (*workspaceInformation, error) {
@@ -800,7 +800,7 @@
 // TODO (rFindley): move this to workspace.go
 // TODO (rFindley): simplify this once workspace modules are enabled by default.
 func findWorkspaceRoot(ctx context.Context, folder span.URI, fs source.FileSource, excludePath func(string) bool, experimental bool) (span.URI, error) {
-	patterns := []string{"go.mod"}
+	patterns := []string{"go.work", "go.mod"}
 	if experimental {
 		patterns = []string{"go.work", "gopls.mod", "go.mod"}
 	}
@@ -850,7 +850,8 @@
 		if exists {
 			return span.URIFromPath(dir), nil
 		}
-		next, _ := filepath.Split(dir)
+		// Trailing separators must be trimmed; otherwise filepath.Split is a no-op.
+		next, _ := filepath.Split(strings.TrimRight(dir, string(filepath.Separator)))
 		if next == dir {
 			break
 		}
@@ -904,6 +905,7 @@
 	for k := range vars {
 		args = append(args, k)
 	}
+	args = append(args, "GOWORK")
 
 	inv := gocommand.Invocation{
 		Verb:       "env",
@@ -1057,26 +1059,18 @@
 	}
 }
 
+// pathExcludedByFilter reports whether the path (relative to the workspace
+// folder) should be excluded by the configured directory filters.
+//
+// TODO(rfindley): passing root and gomodcache here makes it confusing whether
+// path should be absolute or relative, and has already caused at least one
+// bug.
 func pathExcludedByFilter(path, root, gomodcache string, opts *source.Options) bool {
 	path = strings.TrimPrefix(filepath.ToSlash(path), "/")
 	gomodcache = strings.TrimPrefix(filepath.ToSlash(strings.TrimPrefix(gomodcache, root)), "/")
-
-	excluded := false
 	filters := opts.DirectoryFilters
 	if gomodcache != "" {
 		filters = append(filters, "-"+gomodcache)
 	}
-	for _, filter := range filters {
-		op, prefix := filter[0], filter[1:]
-		// Non-empty prefixes have to be precise directory matches.
-		if prefix != "" {
-			prefix = prefix + "/"
-			path = path + "/"
-		}
-		if !strings.HasPrefix(path, prefix) {
-			continue
-		}
-		excluded = op == '-'
-	}
-	return excluded
+	return source.FiltersDisallow(path, filters)
 }
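
Note on the filter change above: locateTemplateFiles now passes a path relative to the workspace folder, and the inline matching loop has moved into source.FiltersDisallow. What follows is a minimal standalone sketch of those prefix-matching semantics, mirroring the logic removed above for illustration only; it is not the shared implementation itself, and the example filters are made up.

// Illustrative sketch only: the prefix-matching semantics of directory
// filters, mirroring the inline loop removed above. The real logic now
// lives in source.FiltersDisallow.
package main

import (
	"fmt"
	"strings"
)

// filtersDisallow reports whether a slash-separated relative path is
// excluded by filters such as "-node_modules" or "+node_modules/keep".
// The last matching filter wins: '-' excludes, '+' re-includes.
func filtersDisallow(path string, filters []string) bool {
	excluded := false
	for _, filter := range filters {
		op, prefix := filter[0], filter[1:]
		// Non-empty prefixes have to be precise directory matches.
		if prefix != "" {
			prefix = prefix + "/"
			path = path + "/"
		}
		if !strings.HasPrefix(path, prefix) {
			continue
		}
		excluded = op == '-'
	}
	return excluded
}

func main() {
	filters := []string{"-node_modules", "+node_modules/keep"}
	fmt.Println(filtersDisallow("node_modules/pkg", filters))  // true
	fmt.Println(filtersDisallow("node_modules/keep", filters)) // false
}
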
diff --git a/internal/lsp/cache/view_test.go b/internal/lsp/cache/view_test.go
index f0923d4..d76dcda 100644
--- a/internal/lsp/cache/view_test.go
+++ b/internal/lsp/cache/view_test.go
@@ -55,6 +55,8 @@
 module a
 -- a/x/x.go
 package x
+-- a/x/y/y.go
+package x
 -- b/go.mod --
 module b
 -- b/c/go.mod --
@@ -79,6 +81,7 @@
 		{"", "", false}, // no module at root, and more than one nested module
 		{"a", "a", false},
 		{"a/x", "a", false},
+		{"a/x/y", "a", false},
 		{"b/c", "b/c", false},
 		{"d", "d/e", false},
 		{"d", "d", true},
@@ -172,3 +175,44 @@
 		}
 	}
 }
+
+func TestSuffixes(t *testing.T) {
+	type file struct {
+		path string
+		want bool
+	}
+	type cases struct {
+		option []string
+		files  []file
+	}
+	tests := []cases{
+		{[]string{"tmpl", "gotmpl"}, []file{ // default
+			{"foo", false},
+			{"foo.tmpl", true},
+			{"foo.gotmpl", true},
+			{"tmpl", false},
+			{"tmpl.go", false}},
+		},
+		{[]string{"tmpl", "gotmpl", "html", "gohtml"}, []file{
+			{"foo.gotmpl", true},
+			{"foo.html", true},
+			{"foo.gohtml", true},
+			{"html", false}},
+		},
+		{[]string{"tmpl", "gotmpl", ""}, []file{ // possible user mistake
+			{"foo.gotmpl", true},
+			{"foo.go", false},
+			{"foo", false}},
+		},
+	}
+	for _, a := range tests {
+		suffixes := a.option
+		for _, b := range a.files {
+			got := fileHasExtension(b.path, suffixes)
+			if got != b.want {
+				t.Errorf("got %v, want %v, option %q, file %q (%+v)",
+					got, b.want, a.option, b.path, b)
+			}
+		}
+	}
+}
diff --git a/internal/lsp/cache/workspace.go b/internal/lsp/cache/workspace.go
index 4204bcc..5d62d66 100644
--- a/internal/lsp/cache/workspace.go
+++ b/internal/lsp/cache/workspace.go
@@ -16,19 +16,19 @@
 	"golang.org/x/mod/modfile"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/source"
-	workfile "golang.org/x/tools/internal/mod/modfile"
 	"golang.org/x/tools/internal/span"
 	"golang.org/x/tools/internal/xcontext"
 	errors "golang.org/x/xerrors"
 )
 
+// workspaceSource reports how the set of active modules has been derived.
 type workspaceSource int
 
 const (
-	legacyWorkspace = iota
-	goplsModWorkspace
-	goWorkWorkspace
-	fileSystemWorkspace
+	legacyWorkspace     = iota // non-module or single module mode
+	goplsModWorkspace          // modules provided by a gopls.mod file
+	goWorkWorkspace            // modules provided by a go.work file
+	fileSystemWorkspace        // modules scanned from the filesystem
 )
 
 func (s workspaceSource) String() string {
@@ -69,9 +69,8 @@
 	// In all modes except for legacy, this is equivalent to modFiles.
 	knownModFiles map[span.URI]struct{}
 
-	// go111moduleOff indicates whether GO111MODULE=off has been configured in
-	// the environment.
-	go111moduleOff bool
+	// workFile, if non-empty, is the go.work file for the workspace.
+	workFile span.URI
 
 	// The workspace module is lazily re-built once after being invalidated.
 	// buildMu+built guards this reconstruction.
@@ -87,59 +86,60 @@
 	wsDirs   map[span.URI]struct{}
 }
 
-func newWorkspace(ctx context.Context, root span.URI, fs source.FileSource, excludePath func(string) bool, go111moduleOff bool, experimental bool) (*workspace, error) {
-	// In experimental mode, the user may have a gopls.mod file that defines
-	// their workspace.
-	if experimental {
-		ws, err := parseExplicitWorkspaceFile(ctx, root, fs, excludePath)
-		if err == nil {
-			return ws, nil
-		}
+// newWorkspace creates a new workspace at the given root directory,
+// determining its module source based on the presence of a gopls.mod or
+// go.work file, and the go111moduleOff and useWsModule settings.
+//
+// If useWsModule is set, the workspace may use a synthetic mod file replacing
+// all modules in the root.
+//
+// If there is no active workspace file (a gopls.mod or go.work), newWorkspace
+// scans the filesystem to find modules.
+func newWorkspace(ctx context.Context, root span.URI, fs source.FileSource, excludePath func(string) bool, go111moduleOff bool, useWsModule bool) (*workspace, error) {
+	ws := &workspace{
+		root:        root,
+		excludePath: excludePath,
 	}
+
+	// The user may have a gopls.mod or go.work file that defines their
+	// workspace.
+	if err := loadExplicitWorkspaceFile(ctx, ws, fs); err == nil {
+		return ws, nil
+	}
+
 	// Otherwise, in all other modes, search for all of the go.mod files in the
 	// workspace.
 	knownModFiles, err := findModules(root, excludePath, 0)
 	if err != nil {
 		return nil, err
 	}
-	// When GO111MODULE=off, there are no active go.mod files.
-	if go111moduleOff {
-		return &workspace{
-			root:           root,
-			excludePath:    excludePath,
-			moduleSource:   legacyWorkspace,
-			knownModFiles:  knownModFiles,
-			go111moduleOff: true,
-		}, nil
-	}
-	// In legacy mode, not all known go.mod files will be considered active.
-	if !experimental {
+	ws.knownModFiles = knownModFiles
+
+	switch {
+	case go111moduleOff:
+		ws.moduleSource = legacyWorkspace
+	case useWsModule:
+		ws.activeModFiles = knownModFiles
+		ws.moduleSource = fileSystemWorkspace
+	default:
+		ws.moduleSource = legacyWorkspace
 		activeModFiles, err := getLegacyModules(ctx, root, fs)
 		if err != nil {
 			return nil, err
 		}
-		return &workspace{
-			root:           root,
-			excludePath:    excludePath,
-			activeModFiles: activeModFiles,
-			knownModFiles:  knownModFiles,
-			moduleSource:   legacyWorkspace,
-		}, nil
+		ws.activeModFiles = activeModFiles
 	}
-	return &workspace{
-		root:           root,
-		excludePath:    excludePath,
-		activeModFiles: knownModFiles,
-		knownModFiles:  knownModFiles,
-		moduleSource:   fileSystemWorkspace,
-	}, nil
+	return ws, nil
 }
 
-func parseExplicitWorkspaceFile(ctx context.Context, root span.URI, fs source.FileSource, excludePath func(string) bool) (*workspace, error) {
+// loadExplicitWorkspaceFile loads workspace information from go.work or
+// gopls.mod files, setting the active modules, mod file, and module source
+// accordingly.
+func loadExplicitWorkspaceFile(ctx context.Context, ws *workspace, fs source.FileSource) error {
 	for _, src := range []workspaceSource{goWorkWorkspace, goplsModWorkspace} {
-		fh, err := fs.GetFile(ctx, uriForSource(root, src))
+		fh, err := fs.GetFile(ctx, uriForSource(ws.root, src))
 		if err != nil {
-			return nil, err
+			return err
 		}
 		contents, err := fh.Read()
 		if err != nil {
@@ -149,23 +149,23 @@
 		var activeModFiles map[span.URI]struct{}
 		switch src {
 		case goWorkWorkspace:
-			file, activeModFiles, err = parseGoWork(ctx, root, fh.URI(), contents, fs)
+			file, activeModFiles, err = parseGoWork(ctx, ws.root, fh.URI(), contents, fs)
+			ws.workFile = fh.URI()
 		case goplsModWorkspace:
-			file, activeModFiles, err = parseGoplsMod(root, fh.URI(), contents)
+			file, activeModFiles, err = parseGoplsMod(ws.root, fh.URI(), contents)
 		}
 		if err != nil {
-			return nil, err
+			ws.buildMu.Lock()
+			ws.built = true
+			ws.buildErr = err
+			ws.buildMu.Unlock()
 		}
-		return &workspace{
-			root:           root,
-			excludePath:    excludePath,
-			activeModFiles: activeModFiles,
-			knownModFiles:  activeModFiles,
-			mod:            file,
-			moduleSource:   src,
-		}, nil
+		ws.mod = file
+		ws.activeModFiles = activeModFiles
+		ws.moduleSource = src
+		return nil
 	}
-	return nil, noHardcodedWorkspace
+	return noHardcodedWorkspace
 }
 
 var noHardcodedWorkspace = errors.New("no hardcoded workspace")
@@ -282,10 +282,11 @@
 		moduleSource:   w.moduleSource,
 		knownModFiles:  make(map[span.URI]struct{}),
 		activeModFiles: make(map[span.URI]struct{}),
-		go111moduleOff: w.go111moduleOff,
+		workFile:       w.workFile,
 		mod:            w.mod,
 		sum:            w.sum,
 		wsDirs:         w.wsDirs,
+		excludePath:    w.excludePath,
 	}
 	for k, v := range w.knownModFiles {
 		result.knownModFiles[k] = v
@@ -296,90 +297,31 @@
 
 	// First handle changes to the go.work or gopls.mod file. This must be
 	// considered before any changes to go.mod or go.sum files, as these files
-	// determine which modules we care about. In legacy workspace mode we don't
-	// consider the gopls.mod or go.work files.
-	if w.moduleSource != legacyWorkspace {
-		// If go.work/gopls.mod has changed we need to either re-read it if it
-		// exists or walk the filesystem if it has been deleted.
-		// go.work should override the gopls.mod if both exist.
-		for _, src := range []workspaceSource{goplsModWorkspace, goWorkWorkspace} {
-			uri := uriForSource(w.root, src)
-			// File opens/closes are just no-ops.
-			change, ok := changes[uri]
-			if !ok || change.isUnchanged {
-				continue
-			}
-			if change.exists {
-				// Only invalidate if the file if it actually parses.
-				// Otherwise, stick with the current file.
-				var parsedFile *modfile.File
-				var parsedModules map[span.URI]struct{}
-				var err error
-				switch src {
-				case goWorkWorkspace:
-					parsedFile, parsedModules, err = parseGoWork(ctx, w.root, uri, change.content, fs)
-				case goplsModWorkspace:
-					parsedFile, parsedModules, err = parseGoplsMod(w.root, uri, change.content)
-				}
-				if err == nil {
-					changed = true
-					reload = change.fileHandle.Saved()
-					result.mod = parsedFile
-					result.moduleSource = src
-					result.knownModFiles = parsedModules
-					result.activeModFiles = make(map[span.URI]struct{})
-					for k, v := range parsedModules {
-						result.activeModFiles[k] = v
-					}
-				} else {
-					// An unparseable file should not invalidate the workspace:
-					// nothing good could come from changing the workspace in
-					// this case.
-					event.Error(ctx, fmt.Sprintf("parsing %s", filepath.Base(uri.Filename())), err)
-				}
-			} else {
-				// go.work/gopls.mod is deleted. search for modules again.
-				changed = true
-				reload = true
-				result.moduleSource = fileSystemWorkspace
-				// The parsed file is no longer valid.
-				result.mod = nil
-				knownModFiles, err := findModules(w.root, w.excludePath, 0)
-				if err != nil {
-					result.knownModFiles = nil
-					result.activeModFiles = nil
-					event.Error(ctx, "finding file system modules", err)
-				} else {
-					result.knownModFiles = knownModFiles
-					result.activeModFiles = make(map[span.URI]struct{})
-					for k, v := range result.knownModFiles {
-						result.activeModFiles[k] = v
-					}
-				}
-			}
+	// determine which modules we care about. If go.work/gopls.mod has changed
+	// we need to either re-read it if it exists or walk the filesystem if it
+	// has been deleted. go.work should override the gopls.mod if both exist.
+	changed, reload = handleWorkspaceFileChanges(ctx, result, changes, fs)
+	// Next, handle go.mod changes that could affect our workspace.
+	for uri, change := range changes {
+		// We only care about go.mod files in the workspace directory.
+		if change.isUnchanged || !isGoMod(uri) || !source.InDir(result.root.Filename(), uri.Filename()) {
+			continue
 		}
-	}
-
-	// Next, handle go.mod changes that could affect our workspace. If we're
-	// reading our tracked modules from the gopls.mod, there's nothing to do
-	// here.
-	if result.moduleSource != goplsModWorkspace && result.moduleSource != goWorkWorkspace {
-		for uri, change := range changes {
-			if change.isUnchanged || !isGoMod(uri) || !source.InDir(result.root.Filename(), uri.Filename()) {
-				continue
+		changed = true
+		active := result.moduleSource != legacyWorkspace || source.CompareURI(modURI(w.root), uri) == 0
+		reload = reload || (active && change.fileHandle.Saved())
+		// Don't mess with the list of mod files if using go.work or gopls.mod.
+		if result.moduleSource == goplsModWorkspace || result.moduleSource == goWorkWorkspace {
+			continue
+		}
+		if change.exists {
+			result.knownModFiles[uri] = struct{}{}
+			if active {
+				result.activeModFiles[uri] = struct{}{}
 			}
-			changed = true
-			active := result.moduleSource != legacyWorkspace || source.CompareURI(modURI(w.root), uri) == 0
-			reload = reload || (active && change.fileHandle.Saved())
-			if change.exists {
-				result.knownModFiles[uri] = struct{}{}
-				if active {
-					result.activeModFiles[uri] = struct{}{}
-				}
-			} else {
-				delete(result.knownModFiles, uri)
-				delete(result.activeModFiles, uri)
-			}
+		} else {
+			delete(result.knownModFiles, uri)
+			delete(result.activeModFiles, uri)
 		}
 	}
 
@@ -407,6 +349,76 @@
 	return result, changed, reload
 }
 
+// handleWorkspaceFileChanges handles changes related to a go.work or gopls.mod
+// file, updating ws accordingly. ws.root must be set.
+func handleWorkspaceFileChanges(ctx context.Context, ws *workspace, changes map[span.URI]*fileChange, fs source.FileSource) (changed, reload bool) {
+	// If go.work/gopls.mod has changed we need to either re-read it if it
+	// exists or walk the filesystem if it has been deleted.
+	// go.work should override the gopls.mod if both exist.
+	for _, src := range []workspaceSource{goWorkWorkspace, goplsModWorkspace} {
+		uri := uriForSource(ws.root, src)
+		// File opens/closes are just no-ops.
+		change, ok := changes[uri]
+		if !ok {
+			continue
+		}
+		if change.isUnchanged {
+			break
+		}
+		if change.exists {
+			// Only invalidate if the file actually parses.
+			// Otherwise, stick with the current file.
+			var parsedFile *modfile.File
+			var parsedModules map[span.URI]struct{}
+			var err error
+			switch src {
+			case goWorkWorkspace:
+				parsedFile, parsedModules, err = parseGoWork(ctx, ws.root, uri, change.content, fs)
+			case goplsModWorkspace:
+				parsedFile, parsedModules, err = parseGoplsMod(ws.root, uri, change.content)
+			}
+			if err != nil {
+				// An unparseable file should not invalidate the workspace:
+				// nothing good could come from changing the workspace in
+				// this case.
+				event.Error(ctx, fmt.Sprintf("parsing %s", filepath.Base(uri.Filename())), err)
+			} else {
+				// Only update the modfile if it parsed.
+				changed = true
+				reload = change.fileHandle.Saved()
+				ws.mod = parsedFile
+				ws.moduleSource = src
+				ws.knownModFiles = parsedModules
+				ws.activeModFiles = make(map[span.URI]struct{})
+				for k, v := range parsedModules {
+					ws.activeModFiles[k] = v
+				}
+			}
+			break // We've found an explicit workspace file, so we can stop looking.
+		} else {
+			// go.work/gopls.mod is deleted. search for modules again.
+			changed = true
+			reload = true
+			ws.moduleSource = fileSystemWorkspace
+			// The parsed file is no longer valid.
+			ws.mod = nil
+			knownModFiles, err := findModules(ws.root, ws.excludePath, 0)
+			if err != nil {
+				ws.knownModFiles = nil
+				ws.activeModFiles = nil
+				event.Error(ctx, "finding file system modules", err)
+			} else {
+				ws.knownModFiles = knownModFiles
+				ws.activeModFiles = make(map[span.URI]struct{})
+				for k, v := range ws.knownModFiles {
+					ws.activeModFiles[k] = v
+				}
+			}
+		}
+	}
+	return changed, reload
+}
+
 // goplsModURI returns the URI for the gopls.mod file contained in root.
 func uriForSource(root span.URI, src workspaceSource) span.URI {
 	var basename string
@@ -432,7 +444,7 @@
 }
 
 func isGoSum(uri span.URI) bool {
-	return filepath.Base(uri.Filename()) == "go.sum"
+	return filepath.Base(uri.Filename()) == "go.sum" || filepath.Base(uri.Filename()) == "go.work.sum"
 }
 
 // fileExists reports if the file uri exists within source.
@@ -477,26 +489,29 @@
 }
 
 func parseGoWork(ctx context.Context, root, uri span.URI, contents []byte, fs source.FileSource) (*modfile.File, map[span.URI]struct{}, error) {
-	workFile, err := workfile.ParseWork(uri.Filename(), contents, nil)
+	workFile, err := modfile.ParseWork(uri.Filename(), contents, nil)
 	if err != nil {
 		return nil, nil, errors.Errorf("parsing go.work: %w", err)
 	}
 	modFiles := make(map[span.URI]struct{})
-	for _, dir := range workFile.Directory {
+	for _, dir := range workFile.Use {
 		// The resulting modfile must use absolute paths, so that it can be
 		// written to a temp directory.
-		dir.DiskPath = absolutePath(root, dir.DiskPath)
-		modURI := span.URIFromPath(filepath.Join(dir.DiskPath, "go.mod"))
+		dir.Path = absolutePath(root, dir.Path)
+		modURI := span.URIFromPath(filepath.Join(dir.Path, "go.mod"))
 		modFiles[modURI] = struct{}{}
 	}
 	modFile, err := buildWorkspaceModFile(ctx, modFiles, fs)
 	if err != nil {
 		return nil, nil, err
 	}
-	if workFile.Go.Version != "" {
-		if err := modFile.AddGoStmt(workFile.Go.Version); err != nil {
-			return nil, nil, err
-		}
+
+	// Require a go directive, per the spec.
+	if workFile.Go == nil || workFile.Go.Version == "" {
+		return nil, nil, fmt.Errorf("go.work has missing or incomplete go directive")
+	}
+	if err := modFile.AddGoStmt(workFile.Go.Version); err != nil {
+		return nil, nil, err
 	}
 
 	return modFile, modFiles, nil
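
For readers of the hunk above: a minimal standalone sketch of the new go.work handling, assuming the golang.org/x/mod/modfile API used there (ParseWork, the Use directives' Path field, and the now-mandatory go directive). The workspace path and file contents below are made up for illustration.

// Sketch of the go.work handling added above: parse a go.work file with
// x/mod, require a go directive, and collect the go.mod path for each
// use directive. Paths here are hypothetical.
package main

import (
	"fmt"
	"log"
	"path/filepath"

	"golang.org/x/mod/modfile"
)

func main() {
	data := []byte("go 1.18\n\nuse (\n\t./tools\n\t./gopls\n)\n")
	work, err := modfile.ParseWork("/workspace/go.work", data, nil)
	if err != nil {
		log.Fatalf("parsing go.work: %v", err)
	}
	// As in parseGoWork above, a missing or incomplete go directive is an error.
	if work.Go == nil || work.Go.Version == "" {
		log.Fatal("go.work has missing or incomplete go directive")
	}
	for _, use := range work.Use {
		// Relative use paths are resolved against the go.work directory.
		dir := use.Path
		if !filepath.IsAbs(dir) {
			dir = filepath.Join("/workspace", dir)
		}
		fmt.Println(filepath.Join(dir, "go.mod"))
	}
}
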
diff --git a/internal/lsp/cache/workspace_test.go b/internal/lsp/cache/workspace_test.go
index a03aedc..b809ad1 100644
--- a/internal/lsp/cache/workspace_test.go
+++ b/internal/lsp/cache/workspace_test.go
@@ -6,10 +6,12 @@
 
 import (
 	"context"
+	"errors"
 	"os"
 	"strings"
 	"testing"
 
+	"golang.org/x/mod/modfile"
 	"golang.org/x/tools/internal/lsp/fake"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/span"
@@ -309,6 +311,74 @@
 	}
 }
 
+func workspaceFromTxtar(t *testing.T, files string) (*workspace, func(), error) {
+	ctx := context.Background()
+	dir, err := fake.Tempdir(fake.UnpackTxt(files))
+	if err != nil {
+		return nil, func() {}, err
+	}
+	cleanup := func() {
+		os.RemoveAll(dir)
+	}
+	root := span.URIFromPath(dir)
+
+	fs := &osFileSource{}
+	excludeNothing := func(string) bool { return false }
+	workspace, err := newWorkspace(ctx, root, fs, excludeNothing, false, false)
+	return workspace, cleanup, err
+}
+
+func TestWorkspaceParseError(t *testing.T) {
+	w, cleanup, err := workspaceFromTxtar(t, `
+-- go.work --
+go 1.18
+
+usa ./typo
+-- typo/go.mod --
+module foo
+`)
+	defer cleanup()
+	if err != nil {
+		t.Fatalf("error creating workspace: %v; want no error", err)
+	}
+	w.buildMu.Lock()
+	built, buildErr := w.built, w.buildErr
+	w.buildMu.Unlock()
+	if !built || buildErr == nil {
+		t.Fatalf("built, buildErr: got %v, %v; want true, non-nil", built, buildErr)
+	}
+	var errList modfile.ErrorList
+	if !errors.As(buildErr, &errList) {
+		t.Fatalf("expected error to be an errorlist; got %v", buildErr)
+	}
+	if len(errList) != 1 {
+		t.Fatalf("expected errorList to have one element; got %v elements", len(errList))
+	}
+	parseErr := errList[0]
+	if parseErr.Pos.Line != 3 {
+		t.Fatalf("expected error to be on line 3; got %v", parseErr.Pos.Line)
+	}
+}
+
+func TestWorkspaceMissingModFile(t *testing.T) {
+	w, cleanup, err := workspaceFromTxtar(t, `
+-- go.work --
+go 1.18
+
+use ./missing
+`)
+	defer cleanup()
+	if err != nil {
+		t.Fatalf("error creating workspace: %v; want no error", err)
+	}
+	w.buildMu.Lock()
+	built, buildErr := w.built, w.buildErr
+	w.buildMu.Unlock()
+	if !built || buildErr == nil {
+		t.Fatalf("built, buildErr: got %v, %v; want true, non-nil", built, buildErr)
+	}
+}
+
 func checkState(ctx context.Context, t *testing.T, fs source.FileSource, rel fake.RelativeTo, got *workspace, want wsState) {
 	t.Helper()
 	if got.moduleSource != want.source {
diff --git a/internal/lsp/cmd/call_hierarchy.go b/internal/lsp/cmd/call_hierarchy.go
index 2f870f0..c9f9e73 100644
--- a/internal/lsp/cmd/call_hierarchy.go
+++ b/internal/lsp/cmd/call_hierarchy.go
@@ -21,17 +21,18 @@
 }
 
 func (c *callHierarchy) Name() string      { return "call_hierarchy" }
+func (c *callHierarchy) Parent() string    { return c.app.Name() }
 func (c *callHierarchy) Usage() string     { return "<position>" }
 func (c *callHierarchy) ShortHelp() string { return "display selected identifier's call hierarchy" }
 func (c *callHierarchy) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
 
-  $ # 1-indexed location (:line:column or :#offset) of the target identifier
-  $ gopls call_hierarchy helper/helper.go:8:6
-  $ gopls call_hierarchy helper/helper.go:#53
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls call_hierarchy helper/helper.go:8:6
+	$ gopls call_hierarchy helper/helper.go:#53
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (c *callHierarchy) Run(ctx context.Context, args ...string) error {
diff --git a/internal/lsp/cmd/check.go b/internal/lsp/cmd/check.go
index 42d1976..566924a 100644
--- a/internal/lsp/cmd/check.go
+++ b/internal/lsp/cmd/check.go
@@ -19,15 +19,16 @@
 }
 
 func (c *check) Name() string      { return "check" }
+func (c *check) Parent() string    { return c.app.Name() }
 func (c *check) Usage() string     { return "<filename>" }
 func (c *check) ShortHelp() string { return "show diagnostic results for the specified file" }
 func (c *check) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example: show the diagnostic results of this file:
 
-  $ gopls check internal/lsp/cmd/check.go
+	$ gopls check internal/lsp/cmd/check.go
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run performs the check on the files specified by args and prints the
diff --git a/internal/lsp/cmd/cmd.go b/internal/lsp/cmd/cmd.go
index ad344f7..d48398d 100644
--- a/internal/lsp/cmd/cmd.go
+++ b/internal/lsp/cmd/cmd.go
@@ -15,8 +15,11 @@
 	"io/ioutil"
 	"log"
 	"os"
+	"reflect"
+	"sort"
 	"strings"
 	"sync"
+	"text/tabwriter"
 	"time"
 
 	"golang.org/x/tools/internal/jsonrpc2"
@@ -61,10 +64,10 @@
 	Remote string `flag:"remote" help:"forward all commands to a remote lsp specified by this flag. With no special prefix, this is assumed to be a TCP address. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. If 'auto', or prefixed by 'auto;', the remote address is automatically resolved based on the executing environment."`
 
 	// Verbose enables verbose logging.
-	Verbose bool `flag:"v" help:"verbose output"`
+	Verbose bool `flag:"v,verbose" help:"verbose output"`
 
 	// VeryVerbose enables a higher level of verbosity in logging output.
-	VeryVerbose bool `flag:"vv" help:"very verbose output"`
+	VeryVerbose bool `flag:"vv,veryverbose" help:"very verbose output"`
 
 	// Control ocagent export of telemetry
 	OCAgent string `flag:"ocagent" help:"the address of the ocagent (e.g. http://localhost:55678), or off"`
@@ -94,6 +97,7 @@
 			RemoteListenTimeout: 1 * time.Minute,
 		},
 	}
+	app.Serve.app = app
 	return app
 }
 
@@ -101,39 +105,110 @@
 func (app *Application) Name() string { return app.name }
 
 // Usage implements tool.Application returning empty extra argument usage.
-func (app *Application) Usage() string { return "<command> [command-flags] [command-args]" }
+func (app *Application) Usage() string { return "" }
 
 // ShortHelp implements tool.Application returning the main binary help.
 func (app *Application) ShortHelp() string {
-	return "The Go Language source tools."
+	return ""
 }
 
 // DetailedHelp implements tool.Application returning the main binary help.
 // This includes the short help for all the sub commands.
 func (app *Application) DetailedHelp(f *flag.FlagSet) {
-	fmt.Fprint(f.Output(), `
-gopls is a Go language server. It is typically used with an editor to provide
-language features. When no command is specified, gopls will default to the 'serve'
-command. The language features can also be accessed via the gopls command-line interface.
+	w := tabwriter.NewWriter(f.Output(), 0, 0, 2, ' ', 0)
+	defer w.Flush()
 
-Available commands are:
+	fmt.Fprint(w, `
+gopls is a Go language server.
+
+It is typically used with an editor to provide language features. When no
+command is specified, gopls will default to the 'serve' command. The language
+features can also be accessed via the gopls command-line interface.
+
+Usage:
+  gopls help [<subject>]
+
+Command:
 `)
-	fmt.Fprint(f.Output(), `
-main:
-`)
+	fmt.Fprint(w, "\nMain\t\n")
 	for _, c := range app.mainCommands() {
-		fmt.Fprintf(f.Output(), "  %s : %v\n", c.Name(), c.ShortHelp())
+		fmt.Fprintf(w, "  %s\t%s\n", c.Name(), c.ShortHelp())
 	}
-	fmt.Fprint(f.Output(), `
-features:
-`)
+	fmt.Fprint(w, "\t\nFeatures\t\n")
 	for _, c := range app.featureCommands() {
-		fmt.Fprintf(f.Output(), "  %s : %v\n", c.Name(), c.ShortHelp())
+		fmt.Fprintf(w, "  %s\t%s\n", c.Name(), c.ShortHelp())
 	}
-	fmt.Fprint(f.Output(), `
-gopls flags are:
-`)
-	f.PrintDefaults()
+	fmt.Fprint(w, "\nflags:\n")
+	printFlagDefaults(f)
+}
+
+// printFlagDefaults is a slightly modified version of flag.PrintDefaults, giving us control over how aliased flags are grouped and formatted.
+func printFlagDefaults(s *flag.FlagSet) {
+	var flags [][]*flag.Flag
+	seen := map[flag.Value]int{}
+	s.VisitAll(func(f *flag.Flag) {
+		if i, ok := seen[f.Value]; !ok {
+			seen[f.Value] = len(flags)
+			flags = append(flags, []*flag.Flag{f})
+		} else {
+			flags[i] = append(flags[i], f)
+		}
+	})
+	for _, entry := range flags {
+		sort.SliceStable(entry, func(i, j int) bool {
+			return len(entry[i].Name) < len(entry[j].Name)
+		})
+		var b strings.Builder
+		for i, f := range entry {
+			switch i {
+			case 0:
+				b.WriteString("  -")
+			default:
+				b.WriteString(",-")
+			}
+			b.WriteString(f.Name)
+		}
+
+		f := entry[0]
+		name, usage := flag.UnquoteUsage(f)
+		if len(name) > 0 {
+			b.WriteString("=")
+			b.WriteString(name)
+		}
+		// Boolean flags of one ASCII letter are so common we
+		// treat them specially, putting their usage on the same line.
+		if b.Len() <= 4 { // space, space, '-', 'x'.
+			b.WriteString("\t")
+		} else {
+			// Four spaces before the tab triggers good alignment
+			// for both 4- and 8-space tab stops.
+			b.WriteString("\n    \t")
+		}
+		b.WriteString(strings.ReplaceAll(usage, "\n", "\n    \t"))
+		if !isZeroValue(f, f.DefValue) {
+			if reflect.TypeOf(f.Value).Elem().Name() == "stringValue" {
+				fmt.Fprintf(&b, " (default %q)", f.DefValue)
+			} else {
+				fmt.Fprintf(&b, " (default %v)", f.DefValue)
+			}
+		}
+		fmt.Fprint(s.Output(), b.String(), "\n")
+	}
+}
+
+// isZeroValue is copied from the standard library's flag package.
+func isZeroValue(f *flag.Flag, value string) bool {
+	// Build a zero value of the flag's Value type, and see if the
+	// result of calling its String method equals the value passed in.
+	// This works unless the Value type is itself an interface type.
+	typ := reflect.TypeOf(f.Value)
+	var z reflect.Value
+	if typ.Kind() == reflect.Ptr {
+		z = reflect.New(typ.Elem())
+	} else {
+		z = reflect.Zero(typ)
+	}
+	return value == z.Interface().(flag.Value).String()
 }
 
 // Run takes the args after top level flag processing, and invokes the correct
@@ -142,14 +217,15 @@
 // temporary measure for compatibility.
 func (app *Application) Run(ctx context.Context, args ...string) error {
 	ctx = debug.WithInstance(ctx, app.wd, app.OCAgent)
-	app.Serve.app = app
 	if len(args) == 0 {
-		return tool.Run(ctx, &app.Serve, args)
+		s := flag.NewFlagSet(app.Name(), flag.ExitOnError)
+		return tool.Run(ctx, s, &app.Serve, args)
 	}
 	command, args := args[0], args[1:]
-	for _, c := range app.commands() {
+	for _, c := range app.Commands() {
 		if c.Name() == command {
-			return tool.Run(ctx, c, args)
+			s := flag.NewFlagSet(app.Name(), flag.ExitOnError)
+			return tool.Run(ctx, s, c, args)
 		}
 	}
 	return tool.CommandLineErrorf("Unknown command %v", command)
@@ -158,7 +234,7 @@
 // commands returns the set of commands supported by the gopls tool on the
 // command line.
 // The command is specified by the first non flag argument.
-func (app *Application) commands() []tool.Application {
+func (app *Application) Commands() []tool.Application {
 	var commands []tool.Application
 	commands = append(commands, app.mainCommands()...)
 	commands = append(commands, app.featureCommands()...)
@@ -169,8 +245,8 @@
 	return []tool.Application{
 		&app.Serve,
 		&version{app: app},
-		&bug{},
-		&apiJSON{},
+		&bug{app: app},
+		&apiJSON{app: app},
 		&licenses{app: app},
 	}
 }
@@ -197,6 +273,7 @@
 		&symbols{app: app},
 		newWorkspace(app),
 		&workspaceSymbol{app: app},
+		&vulncheck{app: app},
 	}
 }
 
@@ -418,8 +495,8 @@
 	return results, nil
 }
 
-func (c *cmdClient) ApplyEdit(ctx context.Context, p *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResponse, error) {
-	return &protocol.ApplyWorkspaceEditResponse{Applied: false, FailureReason: "not implemented"}, nil
+func (c *cmdClient) ApplyEdit(ctx context.Context, p *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) {
+	return &protocol.ApplyWorkspaceEditResult{Applied: false, FailureReason: "not implemented"}, nil
 }
 
 func (c *cmdClient) PublishDiagnostics(ctx context.Context, p *protocol.PublishDiagnosticsParams) error {
@@ -497,7 +574,7 @@
 	p := &protocol.DidOpenTextDocumentParams{
 		TextDocument: protocol.TextDocumentItem{
 			URI:        protocol.URIFromSpanURI(uri),
-			LanguageID: source.DetectLanguage("", file.uri.Filename()).String(),
+			LanguageID: "go",
 			Version:    1,
 			Text:       string(file.mapper.Content),
 		},
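
The flag tags above now take a comma-separated list of names (for example flag:"v,verbose"), and printFlagDefaults groups flags that share the same flag.Value so aliases print on one line. Below is a minimal sketch of that aliasing pattern using only the standard flag package; it is an illustration, not the tool package's actual reflection-based flag registration.

// Sketch: register one flag.Value under both a short and a long name.
// printFlagDefaults above detects this by grouping flags whose Value is
// identical (seen[f.Value]).
package main

import (
	"flag"
	"fmt"
	"os"
)

func main() {
	fs := flag.NewFlagSet("gopls", flag.ExitOnError)

	verbose := fs.Bool("v", false, "verbose output")
	// Alias: register the same underlying Value under the long name,
	// so -v and -verbose set the same variable.
	fs.Var(fs.Lookup("v").Value, "verbose", "verbose output")

	_ = fs.Parse(os.Args[1:])
	fmt.Println("verbose:", *verbose)
}
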
diff --git a/internal/lsp/cmd/definition.go b/internal/lsp/cmd/definition.go
index e15540f..f3c71b6 100644
--- a/internal/lsp/cmd/definition.go
+++ b/internal/lsp/cmd/definition.go
@@ -43,18 +43,19 @@
 }
 
 func (d *definition) Name() string      { return "definition" }
-func (d *definition) Usage() string     { return "<position>" }
+func (d *definition) Parent() string    { return d.app.Name() }
+func (d *definition) Usage() string     { return "[definition-flags] <position>" }
 func (d *definition) ShortHelp() string { return "show declaration of selected identifier" }
 func (d *definition) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprintf(f.Output(), `
 Example: show the definition of the identifier at syntax at offset %[1]v in this file (flag.FlagSet):
 
-$ gopls definition internal/lsp/cmd/definition.go:%[1]v:%[2]v
-$ gopls definition internal/lsp/cmd/definition.go:#%[3]v
+	$ gopls definition internal/lsp/cmd/definition.go:%[1]v:%[2]v
+	$ gopls definition internal/lsp/cmd/definition.go:#%[3]v
 
-	gopls query definition flags are:
+definition-flags:
 `, exampleLine, exampleColumn, exampleOffset)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run performs the definition query as specified by args and prints the
diff --git a/internal/lsp/cmd/folding_range.go b/internal/lsp/cmd/folding_range.go
index f655f30..513c9bd 100644
--- a/internal/lsp/cmd/folding_range.go
+++ b/internal/lsp/cmd/folding_range.go
@@ -20,15 +20,16 @@
 }
 
 func (r *foldingRanges) Name() string      { return "folding_ranges" }
+func (r *foldingRanges) Parent() string    { return r.app.Name() }
 func (r *foldingRanges) Usage() string     { return "<file>" }
 func (r *foldingRanges) ShortHelp() string { return "display selected file's folding ranges" }
 func (r *foldingRanges) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
 
-  $ gopls folding_ranges helper/helper.go
+	$ gopls folding_ranges helper/helper.go
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (r *foldingRanges) Run(ctx context.Context, args ...string) error {
diff --git a/internal/lsp/cmd/format.go b/internal/lsp/cmd/format.go
index d1ecf56..2d0f3f7 100644
--- a/internal/lsp/cmd/format.go
+++ b/internal/lsp/cmd/format.go
@@ -19,15 +19,16 @@
 
 // format implements the format verb for gopls.
 type format struct {
-	Diff  bool `flag:"d" help:"display diffs instead of rewriting files"`
-	Write bool `flag:"w" help:"write result to (source) file instead of stdout"`
-	List  bool `flag:"l" help:"list files whose formatting differs from gofmt's"`
+	Diff  bool `flag:"d,diff" help:"display diffs instead of rewriting files"`
+	Write bool `flag:"w,write" help:"write result to (source) file instead of stdout"`
+	List  bool `flag:"l,list" help:"list files whose formatting differs from gofmt's"`
 
 	app *Application
 }
 
 func (c *format) Name() string      { return "format" }
-func (c *format) Usage() string     { return "<filerange>" }
+func (c *format) Parent() string    { return c.app.Name() }
+func (c *format) Usage() string     { return "[format-flags] <filerange>" }
 func (c *format) ShortHelp() string { return "format the code according to the go standard" }
 func (c *format) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
@@ -35,11 +36,11 @@
 
 Example: reformat this file:
 
-  $ gopls format -w internal/lsp/cmd/check.go
+	$ gopls format -w internal/lsp/cmd/check.go
 
-	gopls format flags are:
+format-flags:
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run performs the check on the files specified by args and prints the
diff --git a/internal/lsp/cmd/help_test.go b/internal/lsp/cmd/help_test.go
new file mode 100644
index 0000000..536d19d
--- /dev/null
+++ b/internal/lsp/cmd/help_test.go
@@ -0,0 +1,57 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cmd_test
+
+import (
+	"bytes"
+	"context"
+	"flag"
+	"io/ioutil"
+	"path/filepath"
+	"testing"
+
+	"golang.org/x/tools/internal/lsp/cmd"
+	"golang.org/x/tools/internal/testenv"
+	"golang.org/x/tools/internal/tool"
+)
+
+//go:generate go test -run Help -update-help-files
+
+var updateHelpFiles = flag.Bool("update-help-files", false, "Write out the help files instead of checking them")
+
+const appName = "gopls"
+
+func TestHelpFiles(t *testing.T) {
+	testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code.
+	app := cmd.New(appName, "", nil, nil)
+	ctx := context.Background()
+	for _, page := range append(app.Commands(), app) {
+		t.Run(page.Name(), func(t *testing.T) {
+			var buf bytes.Buffer
+			s := flag.NewFlagSet(page.Name(), flag.ContinueOnError)
+			s.SetOutput(&buf)
+			tool.Run(ctx, s, page, []string{"-h"})
+			name := page.Name()
+			if name == appName {
+				name = "usage"
+			}
+			helpFile := filepath.Join("usage", name+".hlp")
+			got := buf.Bytes()
+			if *updateHelpFiles {
+				if err := ioutil.WriteFile(helpFile, got, 0666); err != nil {
+					t.Errorf("Failed writing %v: %v", helpFile, err)
+				}
+				return
+			}
+			expect, err := ioutil.ReadFile(helpFile)
+			switch {
+			case err != nil:
+				t.Errorf("Missing help file %q", helpFile)
+			case !bytes.Equal(expect, got):
+				t.Errorf("Help file %q did not match, got:\n%q\nwant:\n%q", helpFile, string(got), string(expect))
+			}
+		})
+	}
+}
diff --git a/internal/lsp/cmd/highlight.go b/internal/lsp/cmd/highlight.go
index b60d513..a325a2d 100644
--- a/internal/lsp/cmd/highlight.go
+++ b/internal/lsp/cmd/highlight.go
@@ -21,17 +21,18 @@
 }
 
 func (r *highlight) Name() string      { return "highlight" }
+func (r *highlight) Parent() string    { return r.app.Name() }
 func (r *highlight) Usage() string     { return "<position>" }
 func (r *highlight) ShortHelp() string { return "display selected identifier's highlights" }
 func (r *highlight) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
 
-  $ # 1-indexed location (:line:column or :#offset) of the target identifier
-  $ gopls highlight helper/helper.go:8:6
-  $ gopls highlight helper/helper.go:#53
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls highlight helper/helper.go:8:6
+	$ gopls highlight helper/helper.go:#53
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (r *highlight) Run(ctx context.Context, args ...string) error {
diff --git a/internal/lsp/cmd/implementation.go b/internal/lsp/cmd/implementation.go
index 18eaa4e..7b42d99 100644
--- a/internal/lsp/cmd/implementation.go
+++ b/internal/lsp/cmd/implementation.go
@@ -21,17 +21,18 @@
 }
 
 func (i *implementation) Name() string      { return "implementation" }
+func (i *implementation) Parent() string    { return i.app.Name() }
 func (i *implementation) Usage() string     { return "<position>" }
 func (i *implementation) ShortHelp() string { return "display selected identifier's implementation" }
 func (i *implementation) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
 
-  $ # 1-indexed location (:line:column or :#offset) of the target identifier
-  $ gopls implementation helper/helper.go:8:6
-  $ gopls implementation helper/helper.go:#53
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls implementation helper/helper.go:8:6
+	$ gopls implementation helper/helper.go:#53
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (i *implementation) Run(ctx context.Context, args ...string) error {
diff --git a/internal/lsp/cmd/imports.go b/internal/lsp/cmd/imports.go
index a6d00e9..215c57f 100644
--- a/internal/lsp/cmd/imports.go
+++ b/internal/lsp/cmd/imports.go
@@ -20,24 +20,25 @@
 
 // imports implements the import verb for gopls.
 type imports struct {
-	Diff  bool `flag:"d" help:"display diffs instead of rewriting files"`
-	Write bool `flag:"w" help:"write result to (source) file instead of stdout"`
+	Diff  bool `flag:"d,diff" help:"display diffs instead of rewriting files"`
+	Write bool `flag:"w,write" help:"write result to (source) file instead of stdout"`
 
 	app *Application
 }
 
 func (t *imports) Name() string      { return "imports" }
-func (t *imports) Usage() string     { return "<filename>" }
+func (t *imports) Parent() string    { return t.app.Name() }
+func (t *imports) Usage() string     { return "[imports-flags] <filename>" }
 func (t *imports) ShortHelp() string { return "updates import statements" }
 func (t *imports) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprintf(f.Output(), `
 Example: update imports statements in a file:
 
-  $ gopls imports -w internal/lsp/cmd/check.go
+	$ gopls imports -w internal/lsp/cmd/check.go
 
-gopls imports flags are:
+imports-flags:
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run performs diagnostic checks on the file specified and either;
diff --git a/internal/lsp/cmd/info.go b/internal/lsp/cmd/info.go
index 87ba428..09f453e 100644
--- a/internal/lsp/cmd/info.go
+++ b/internal/lsp/cmd/info.go
@@ -21,32 +21,42 @@
 
 // version implements the version command.
 type version struct {
+	JSON bool `flag:"json" help:"outputs in json format."`
+
 	app *Application
 }
 
 func (v *version) Name() string      { return "version" }
+func (v *version) Parent() string    { return v.app.Name() }
 func (v *version) Usage() string     { return "" }
 func (v *version) ShortHelp() string { return "print the gopls version information" }
 func (v *version) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), ``)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run prints version information to stdout.
 func (v *version) Run(ctx context.Context, args ...string) error {
-	debug.PrintVersionInfo(ctx, os.Stdout, v.app.verbose(), debug.PlainText)
-	return nil
+	var mode = debug.PlainText
+	if v.JSON {
+		mode = debug.JSON
+	}
+
+	return debug.PrintVersionInfo(ctx, os.Stdout, v.app.verbose(), mode)
 }
 
 // bug implements the bug command.
-type bug struct{}
+type bug struct {
+	app *Application
+}
 
 func (b *bug) Name() string      { return "bug" }
+func (b *bug) Parent() string    { return b.app.Name() }
 func (b *bug) Usage() string     { return "" }
 func (b *bug) ShortHelp() string { return "report a bug in gopls" }
 func (b *bug) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), ``)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 const goplsBugPrefix = "x/tools/gopls: <DESCRIBE THE PROBLEM>"
@@ -84,14 +94,17 @@
 	return nil
 }
 
-type apiJSON struct{}
+type apiJSON struct {
+	app *Application
+}
 
 func (j *apiJSON) Name() string      { return "api-json" }
+func (j *apiJSON) Parent() string    { return j.app.Name() }
 func (j *apiJSON) Usage() string     { return "" }
 func (j *apiJSON) ShortHelp() string { return "print json describing gopls API" }
 func (j *apiJSON) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), ``)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (j *apiJSON) Run(ctx context.Context, args ...string) error {
@@ -108,11 +121,12 @@
 }
 
 func (l *licenses) Name() string      { return "licenses" }
+func (l *licenses) Parent() string    { return l.app.Name() }
 func (l *licenses) Usage() string     { return "" }
 func (l *licenses) ShortHelp() string { return "print licenses of included software" }
 func (l *licenses) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), ``)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 const licensePreamble = `
diff --git a/internal/lsp/cmd/links.go b/internal/lsp/cmd/links.go
index 1d5a669..d49aabb 100644
--- a/internal/lsp/cmd/links.go
+++ b/internal/lsp/cmd/links.go
@@ -25,17 +25,18 @@
 }
 
 func (l *links) Name() string      { return "links" }
-func (l *links) Usage() string     { return "<filename>" }
+func (l *links) Parent() string    { return l.app.Name() }
+func (l *links) Usage() string     { return "[links-flags] <filename>" }
 func (l *links) ShortHelp() string { return "list links in a file" }
 func (l *links) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprintf(f.Output(), `
 Example: list links contained within a file:
 
-  $ gopls links internal/lsp/cmd/check.go
+	$ gopls links internal/lsp/cmd/check.go
 
-gopls links flags are:
+links-flags:
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run finds all the links within a document
diff --git a/internal/lsp/cmd/prepare_rename.go b/internal/lsp/cmd/prepare_rename.go
index 2e6965e..aef0477 100644
--- a/internal/lsp/cmd/prepare_rename.go
+++ b/internal/lsp/cmd/prepare_rename.go
@@ -21,6 +21,7 @@
 }
 
 func (r *prepareRename) Name() string      { return "prepare_rename" }
+func (r *prepareRename) Parent() string    { return r.app.Name() }
 func (r *prepareRename) Usage() string     { return "<position>" }
 func (r *prepareRename) ShortHelp() string { return "test validity of a rename operation at location" }
 func (r *prepareRename) DetailedHelp(f *flag.FlagSet) {
@@ -31,7 +32,7 @@
 	$ gopls prepare_rename helper/helper.go:8:6
 	$ gopls prepare_rename helper/helper.go:#53
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // ErrInvalidRenamePosition is returned when prepareRename is run at a position that
@@ -72,7 +73,7 @@
 		return ErrInvalidRenamePosition
 	}
 
-	l := protocol.Location{Range: *result}
+	l := protocol.Location{Range: result.Range}
 	s, err := file.mapper.Span(l)
 	if err != nil {
 		return err
diff --git a/internal/lsp/cmd/references.go b/internal/lsp/cmd/references.go
index 5626019..0697d2e 100644
--- a/internal/lsp/cmd/references.go
+++ b/internal/lsp/cmd/references.go
@@ -17,25 +17,26 @@
 
 // references implements the references verb for gopls
 type references struct {
-	IncludeDeclaration bool `flag:"d" help:"include the declaration of the specified identifier in the results"`
+	IncludeDeclaration bool `flag:"d,declaration" help:"include the declaration of the specified identifier in the results"`
 
 	app *Application
 }
 
 func (r *references) Name() string      { return "references" }
-func (r *references) Usage() string     { return "<position>" }
+func (r *references) Parent() string    { return r.app.Name() }
+func (r *references) Usage() string     { return "[references-flags] <position>" }
 func (r *references) ShortHelp() string { return "display selected identifier's references" }
 func (r *references) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
 
-  $ # 1-indexed location (:line:column or :#offset) of the target identifier
-  $ gopls references helper/helper.go:8:6
-  $ gopls references helper/helper.go:#53
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls references helper/helper.go:8:6
+	$ gopls references helper/helper.go:#53
 
-  gopls references flags are:
+references-flags:
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (r *references) Run(ctx context.Context, args ...string) error {
diff --git a/internal/lsp/cmd/remote.go b/internal/lsp/cmd/remote.go
index 86c2299..f711135 100644
--- a/internal/lsp/cmd/remote.go
+++ b/internal/lsp/cmd/remote.go
@@ -18,6 +18,7 @@
 )
 
 type remote struct {
+	app *Application
 	subcommands
 
 	// For backward compatibility, allow aliasing this command (it was previously
@@ -30,6 +31,7 @@
 
 func newRemote(app *Application, alias string) *remote {
 	return &remote{
+		app: app,
 		subcommands: subcommands{
 			&listSessions{app: app},
 			&startDebugging{app: app},
@@ -45,6 +47,8 @@
 	return "remote"
 }
 
+func (r *remote) Parent() string { return r.app.Name() }
+
 func (r *remote) ShortHelp() string {
 	short := "interact with the gopls daemon"
 	if r.alias != "" {
@@ -58,8 +62,9 @@
 	app *Application
 }
 
-func (c *listSessions) Name() string  { return "sessions" }
-func (c *listSessions) Usage() string { return "" }
+func (c *listSessions) Name() string   { return "sessions" }
+func (c *listSessions) Parent() string { return c.app.Name() }
+func (c *listSessions) Usage() string  { return "" }
 func (c *listSessions) ShortHelp() string {
 	return "print information about current gopls sessions"
 }
@@ -80,7 +85,7 @@
 
 func (c *listSessions) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), listSessionsExamples)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (c *listSessions) Run(ctx context.Context, args ...string) error {
@@ -126,7 +131,7 @@
 
 func (c *startDebugging) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), startDebuggingExamples)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (c *startDebugging) Run(ctx context.Context, args ...string) error {
diff --git a/internal/lsp/cmd/rename.go b/internal/lsp/cmd/rename.go
index 5742082..b0a22a1 100644
--- a/internal/lsp/cmd/rename.go
+++ b/internal/lsp/cmd/rename.go
@@ -23,27 +23,28 @@
 
 // rename implements the rename verb for gopls.
 type rename struct {
-	Diff     bool `flag:"d" help:"display diffs instead of rewriting files"`
-	Write    bool `flag:"w" help:"write result to (source) file instead of stdout"`
+	Diff     bool `flag:"d,diff" help:"display diffs instead of rewriting files"`
+	Write    bool `flag:"w,write" help:"write result to (source) file instead of stdout"`
 	Preserve bool `flag:"preserve" help:"preserve original files"`
 
 	app *Application
 }
 
 func (r *rename) Name() string      { return "rename" }
-func (r *rename) Usage() string     { return "<position> <new name>" }
+func (r *rename) Parent() string    { return r.app.Name() }
+func (r *rename) Usage() string     { return "[rename-flags] <position> <name>" }
 func (r *rename) ShortHelp() string { return "rename selected identifier" }
 func (r *rename) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
 
-  $ # 1-based location (:line:column or :#position) of the thing to change
-  $ gopls rename helper/helper.go:8:6 Foo
-  $ gopls rename helper/helper.go:#53 Foo
+	$ # 1-based location (:line:column or :#position) of the thing to change
+	$ gopls rename helper/helper.go:8:6 Foo
+	$ gopls rename helper/helper.go:#53 Foo
 
-	gopls rename flags are:
+rename-flags:
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run renames the specified identifier and either;
diff --git a/internal/lsp/cmd/semantictokens.go b/internal/lsp/cmd/semantictokens.go
index e8f9018..120f91d 100644
--- a/internal/lsp/cmd/semantictokens.go
+++ b/internal/lsp/cmd/semantictokens.go
@@ -14,7 +14,6 @@
 	"io/ioutil"
 	"log"
 	"os"
-	"runtime"
 	"unicode/utf8"
 
 	"golang.org/x/tools/internal/lsp"
@@ -53,22 +52,16 @@
 var colmap *protocol.ColumnMapper
 
 func (c *semtok) Name() string      { return "semtok" }
+func (c *semtok) Parent() string    { return c.app.Name() }
 func (c *semtok) Usage() string     { return "<filename>" }
 func (c *semtok) ShortHelp() string { return "show semantic tokens for the specified file" }
 func (c *semtok) DetailedHelp(f *flag.FlagSet) {
-	for i := 1; ; i++ {
-		_, f, l, ok := runtime.Caller(i)
-		if !ok {
-			break
-		}
-		log.Printf("%d: %s:%d", i, f, l)
-	}
 	fmt.Fprint(f.Output(), `
 Example: show the semantic tokens for this file:
 
-  $ gopls semtok internal/lsp/cmd/semtok.go
+	$ gopls semtok internal/lsp/cmd/semtok.go
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run performs the semtok on the files specified by args and prints the
diff --git a/internal/lsp/cmd/serve.go b/internal/lsp/cmd/serve.go
index 4164b58..f6e2683 100644
--- a/internal/lsp/cmd/serve.go
+++ b/internal/lsp/cmd/serve.go
@@ -41,19 +41,21 @@
 	app *Application
 }
 
-func (s *Serve) Name() string  { return "serve" }
-func (s *Serve) Usage() string { return "" }
+func (s *Serve) Name() string   { return "serve" }
+func (s *Serve) Parent() string { return s.app.Name() }
+func (s *Serve) Usage() string  { return "[server-flags]" }
 func (s *Serve) ShortHelp() string {
 	return "run a server for Go code using the Language Server Protocol"
 }
 func (s *Serve) DetailedHelp(f *flag.FlagSet) {
-	fmt.Fprint(f.Output(), `
+	fmt.Fprint(f.Output(), `  gopls [flags] [server-flags]
+
 The server communicates using JSONRPC2 on stdin and stdout, and is intended to be run directly as
 a child of an editor process.
 
-gopls server flags are:
+server-flags:
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (s *Serve) remoteArgs(network, address string) []string {
diff --git a/internal/lsp/cmd/signature.go b/internal/lsp/cmd/signature.go
index 0a7a599..db94843 100644
--- a/internal/lsp/cmd/signature.go
+++ b/internal/lsp/cmd/signature.go
@@ -20,17 +20,18 @@
 }
 
 func (r *signature) Name() string      { return "signature" }
+func (r *signature) Parent() string    { return r.app.Name() }
 func (r *signature) Usage() string     { return "<position>" }
 func (r *signature) ShortHelp() string { return "display selected identifier's signature" }
 func (r *signature) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
 
-  $ # 1-indexed location (:line:column or :#offset) of the target identifier
-  $ gopls signature helper/helper.go:8:6
-  $ gopls signature helper/helper.go:#53
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls signature helper/helper.go:8:6
+	$ gopls signature helper/helper.go:#53
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (r *signature) Run(ctx context.Context, args ...string) error {
diff --git a/internal/lsp/cmd/subcommands.go b/internal/lsp/cmd/subcommands.go
index 5af5923..deac5c8 100644
--- a/internal/lsp/cmd/subcommands.go
+++ b/internal/lsp/cmd/subcommands.go
@@ -8,6 +8,7 @@
 	"context"
 	"flag"
 	"fmt"
+	"text/tabwriter"
 
 	"golang.org/x/tools/internal/tool"
 )
@@ -17,14 +18,16 @@
 type subcommands []tool.Application
 
 func (s subcommands) DetailedHelp(f *flag.FlagSet) {
-	fmt.Fprint(f.Output(), "\nsubcommands:\n")
+	w := tabwriter.NewWriter(f.Output(), 0, 0, 2, ' ', 0)
+	defer w.Flush()
+	fmt.Fprint(w, "\nSubcommand:\n")
 	for _, c := range s {
-		fmt.Fprintf(f.Output(), "  %s: %s\n", c.Name(), c.ShortHelp())
+		fmt.Fprintf(w, "  %s\t%s\n", c.Name(), c.ShortHelp())
 	}
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
-func (s subcommands) Usage() string { return "<subcommand> [args...]" }
+func (s subcommands) Usage() string { return "<subcommand> [arg]..." }
 
 func (s subcommands) Run(ctx context.Context, args ...string) error {
 	if len(args) == 0 {
@@ -33,7 +36,8 @@
 	command, args := args[0], args[1:]
 	for _, c := range s {
 		if c.Name() == command {
-			return tool.Run(ctx, c, args)
+			s := flag.NewFlagSet(c.Name(), flag.ExitOnError)
+			return tool.Run(ctx, s, c, args)
 		}
 	}
 	return tool.CommandLineErrorf("unknown subcommand %v", command)
diff --git a/internal/lsp/cmd/suggested_fix.go b/internal/lsp/cmd/suggested_fix.go
index 51ab4db..df14631 100644
--- a/internal/lsp/cmd/suggested_fix.go
+++ b/internal/lsp/cmd/suggested_fix.go
@@ -20,25 +20,25 @@
 
 // suggestedFix implements the fix verb for gopls.
 type suggestedFix struct {
-	Diff  bool `flag:"d" help:"display diffs instead of rewriting files"`
-	Write bool `flag:"w" help:"write result to (source) file instead of stdout"`
-	All   bool `flag:"a" help:"apply all fixes, not just preferred fixes"`
+	Diff  bool `flag:"d,diff" help:"display diffs instead of rewriting files"`
+	Write bool `flag:"w,write" help:"write result to (source) file instead of stdout"`
+	All   bool `flag:"a,all" help:"apply all fixes, not just preferred fixes"`
 
 	app *Application
 }
 
 func (s *suggestedFix) Name() string      { return "fix" }
-func (s *suggestedFix) Usage() string     { return "<filename>" }
+func (s *suggestedFix) Parent() string    { return s.app.Name() }
+func (s *suggestedFix) Usage() string     { return "[fix-flags] <filename>" }
 func (s *suggestedFix) ShortHelp() string { return "apply suggested fixes" }
 func (s *suggestedFix) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprintf(f.Output(), `
-Example: apply suggested fixes for this file:
+Example: apply suggested fixes for this file
+	$ gopls fix -w internal/lsp/cmd/check.go
 
-  $ gopls fix -w internal/lsp/cmd/check.go
-
-gopls fix flags are:
+fix-flags:
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 // Run performs diagnostic checks on the file specified and either;
diff --git a/internal/lsp/cmd/symbols.go b/internal/lsp/cmd/symbols.go
index b4a503b..b43a6dc 100644
--- a/internal/lsp/cmd/symbols.go
+++ b/internal/lsp/cmd/symbols.go
@@ -22,14 +22,15 @@
 }
 
 func (r *symbols) Name() string      { return "symbols" }
+func (r *symbols) Parent() string    { return r.app.Name() }
 func (r *symbols) Usage() string     { return "<file>" }
 func (r *symbols) ShortHelp() string { return "display selected file's symbols" }
 func (r *symbols) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
-  $ gopls symbols helper/helper.go
+	$ gopls symbols helper/helper.go
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 func (r *symbols) Run(ctx context.Context, args ...string) error {
 	if len(args) != 1 {
diff --git a/internal/lsp/cmd/test/cmdtest.go b/internal/lsp/cmd/test/cmdtest.go
index 2e92726..312f7b8 100644
--- a/internal/lsp/cmd/test/cmdtest.go
+++ b/internal/lsp/cmd/test/cmdtest.go
@@ -8,6 +8,7 @@
 import (
 	"bytes"
 	"context"
+	"flag"
 	"fmt"
 	"io"
 	"os"
@@ -108,6 +109,10 @@
 	//TODO: import addition not supported on command line
 }
 
+func (r *runner) Hover(t *testing.T, spn span.Span, info string) {
+	//TODO: hovering not supported on command line
+}
+
 func (r *runner) runGoplsCmd(t testing.TB, args ...string) (string, string) {
 	rStdout, wStdout, err := os.Pipe()
 	if err != nil {
@@ -133,7 +138,8 @@
 	os.Stdout, os.Stderr = wStdout, wStderr
 	app := cmd.New("gopls-test", r.data.Config.Dir, r.data.Exported.Config.Env, r.options)
 	remote := r.remote
-	err = tool.Run(tests.Context(t),
+	s := flag.NewFlagSet(app.Name(), flag.ExitOnError)
+	err = tool.Run(tests.Context(t), s,
 		app,
 		append([]string{fmt.Sprintf("-remote=internal@%s", remote)}, args...))
 	if err != nil {
diff --git a/internal/lsp/cmd/test/signature.go b/internal/lsp/cmd/test/signature.go
index 0c77da1..f6bdaeb 100644
--- a/internal/lsp/cmd/test/signature.go
+++ b/internal/lsp/cmd/test/signature.go
@@ -9,6 +9,7 @@
 	"testing"
 
 	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/tests"
 	"golang.org/x/tools/internal/span"
 )
 
@@ -27,7 +28,7 @@
 	expect := string(r.data.Golden(goldenTag, filename, func() ([]byte, error) {
 		return []byte(got), nil
 	}))
-	if expect != got {
+	if tests.NormalizeAny(expect) != tests.NormalizeAny(got) {
 		t.Errorf("signature failed for %s expected:\n%q\ngot:\n%q'", filename, expect, got)
 	}
 }
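The golden comparison above now passes both sides through tests.NormalizeAny, presumably so that signatures printed as "any" by Go 1.18 toolchains still match goldens recorded with "interface{}". The snippet below approximates that idea; the real helper lives in internal/lsp/tests and may differ in detail.

package main

import (
	"fmt"
	"strings"
)

// normalizeAny rewrites "interface{}" to "any" so output produced by different
// Go versions can be compared against a single golden string.
func normalizeAny(s string) string {
	return strings.ReplaceAll(s, "interface{}", "any")
}

func main() {
	got := "Fprint(w io.Writer, a ...interface{}) (n int, err error)"
	want := "Fprint(w io.Writer, a ...any) (n int, err error)"
	fmt.Println(normalizeAny(got) == normalizeAny(want)) // true
}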
diff --git a/internal/lsp/cmd/test/suggested_fix.go b/internal/lsp/cmd/test/suggested_fix.go
index 160dcdf..c819e05 100644
--- a/internal/lsp/cmd/test/suggested_fix.go
+++ b/internal/lsp/cmd/test/suggested_fix.go
@@ -24,7 +24,7 @@
 	args = append(args, actionKinds...)
 	got, stderr := r.NormalizeGoplsCmd(t, args...)
 	if stderr == "ExecuteCommand is not yet supported on the command line" {
-		t.Skipf(stderr)
+		return // don't skip to keep the summary counts correct
 	}
 	want := string(r.data.Golden("suggestedfix_"+tests.SpanName(spn), filename, func() ([]byte, error) {
 		return []byte(got), nil
diff --git a/internal/lsp/cmd/usage/api-json.hlp b/internal/lsp/cmd/usage/api-json.hlp
new file mode 100644
index 0000000..cb9fbfb
--- /dev/null
+++ b/internal/lsp/cmd/usage/api-json.hlp
@@ -0,0 +1,4 @@
+print json describing gopls API
+
+Usage:
+  gopls [flags] api-json
diff --git a/internal/lsp/cmd/usage/bug.hlp b/internal/lsp/cmd/usage/bug.hlp
new file mode 100644
index 0000000..772d54d
--- /dev/null
+++ b/internal/lsp/cmd/usage/bug.hlp
@@ -0,0 +1,4 @@
+report a bug in gopls
+
+Usage:
+  gopls [flags] bug
diff --git a/internal/lsp/cmd/usage/call_hierarchy.hlp b/internal/lsp/cmd/usage/call_hierarchy.hlp
new file mode 100644
index 0000000..07fccc8
--- /dev/null
+++ b/internal/lsp/cmd/usage/call_hierarchy.hlp
@@ -0,0 +1,10 @@
+display selected identifier's call hierarchy
+
+Usage:
+  gopls [flags] call_hierarchy <position>
+
+Example:
+
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls call_hierarchy helper/helper.go:8:6
+	$ gopls call_hierarchy helper/helper.go:#53
diff --git a/internal/lsp/cmd/usage/check.hlp b/internal/lsp/cmd/usage/check.hlp
new file mode 100644
index 0000000..ba89588
--- /dev/null
+++ b/internal/lsp/cmd/usage/check.hlp
@@ -0,0 +1,8 @@
+show diagnostic results for the specified file
+
+Usage:
+  gopls [flags] check <filename>
+
+Example: show the diagnostic results of this file:
+
+	$ gopls check internal/lsp/cmd/check.go
diff --git a/internal/lsp/cmd/usage/definition.hlp b/internal/lsp/cmd/usage/definition.hlp
new file mode 100644
index 0000000..500e6c9
--- /dev/null
+++ b/internal/lsp/cmd/usage/definition.hlp
@@ -0,0 +1,15 @@
+show declaration of selected identifier
+
+Usage:
+  gopls [flags] definition [definition-flags] <position>
+
+Example: show the definition of the identifier at syntax at offset 44 in this file (flag.FlagSet):
+
+	$ gopls definition internal/lsp/cmd/definition.go:44:47
+	$ gopls definition internal/lsp/cmd/definition.go:#1270
+
+definition-flags:
+  -json
+    	emit output in JSON format
+  -markdown
+    	support markdown in responses
diff --git a/internal/lsp/cmd/usage/fix.hlp b/internal/lsp/cmd/usage/fix.hlp
new file mode 100644
index 0000000..4789a6c
--- /dev/null
+++ b/internal/lsp/cmd/usage/fix.hlp
@@ -0,0 +1,15 @@
+apply suggested fixes
+
+Usage:
+  gopls [flags] fix [fix-flags] <filename>
+
+Example: apply suggested fixes for this file
+	$ gopls fix -w internal/lsp/cmd/check.go
+
+fix-flags:
+  -a,-all
+    	apply all fixes, not just preferred fixes
+  -d,-diff
+    	display diffs instead of rewriting files
+  -w,-write
+    	write result to (source) file instead of stdout
diff --git a/internal/lsp/cmd/usage/folding_ranges.hlp b/internal/lsp/cmd/usage/folding_ranges.hlp
new file mode 100644
index 0000000..4af2da6
--- /dev/null
+++ b/internal/lsp/cmd/usage/folding_ranges.hlp
@@ -0,0 +1,8 @@
+display selected file's folding ranges
+
+Usage:
+  gopls [flags] folding_ranges <file>
+
+Example:
+
+	$ gopls folding_ranges helper/helper.go
diff --git a/internal/lsp/cmd/usage/format.hlp b/internal/lsp/cmd/usage/format.hlp
new file mode 100644
index 0000000..7ef0bbe
--- /dev/null
+++ b/internal/lsp/cmd/usage/format.hlp
@@ -0,0 +1,18 @@
+format the code according to the go standard
+
+Usage:
+  gopls [flags] format [format-flags] <filerange>
+
+The arguments supplied may be simple file names, or ranges within files.
+
+Example: reformat this file:
+
+	$ gopls format -w internal/lsp/cmd/check.go
+
+format-flags:
+  -d,-diff
+    	display diffs instead of rewriting files
+  -l,-list
+    	list files whose formatting differs from gofmt's
+  -w,-write
+    	write result to (source) file instead of stdout
diff --git a/internal/lsp/cmd/usage/highlight.hlp b/internal/lsp/cmd/usage/highlight.hlp
new file mode 100644
index 0000000..e128eb7
--- /dev/null
+++ b/internal/lsp/cmd/usage/highlight.hlp
@@ -0,0 +1,10 @@
+display selected identifier's highlights
+
+Usage:
+  gopls [flags] highlight <position>
+
+Example:
+
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls highlight helper/helper.go:8:6
+	$ gopls highlight helper/helper.go:#53
diff --git a/internal/lsp/cmd/usage/implementation.hlp b/internal/lsp/cmd/usage/implementation.hlp
new file mode 100644
index 0000000..09414f1
--- /dev/null
+++ b/internal/lsp/cmd/usage/implementation.hlp
@@ -0,0 +1,10 @@
+display selected identifier's implementation
+
+Usage:
+  gopls [flags] implementation <position>
+
+Example:
+
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls implementation helper/helper.go:8:6
+	$ gopls implementation helper/helper.go:#53
diff --git a/internal/lsp/cmd/usage/imports.hlp b/internal/lsp/cmd/usage/imports.hlp
new file mode 100644
index 0000000..295f4da
--- /dev/null
+++ b/internal/lsp/cmd/usage/imports.hlp
@@ -0,0 +1,14 @@
+updates import statements
+
+Usage:
+  gopls [flags] imports [imports-flags] <filename>
+
+Example: update imports statements in a file:
+
+	$ gopls imports -w internal/lsp/cmd/check.go
+
+imports-flags:
+  -d,-diff
+    	display diffs instead of rewriting files
+  -w,-write
+    	write result to (source) file instead of stdout
diff --git a/internal/lsp/cmd/usage/inspect.hlp b/internal/lsp/cmd/usage/inspect.hlp
new file mode 100644
index 0000000..3d0a0f3
--- /dev/null
+++ b/internal/lsp/cmd/usage/inspect.hlp
@@ -0,0 +1,8 @@
+interact with the gopls daemon (deprecated: use 'remote')
+
+Usage:
+  gopls [flags] inspect <subcommand> [arg]...
+
+Subcommand:
+  sessions  print information about current gopls sessions
+  debug     start the debug server
diff --git a/internal/lsp/cmd/usage/licenses.hlp b/internal/lsp/cmd/usage/licenses.hlp
new file mode 100644
index 0000000..ab60ebc
--- /dev/null
+++ b/internal/lsp/cmd/usage/licenses.hlp
@@ -0,0 +1,4 @@
+print licenses of included software
+
+Usage:
+  gopls [flags] licenses
diff --git a/internal/lsp/cmd/usage/links.hlp b/internal/lsp/cmd/usage/links.hlp
new file mode 100644
index 0000000..7f7612c
--- /dev/null
+++ b/internal/lsp/cmd/usage/links.hlp
@@ -0,0 +1,12 @@
+list links in a file
+
+Usage:
+  gopls [flags] links [links-flags] <filename>
+
+Example: list links contained within a file:
+
+	$ gopls links internal/lsp/cmd/check.go
+
+links-flags:
+  -json
+    	emit document links in JSON format
diff --git a/internal/lsp/cmd/usage/prepare_rename.hlp b/internal/lsp/cmd/usage/prepare_rename.hlp
new file mode 100644
index 0000000..7f8a6f3
--- /dev/null
+++ b/internal/lsp/cmd/usage/prepare_rename.hlp
@@ -0,0 +1,10 @@
+test validity of a rename operation at location
+
+Usage:
+  gopls [flags] prepare_rename <position>
+
+Example:
+
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls prepare_rename helper/helper.go:8:6
+	$ gopls prepare_rename helper/helper.go:#53
diff --git a/internal/lsp/cmd/usage/references.hlp b/internal/lsp/cmd/usage/references.hlp
new file mode 100644
index 0000000..c55ef03
--- /dev/null
+++ b/internal/lsp/cmd/usage/references.hlp
@@ -0,0 +1,14 @@
+display selected identifier's references
+
+Usage:
+  gopls [flags] references [references-flags] <position>
+
+Example:
+
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls references helper/helper.go:8:6
+	$ gopls references helper/helper.go:#53
+
+references-flags:
+  -d,-declaration
+    	include the declaration of the specified identifier in the results
diff --git a/internal/lsp/cmd/usage/remote.hlp b/internal/lsp/cmd/usage/remote.hlp
new file mode 100644
index 0000000..dd6034f
--- /dev/null
+++ b/internal/lsp/cmd/usage/remote.hlp
@@ -0,0 +1,8 @@
+interact with the gopls daemon
+
+Usage:
+  gopls [flags] remote <subcommand> [arg]...
+
+Subcommand:
+  sessions  print information about current gopls sessions
+  debug     start the debug server
diff --git a/internal/lsp/cmd/usage/rename.hlp b/internal/lsp/cmd/usage/rename.hlp
new file mode 100644
index 0000000..ae58cbf
--- /dev/null
+++ b/internal/lsp/cmd/usage/rename.hlp
@@ -0,0 +1,18 @@
+rename selected identifier
+
+Usage:
+  gopls [flags] rename [rename-flags] <position> <name>
+
+Example:
+
+	$ # 1-based location (:line:column or :#position) of the thing to change
+	$ gopls rename helper/helper.go:8:6 Foo
+	$ gopls rename helper/helper.go:#53 Foo
+
+rename-flags:
+  -d,-diff
+    	display diffs instead of rewriting files
+  -preserve
+    	preserve original files
+  -w,-write
+    	write result to (source) file instead of stdout
diff --git a/internal/lsp/cmd/usage/semtok.hlp b/internal/lsp/cmd/usage/semtok.hlp
new file mode 100644
index 0000000..459ed59
--- /dev/null
+++ b/internal/lsp/cmd/usage/semtok.hlp
@@ -0,0 +1,8 @@
+show semantic tokens for the specified file
+
+Usage:
+  gopls [flags] semtok <filename>
+
+Example: show the semantic tokens for this file:
+
+	$ gopls semtok internal/lsp/cmd/semtok.go
diff --git a/internal/lsp/cmd/usage/serve.hlp b/internal/lsp/cmd/usage/serve.hlp
new file mode 100644
index 0000000..370cbce
--- /dev/null
+++ b/internal/lsp/cmd/usage/serve.hlp
@@ -0,0 +1,30 @@
+run a server for Go code using the Language Server Protocol
+
+Usage:
+  gopls [flags] serve [server-flags]
+  gopls [flags] [server-flags]
+
+The server communicates using JSONRPC2 on stdin and stdout, and is intended to be run directly as
+a child of an editor process.
+
+server-flags:
+  -debug=string
+    	serve debug information on the supplied address
+  -listen=string
+    	address on which to listen for remote connections. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. Otherwise, TCP is used.
+  -listen.timeout=duration
+    	when used with -listen, shut down the server when there are no connected clients for this duration
+  -logfile=string
+    	filename to log to. if value is "auto", then logging to a default output file is enabled
+  -mode=string
+    	no effect
+  -port=int
+    	port on which to run gopls for debugging purposes
+  -remote.debug=string
+    	when used with -remote=auto, the -debug value used to start the daemon
+  -remote.listen.timeout=duration
+    	when used with -remote=auto, the -listen.timeout value used to start the daemon (default 1m0s)
+  -remote.logfile=string
+    	when used with -remote=auto, the -logfile value used to start the daemon
+  -rpc.trace
+    	print the full rpc trace in lsp inspector format
diff --git a/internal/lsp/cmd/usage/signature.hlp b/internal/lsp/cmd/usage/signature.hlp
new file mode 100644
index 0000000..f9fd0bf
--- /dev/null
+++ b/internal/lsp/cmd/usage/signature.hlp
@@ -0,0 +1,10 @@
+display selected identifier's signature
+
+Usage:
+  gopls [flags] signature <position>
+
+Example:
+
+	$ # 1-indexed location (:line:column or :#offset) of the target identifier
+	$ gopls signature helper/helper.go:8:6
+	$ gopls signature helper/helper.go:#53
diff --git a/internal/lsp/cmd/usage/symbols.hlp b/internal/lsp/cmd/usage/symbols.hlp
new file mode 100644
index 0000000..2aa36aa
--- /dev/null
+++ b/internal/lsp/cmd/usage/symbols.hlp
@@ -0,0 +1,7 @@
+display selected file's symbols
+
+Usage:
+  gopls [flags] symbols <file>
+
+Example:
+	$ gopls symbols helper/helper.go
diff --git a/internal/lsp/cmd/usage/usage.hlp b/internal/lsp/cmd/usage/usage.hlp
new file mode 100644
index 0000000..1d0fb8d
--- /dev/null
+++ b/internal/lsp/cmd/usage/usage.hlp
@@ -0,0 +1,77 @@
+
+gopls is a Go language server.
+
+It is typically used with an editor to provide language features. When no
+command is specified, gopls will default to the 'serve' command. The language
+features can also be accessed via the gopls command-line interface.
+
+Usage:
+  gopls help [<subject>]
+
+Command:
+
+Main                
+  serve             run a server for Go code using the Language Server Protocol
+  version           print the gopls version information
+  bug               report a bug in gopls
+  api-json          print json describing gopls API
+  licenses          print licenses of included software
+                    
+Features            
+  call_hierarchy    display selected identifier's call hierarchy
+  check             show diagnostic results for the specified file
+  definition        show declaration of selected identifier
+  folding_ranges    display selected file's folding ranges
+  format            format the code according to the go standard
+  highlight         display selected identifier's highlights
+  implementation    display selected identifier's implementation
+  imports           updates import statements
+  remote            interact with the gopls daemon
+  inspect           interact with the gopls daemon (deprecated: use 'remote')
+  links             list links in a file
+  prepare_rename    test validity of a rename operation at location
+  references        display selected identifier's references
+  rename            rename selected identifier
+  semtok            show semantic tokens for the specified file
+  signature         display selected identifier's signature
+  fix               apply suggested fixes
+  symbols           display selected file's symbols
+  workspace         manage the gopls workspace (experimental: under development)
+  workspace_symbol  search symbols in workspace
+  vulncheck         run experimental vulncheck analysis (experimental: under development)
+
+flags:
+  -debug=string
+    	serve debug information on the supplied address
+  -listen=string
+    	address on which to listen for remote connections. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. Otherwise, TCP is used.
+  -listen.timeout=duration
+    	when used with -listen, shut down the server when there are no connected clients for this duration
+  -logfile=string
+    	filename to log to. if value is "auto", then logging to a default output file is enabled
+  -mode=string
+    	no effect
+  -ocagent=string
+    	the address of the ocagent (e.g. http://localhost:55678), or off (default "off")
+  -port=int
+    	port on which to run gopls for debugging purposes
+  -profile.cpu=string
+    	write CPU profile to this file
+  -profile.mem=string
+    	write memory profile to this file
+  -profile.trace=string
+    	write trace log to this file
+  -remote=string
+    	forward all commands to a remote lsp specified by this flag. With no special prefix, this is assumed to be a TCP address. If prefixed by 'unix;', the subsequent address is assumed to be a unix domain socket. If 'auto', or prefixed by 'auto;', the remote address is automatically resolved based on the executing environment.
+  -remote.debug=string
+    	when used with -remote=auto, the -debug value used to start the daemon
+  -remote.listen.timeout=duration
+    	when used with -remote=auto, the -listen.timeout value used to start the daemon (default 1m0s)
+  -remote.logfile=string
+    	when used with -remote=auto, the -logfile value used to start the daemon
+  -rpc.trace
+    	print the full rpc trace in lsp inspector format
+  -v,-verbose
+    	verbose output
+  -vv,-veryverbose
+    	very verbose output
diff --git a/internal/lsp/cmd/usage/version.hlp b/internal/lsp/cmd/usage/version.hlp
new file mode 100644
index 0000000..3a09dde
--- /dev/null
+++ b/internal/lsp/cmd/usage/version.hlp
@@ -0,0 +1,6 @@
+print the gopls version information
+
+Usage:
+  gopls [flags] version
+  -json
+    	outputs in json format.
diff --git a/internal/lsp/cmd/usage/vulncheck.hlp b/internal/lsp/cmd/usage/vulncheck.hlp
new file mode 100644
index 0000000..4bfdc4b
--- /dev/null
+++ b/internal/lsp/cmd/usage/vulncheck.hlp
@@ -0,0 +1,9 @@
+run experimental vulncheck analysis (experimental: under development)
+
+Usage:
+  gopls [flags] vulncheck
+
+	WARNING: this command is experimental.
+
+	Example:
+	$ gopls vulncheck <packages>
diff --git a/internal/lsp/cmd/usage/workspace.hlp b/internal/lsp/cmd/usage/workspace.hlp
new file mode 100644
index 0000000..912cf29
--- /dev/null
+++ b/internal/lsp/cmd/usage/workspace.hlp
@@ -0,0 +1,7 @@
+manage the gopls workspace (experimental: under development)
+
+Usage:
+  gopls [flags] workspace <subcommand> [arg]...
+
+Subcommand:
+  generate  generate a gopls.mod file for a workspace
diff --git a/internal/lsp/cmd/usage/workspace_symbol.hlp b/internal/lsp/cmd/usage/workspace_symbol.hlp
new file mode 100644
index 0000000..a61b47b
--- /dev/null
+++ b/internal/lsp/cmd/usage/workspace_symbol.hlp
@@ -0,0 +1,13 @@
+search symbols in workspace
+
+Usage:
+  gopls [flags] workspace_symbol [workspace_symbol-flags] <query>
+
+Example:
+
+	$ gopls workspace_symbol -matcher fuzzy 'wsymbols'
+
+workspace_symbol-flags:
+  -matcher=string
+    	specifies the type of matcher: fuzzy, caseSensitive, or caseInsensitive.
+    	The default is caseInsensitive.
diff --git a/internal/lsp/cmd/vulncheck.go b/internal/lsp/cmd/vulncheck.go
new file mode 100644
index 0000000..adf59ce
--- /dev/null
+++ b/internal/lsp/cmd/vulncheck.go
@@ -0,0 +1,79 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cmd
+
+import (
+	"context"
+	"encoding/json"
+	"flag"
+	"fmt"
+	"os"
+
+	"golang.org/x/tools/internal/lsp/command"
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/tool"
+)
+
+// vulncheck implements the vulncheck command.
+type vulncheck struct {
+	app *Application
+}
+
+func (v *vulncheck) Name() string   { return "vulncheck" }
+func (v *vulncheck) Parent() string { return v.app.Name() }
+func (v *vulncheck) Usage() string  { return "" }
+func (v *vulncheck) ShortHelp() string {
+	return "run experimental vulncheck analysis (experimental: under development)"
+}
+func (v *vulncheck) DetailedHelp(f *flag.FlagSet) {
+	fmt.Fprint(f.Output(), `
+	WARNING: this command is experimental.
+
+	Example:
+	$ gopls vulncheck <packages>
+`)
+	printFlagDefaults(f)
+}
+
+func (v *vulncheck) Run(ctx context.Context, args ...string) error {
+	if len(args) > 1 {
+		return tool.CommandLineErrorf("vulncheck accepts at most one package pattern")
+	}
+	pattern := "."
+	if len(args) == 1 {
+		pattern = args[0]
+	}
+
+	conn, err := v.app.connect(ctx)
+	if err != nil {
+		return err
+	}
+	defer conn.terminate(ctx)
+
+	cwd, err := os.Getwd()
+	if err != nil {
+		return err
+	}
+
+	cmd, err := command.NewRunVulncheckExpCommand("", command.VulncheckArgs{
+		Dir:     protocol.URIFromPath(cwd),
+		Pattern: pattern,
+	})
+	if err != nil {
+		return err
+	}
+
+	params := &protocol.ExecuteCommandParams{Command: cmd.Command, Arguments: cmd.Arguments}
+	res, err := conn.ExecuteCommand(ctx, params)
+	if err != nil {
+		return fmt.Errorf("executing server command: %v", err)
+	}
+	data, err := json.MarshalIndent(res, " ", " ")
+	if err != nil {
+		return fmt.Errorf("failed to decode results: %v", err)
+	}
+	fmt.Printf("%s\n", data)
+	return nil
+}
diff --git a/internal/lsp/cmd/workspace.go b/internal/lsp/cmd/workspace.go
index 61757d2..c0ddd9e 100644
--- a/internal/lsp/cmd/workspace.go
+++ b/internal/lsp/cmd/workspace.go
@@ -19,18 +19,21 @@
 // used for manipulating the workspace mod file, rather than editing it
 // manually.
 type workspace struct {
+	app *Application
 	subcommands
 }
 
 func newWorkspace(app *Application) *workspace {
 	return &workspace{
+		app: app,
 		subcommands: subcommands{
 			&generateWorkspaceMod{app: app},
 		},
 	}
 }
 
-func (w *workspace) Name() string { return "workspace" }
+func (w *workspace) Name() string   { return "workspace" }
+func (w *workspace) Parent() string { return w.app.Name() }
 func (w *workspace) ShortHelp() string {
 	return "manage the gopls workspace (experimental: under development)"
 }
@@ -48,7 +51,7 @@
 }
 
 func (c *generateWorkspaceMod) DetailedHelp(f *flag.FlagSet) {
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (c *generateWorkspaceMod) Run(ctx context.Context, args ...string) error {
diff --git a/internal/lsp/cmd/workspace_symbol.go b/internal/lsp/cmd/workspace_symbol.go
index b263262..38fe5de 100644
--- a/internal/lsp/cmd/workspace_symbol.go
+++ b/internal/lsp/cmd/workspace_symbol.go
@@ -22,17 +22,18 @@
 }
 
 func (r *workspaceSymbol) Name() string      { return "workspace_symbol" }
-func (r *workspaceSymbol) Usage() string     { return "<query>" }
+func (r *workspaceSymbol) Parent() string    { return r.app.Name() }
+func (r *workspaceSymbol) Usage() string     { return "[workspace_symbol-flags] <query>" }
 func (r *workspaceSymbol) ShortHelp() string { return "search symbols in workspace" }
 func (r *workspaceSymbol) DetailedHelp(f *flag.FlagSet) {
 	fmt.Fprint(f.Output(), `
 Example:
 
-  $ gopls workspace_symbol -matcher fuzzy 'wsymbols'
+	$ gopls workspace_symbol -matcher fuzzy 'wsymbols'
 
-gopls workspace_symbol flags are:
+workspace_symbol-flags:
 `)
-	f.PrintDefaults()
+	printFlagDefaults(f)
 }
 
 func (r *workspaceSymbol) Run(ctx context.Context, args ...string) error {
@@ -50,6 +51,8 @@
 			o.SymbolMatcher = source.SymbolFuzzy
 		case "caseSensitive":
 			o.SymbolMatcher = source.SymbolCaseSensitive
+		case "fastfuzzy":
+			o.SymbolMatcher = source.SymbolFastFuzzy
 		default:
 			o.SymbolMatcher = source.SymbolCaseInsensitive
 		}
diff --git a/internal/lsp/code_action.go b/internal/lsp/code_action.go
index b58e954..7ddf812 100644
--- a/internal/lsp/code_action.go
+++ b/internal/lsp/code_action.go
@@ -30,9 +30,10 @@
 	uri := fh.URI()
 
 	// Determine the supported actions for this file kind.
-	supportedCodeActions, ok := snapshot.View().Options().SupportedCodeActions[fh.Kind()]
+	kind := snapshot.View().FileKind(fh)
+	supportedCodeActions, ok := snapshot.View().Options().SupportedCodeActions[kind]
 	if !ok {
-		return nil, fmt.Errorf("no supported code actions for %v file kind", fh.Kind())
+		return nil, fmt.Errorf("no supported code actions for %v file kind", kind)
 	}
 
 	// The Only field of the context specifies which code actions the client wants.
@@ -51,7 +52,12 @@
 	} else {
 		wanted = make(map[protocol.CodeActionKind]bool)
 		for _, only := range params.Context.Only {
-			wanted[only] = supportedCodeActions[only] || explicit[only]
+			for k, v := range supportedCodeActions {
+				if only == k || strings.HasPrefix(string(k), string(only)+".") {
+					wanted[k] = wanted[k] || v
+				}
+			}
+			wanted[only] = wanted[only] || explicit[only]
 		}
 	}
 	if len(supportedCodeActions) == 0 {
@@ -62,7 +68,7 @@
 	}
 
 	var codeActions []protocol.CodeAction
-	switch fh.Kind() {
+	switch kind {
 	case source.Mod:
 		if diagnostics := params.Context.Diagnostics; len(diagnostics) > 0 {
 			diags, err := mod.DiagnosticsForMod(ctx, snapshot, fh)
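The new loop above treats CodeActionKind values as a dot-separated hierarchy: a client that asks for Only: ["source"] should also receive supported kinds such as "source.organizeImports". The prefix rule in isolation:

package main

import (
	"fmt"
	"strings"
)

// wanted reports whether supported kind k satisfies requested kind only,
// either exactly or because k is nested under only in the kind hierarchy.
func wanted(k, only string) bool {
	return only == k || strings.HasPrefix(k, only+".")
}

func main() {
	fmt.Println(wanted("source.organizeImports", "source")) // true
	fmt.Println(wanted("refactor.rewrite", "source"))       // false
	fmt.Println(wanted("quickfix", "quickfix"))              // true
}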
diff --git a/internal/lsp/code_lens.go b/internal/lsp/code_lens.go
index 6e371fc..e194458 100644
--- a/internal/lsp/code_lens.go
+++ b/internal/lsp/code_lens.go
@@ -23,7 +23,7 @@
 		return nil, err
 	}
 	var lenses map[command.Command]source.LensFunc
-	switch fh.Kind() {
+	switch snapshot.View().FileKind(fh) {
 	case source.Mod:
 		lenses = mod.LensFuncs()
 	case source.Go:
diff --git a/internal/lsp/command.go b/internal/lsp/command.go
index 61c794b..088fa57 100644
--- a/internal/lsp/command.go
+++ b/internal/lsp/command.go
@@ -13,9 +13,12 @@
 	"io/ioutil"
 	"os"
 	"path/filepath"
+	"sort"
 	"strings"
 
 	"golang.org/x/mod/modfile"
+	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/lsp/command"
@@ -90,7 +93,10 @@
 		deps.snapshot, deps.fh, ok, release, err = c.s.beginFileRequest(ctx, cfg.forURI, source.UnknownKind)
 		defer release()
 		if !ok {
-			return err
+			if err != nil {
+				return err
+			}
+			return fmt.Errorf("invalid file URL: %v", cfg.forURI)
 		}
 	}
 	ctx, cancel := context.WithCancel(xcontext.Detach(ctx))
@@ -259,6 +265,26 @@
 	})
 }
 
+func (c *commandHandler) EditGoDirective(ctx context.Context, args command.EditGoDirectiveArgs) error {
+	return c.run(ctx, commandConfig{
+		requireSave: true, // if go.mod isn't saved it could cause a problem
+		forURI:      args.URI,
+	}, func(ctx context.Context, deps commandDeps) error {
+		snapshot, fh, ok, release, err := c.s.beginFileRequest(ctx, args.URI, source.UnknownKind)
+		defer release()
+		if !ok {
+			return err
+		}
+		if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error {
+			_, err := invoke("mod", "edit", "-go", args.Version)
+			return err
+		}); err != nil {
+			return err
+		}
+		return nil
+	})
+}
+
 func (c *commandHandler) RemoveDependency(ctx context.Context, args command.RemoveDependencyArgs) error {
 	return c.run(ctx, commandConfig{
 		progress: "Removing dependency",
@@ -358,7 +384,7 @@
 
 func (c *commandHandler) runTests(ctx context.Context, snapshot source.Snapshot, work *progress.WorkDone, uri protocol.DocumentURI, tests, benchmarks []string) error {
 	// TODO: fix the error reporting when this runs async.
-	pkgs, err := snapshot.PackagesForFile(ctx, uri.SpanURI(), source.TypecheckWorkspace)
+	pkgs, err := snapshot.PackagesForFile(ctx, uri.SpanURI(), source.TypecheckWorkspace, false)
 	if err != nil {
 		return err
 	}
@@ -681,6 +707,48 @@
 	})
 	return result, err
 }
+
+func (c *commandHandler) ListImports(ctx context.Context, args command.URIArg) (command.ListImportsResult, error) {
+	var result command.ListImportsResult
+	err := c.run(ctx, commandConfig{
+		forURI: args.URI,
+	}, func(ctx context.Context, deps commandDeps) error {
+		pkg, err := deps.snapshot.PackageForFile(ctx, args.URI.SpanURI(), source.TypecheckWorkspace, source.NarrowestPackage)
+		if err != nil {
+			return err
+		}
+		pgf, err := pkg.File(args.URI.SpanURI())
+		if err != nil {
+			return err
+		}
+		for _, group := range astutil.Imports(deps.snapshot.FileSet(), pgf.File) {
+			for _, imp := range group {
+				if imp.Path == nil {
+					continue
+				}
+				var name string
+				if imp.Name != nil {
+					name = imp.Name.Name
+				}
+				result.Imports = append(result.Imports, command.FileImport{
+					Path: source.ImportPath(imp),
+					Name: name,
+				})
+			}
+		}
+		for _, imp := range pkg.Imports() {
+			result.PackageImports = append(result.PackageImports, command.PackageImport{
+				Path: imp.PkgPath(), // This might be the vendored path under GOPATH vendoring, in which case it's a bug.
+			})
+		}
+		sort.Slice(result.PackageImports, func(i, j int) bool {
+			return result.PackageImports[i].Path < result.PackageImports[j].Path
+		})
+		return nil
+	})
+	return result, err
+}
+
 func (c *commandHandler) AddImport(ctx context.Context, args command.AddImportArgs) error {
 	return c.run(ctx, commandConfig{
 		progress: "Adding import",
@@ -701,17 +769,6 @@
 	})
 }
 
-func (c *commandHandler) WorkspaceMetadata(ctx context.Context) (command.WorkspaceMetadataResult, error) {
-	var result command.WorkspaceMetadataResult
-	for _, view := range c.s.session.Views() {
-		result.Workspaces = append(result.Workspaces, command.Workspace{
-			Name:      view.Name(),
-			ModuleDir: view.TempWorkspace().Filename(),
-		})
-	}
-	return result, nil
-}
-
 func (c *commandHandler) StartDebugging(ctx context.Context, args command.DebuggingArgs) (result command.DebuggingResult, _ error) {
 	addr := args.Addr
 	if addr == "" {
@@ -728,3 +785,35 @@
 	result.URLs = []string{"http://" + listenedAddr}
 	return result, nil
 }
+
+func (c *commandHandler) RunVulncheckExp(ctx context.Context, args command.VulncheckArgs) (result command.VulncheckResult, _ error) {
+	err := c.run(ctx, commandConfig{
+		progress:    "Running vulncheck",
+		requireSave: true,
+		forURI:      args.Dir, // Will dir work?
+	}, func(ctx context.Context, deps commandDeps) error {
+		view := deps.snapshot.View()
+		opts := view.Options()
+		if opts == nil || opts.Hooks.Govulncheck == nil {
+			return errors.New("vulncheck feature is not available")
+		}
+
+		buildFlags := opts.BuildFlags // XXX: is session.Options equivalent to view.Options?
+		var viewEnv []string
+		if e := opts.EnvSlice(); e != nil {
+			viewEnv = append(os.Environ(), e...)
+		}
+		cfg := &packages.Config{
+			Context:    ctx,
+			Tests:      true, // TODO(hyangah): add a field in args.
+			BuildFlags: buildFlags,
+			Env:        viewEnv,
+			Dir:        view.Folder().Filename(),
+			// TODO(hyangah): configure overlay
+		}
+		var err error
+		result, err = opts.Hooks.Govulncheck(ctx, cfg, args)
+		return err
+	})
+	return result, err
+}
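Among the handlers added to command.go above, ListImports walks the file's import groups with golang.org/x/tools/go/ast/astutil.Imports, which returns imports grouped by their blank-line-separated blocks. A self-contained illustration of that call (the source text here is invented):

package main

import (
	"fmt"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
)

const src = `package p

import (
	"fmt"
	"os"

	foo "strings"
)
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ImportsOnly)
	if err != nil {
		panic(err)
	}
	// Two groups: {"fmt", "os"} and {foo "strings"}.
	for i, group := range astutil.Imports(fset, f) {
		for _, imp := range group {
			name := ""
			if imp.Name != nil {
				name = imp.Name.Name
			}
			fmt.Printf("group %d: name=%q path=%s\n", i, name, imp.Path.Value)
		}
	}
}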
diff --git a/internal/lsp/command/command_gen.go b/internal/lsp/command/command_gen.go
index 09ebd2f..22cfeff 100644
--- a/internal/lsp/command/command_gen.go
+++ b/internal/lsp/command/command_gen.go
@@ -23,14 +23,17 @@
 	AddImport         Command = "add_import"
 	ApplyFix          Command = "apply_fix"
 	CheckUpgrades     Command = "check_upgrades"
+	EditGoDirective   Command = "edit_go_directive"
 	GCDetails         Command = "gc_details"
 	Generate          Command = "generate"
 	GenerateGoplsMod  Command = "generate_gopls_mod"
 	GoGetPackage      Command = "go_get_package"
+	ListImports       Command = "list_imports"
 	ListKnownPackages Command = "list_known_packages"
 	RegenerateCgo     Command = "regenerate_cgo"
 	RemoveDependency  Command = "remove_dependency"
 	RunTests          Command = "run_tests"
+	RunVulncheckExp   Command = "run_vulncheck_exp"
 	StartDebugging    Command = "start_debugging"
 	Test              Command = "test"
 	Tidy              Command = "tidy"
@@ -38,7 +41,6 @@
 	UpdateGoSum       Command = "update_go_sum"
 	UpgradeDependency Command = "upgrade_dependency"
 	Vendor            Command = "vendor"
-	WorkspaceMetadata Command = "workspace_metadata"
 )
 
 var Commands = []Command{
@@ -46,14 +48,17 @@
 	AddImport,
 	ApplyFix,
 	CheckUpgrades,
+	EditGoDirective,
 	GCDetails,
 	Generate,
 	GenerateGoplsMod,
 	GoGetPackage,
+	ListImports,
 	ListKnownPackages,
 	RegenerateCgo,
 	RemoveDependency,
 	RunTests,
+	RunVulncheckExp,
 	StartDebugging,
 	Test,
 	Tidy,
@@ -61,7 +66,6 @@
 	UpdateGoSum,
 	UpgradeDependency,
 	Vendor,
-	WorkspaceMetadata,
 }
 
 func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Interface) (interface{}, error) {
@@ -90,6 +94,12 @@
 			return nil, err
 		}
 		return nil, s.CheckUpgrades(ctx, a0)
+	case "gopls.edit_go_directive":
+		var a0 EditGoDirectiveArgs
+		if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
+			return nil, err
+		}
+		return nil, s.EditGoDirective(ctx, a0)
 	case "gopls.gc_details":
 		var a0 protocol.DocumentURI
 		if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
@@ -114,6 +124,12 @@
 			return nil, err
 		}
 		return nil, s.GoGetPackage(ctx, a0)
+	case "gopls.list_imports":
+		var a0 URIArg
+		if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
+			return nil, err
+		}
+		return s.ListImports(ctx, a0)
 	case "gopls.list_known_packages":
 		var a0 URIArg
 		if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
@@ -138,6 +154,12 @@
 			return nil, err
 		}
 		return nil, s.RunTests(ctx, a0)
+	case "gopls.run_vulncheck_exp":
+		var a0 VulncheckArgs
+		if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
+			return nil, err
+		}
+		return s.RunVulncheckExp(ctx, a0)
 	case "gopls.start_debugging":
 		var a0 DebuggingArgs
 		if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
@@ -182,8 +204,6 @@
 			return nil, err
 		}
 		return nil, s.Vendor(ctx, a0)
-	case "gopls.workspace_metadata":
-		return s.WorkspaceMetadata(ctx)
 	}
 	return nil, fmt.Errorf("unsupported command %q", params.Command)
 }
@@ -236,6 +256,18 @@
 	}, nil
 }
 
+func NewEditGoDirectiveCommand(title string, a0 EditGoDirectiveArgs) (protocol.Command, error) {
+	args, err := MarshalArgs(a0)
+	if err != nil {
+		return protocol.Command{}, err
+	}
+	return protocol.Command{
+		Title:     title,
+		Command:   "gopls.edit_go_directive",
+		Arguments: args,
+	}, nil
+}
+
 func NewGCDetailsCommand(title string, a0 protocol.DocumentURI) (protocol.Command, error) {
 	args, err := MarshalArgs(a0)
 	if err != nil {
@@ -284,6 +316,18 @@
 	}, nil
 }
 
+func NewListImportsCommand(title string, a0 URIArg) (protocol.Command, error) {
+	args, err := MarshalArgs(a0)
+	if err != nil {
+		return protocol.Command{}, err
+	}
+	return protocol.Command{
+		Title:     title,
+		Command:   "gopls.list_imports",
+		Arguments: args,
+	}, nil
+}
+
 func NewListKnownPackagesCommand(title string, a0 URIArg) (protocol.Command, error) {
 	args, err := MarshalArgs(a0)
 	if err != nil {
@@ -332,6 +376,18 @@
 	}, nil
 }
 
+func NewRunVulncheckExpCommand(title string, a0 VulncheckArgs) (protocol.Command, error) {
+	args, err := MarshalArgs(a0)
+	if err != nil {
+		return protocol.Command{}, err
+	}
+	return protocol.Command{
+		Title:     title,
+		Command:   "gopls.run_vulncheck_exp",
+		Arguments: args,
+	}, nil
+}
+
 func NewStartDebuggingCommand(title string, a0 DebuggingArgs) (protocol.Command, error) {
 	args, err := MarshalArgs(a0)
 	if err != nil {
@@ -415,15 +471,3 @@
 		Arguments: args,
 	}, nil
 }
-
-func NewWorkspaceMetadataCommand(title string) (protocol.Command, error) {
-	args, err := MarshalArgs()
-	if err != nil {
-		return protocol.Command{}, err
-	}
-	return protocol.Command{
-		Title:     title,
-		Command:   "gopls.workspace_metadata",
-		Arguments: args,
-	}, nil
-}
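Every Dispatch case added to the generated code above follows the same shape: decode the command's JSON arguments into its typed args struct, then forward to the corresponding Interface method. A simplified, hypothetical version of that shape (this is not the generated code itself):

package main

import (
	"encoding/json"
	"fmt"
)

type editGoDirectiveArgs struct {
	URI     string
	Version string
}

func dispatch(command string, raw []json.RawMessage) error {
	switch command {
	case "gopls.edit_go_directive":
		var a editGoDirectiveArgs
		if err := json.Unmarshal(raw[0], &a); err != nil {
			return err
		}
		fmt.Printf("edit go directive: %s -> go %s\n", a.URI, a.Version)
		return nil
	}
	return fmt.Errorf("unsupported command %q", command)
}

func main() {
	args := []json.RawMessage{json.RawMessage(`{"URI":"file:///tmp/mod/go.mod","Version":"1.18"}`)}
	if err := dispatch("gopls.edit_go_directive", args); err != nil {
		panic(err)
	}
}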
diff --git a/internal/lsp/command/commandmeta/meta.go b/internal/lsp/command/commandmeta/meta.go
index 1a6a2c7..102b898 100644
--- a/internal/lsp/command/commandmeta/meta.go
+++ b/internal/lsp/command/commandmeta/meta.go
@@ -148,7 +148,7 @@
 		JSONTag: reflect.StructTag(tag).Get("json"),
 	}
 	under := fld.Type.Underlying()
-	// Quick-and-dirty handling for various underyling types.
+	// Quick-and-dirty handling for various underlying types.
 	switch p := under.(type) {
 	case *types.Pointer:
 		under = p.Elem().Underlying()
diff --git a/internal/lsp/command/interface.go b/internal/lsp/command/interface.go
index 360dfc3..9aecfbe 100644
--- a/internal/lsp/command/interface.go
+++ b/internal/lsp/command/interface.go
@@ -68,6 +68,11 @@
 	// Runs `go mod vendor` for a module.
 	Vendor(context.Context, URIArg) error
 
+	// EditGoDirective: Run go mod edit -go=version
+	//
+	// Runs `go mod edit -go=version` for a module.
+	EditGoDirective(context.Context, EditGoDirectiveArgs) error
+
 	// UpdateGoSum: Update go.sum
 	//
 	// Updates the go.sum file for a module.
@@ -120,6 +125,12 @@
 	// Retrieve a list of packages that are importable from the given URI.
 	ListKnownPackages(context.Context, URIArg) (ListKnownPackagesResult, error)
 
+	// ListImports: List imports of a file and its package
+	//
+	// Retrieve a list of imports in the given Go file, and the package it
+	// belongs to.
+	ListImports(context.Context, URIArg) (ListImportsResult, error)
+
 	// AddImport: Add an import
 	//
 	// Ask the server to add an import path to a given Go file.  The method will
@@ -127,16 +138,16 @@
 	// themselves.
 	AddImport(context.Context, AddImportArgs) error
 
-	// WorkspaceMetadata: Query workspace metadata
-	//
-	// Query the server for information about active workspaces.
-	WorkspaceMetadata(context.Context) (WorkspaceMetadataResult, error)
-
 	// StartDebugging: Start the gopls debug server
 	//
 	// Start the gopls debug server if it isn't running, and return the debug
 	// address.
 	StartDebugging(context.Context, DebuggingArgs) (DebuggingResult, error)
+
+	// RunVulncheckExp: Run vulncheck (experimental)
+	//
+	// Run vulnerability check (`govulncheck`).
+	RunVulncheckExp(context.Context, VulncheckArgs) (VulncheckResult, error)
 }
 
 type RunTestsArgs struct {
@@ -203,6 +214,13 @@
 	OnlyDiagnostic bool
 }
 
+type EditGoDirectiveArgs struct {
+	// Any document URI within the relevant module.
+	URI protocol.DocumentURI
+	// The version to pass to `go mod edit -go`.
+	Version string
+}
+
 type GoGetPackageArgs struct {
 	// Any document URI within the relevant module.
 	URI protocol.DocumentURI
@@ -229,6 +247,26 @@
 	Packages []string
 }
 
+type ListImportsResult struct {
+	// Imports is a list of imports in the requested file.
+	Imports []FileImport
+
+	// PackageImports is a list of all imports in the requested file's package.
+	PackageImports []PackageImport
+}
+
+type FileImport struct {
+	// Path is the import path of the import.
+	Path string
+	// Name is the name of the import, e.g. `foo` in `import foo "strings"`.
+	Name string
+}
+
+type PackageImport struct {
+	// Path is the import path of the import.
+	Path string
+}
+
 type WorkspaceMetadataArgs struct {
 }
 
@@ -274,3 +312,73 @@
 	// will be empty.
 	URLs []string
 }
+
+type VulncheckArgs struct {
+	// Dir is the directory from which vulncheck will run.
+	Dir protocol.DocumentURI
+
+	// Package pattern. E.g. "", ".", "./...".
+	Pattern string
+
+	// TODO: Flag []string (flags accepted by govulncheck, e.g., -tests)
+	// TODO: Format string (json, text)
+}
+
+type VulncheckResult struct {
+	Vuln []Vuln
+
+	// TODO: Text string format output?
+}
+
+// CallStack models a trace of function calls starting
+// with a client function or method and ending with a
+// call to a vulnerable symbol.
+type CallStack []StackEntry
+
+// StackEntry models an element of a call stack.
+type StackEntry struct {
+	// See golang.org/x/exp/vulncheck.StackEntry.
+
+	// User-friendly representation of function/method names.
+	// e.g. package.funcName, package.(recvType).methodName, ...
+	Name string
+	URI  protocol.DocumentURI
+	Pos  protocol.Position // Start position. (0-based. Column is always 0)
+}
+
+// Vuln models an osv.Entry and representative call stacks.
+type Vuln struct {
+	// ID is the vulnerability ID (osv.Entry.ID).
+	// https://ossf.github.io/osv-schema/#id-modified-fields
+	ID string
+	// Details is the description of the vulnerability (osv.Entry.Details).
+	// https://ossf.github.io/osv-schema/#summary-details-fields
+	Details string `json:",omitempty"`
+	// Aliases are alternative IDs of the vulnerability.
+	// https://ossf.github.io/osv-schema/#aliases-field
+	Aliases []string `json:",omitempty"`
+
+	// Symbol is the name of the detected vulnerable function or method.
+	Symbol string `json:",omitempty"`
+	// PkgPath is the package path of the detected Symbol.
+	PkgPath string `json:",omitempty"`
+	// ModPath is the module path corresponding to PkgPath.
+	// TODO: how do we specify standard library's vulnerability?
+	ModPath string `json:",omitempty"`
+
+	// URL is the URL for more info about the information.
+	// Either the database specific URL or the one of the URLs
+	// included in osv.Entry.References.
+	URL string `json:",omitempty"`
+
+	// Current is the current module version.
+	CurrentVersion string `json:",omitempty"`
+
+	// Fixed is the minimum module version that contains the fix.
+	FixedVersion string `json:",omitempty"`
+
+	// Example call stacks.
+	CallStacks []CallStack `json:",omitempty"`
+
+	// TODO: import graph & module graph.
+}
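Most optional fields of Vuln above carry a json:",omitempty" tag, so a result that only has an ID serializes without empty noise. A quick demonstration with a trimmed-down stand-in type (the ID value is made up):

package main

import (
	"encoding/json"
	"fmt"
)

// vuln stands in for command.Vuln with just enough fields to show the tags.
type vuln struct {
	ID      string
	Details string   `json:",omitempty"`
	Aliases []string `json:",omitempty"`
}

func main() {
	b, _ := json.MarshalIndent(vuln{ID: "GO-2022-0001"}, "", "  ")
	fmt.Println(string(b)) // only the ID field is emitted
}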
diff --git a/internal/lsp/command/util.go b/internal/lsp/command/util.go
index 5915b9b..dc9f22f 100644
--- a/internal/lsp/command/util.go
+++ b/internal/lsp/command/util.go
@@ -25,7 +25,7 @@
 //
 // Example usage:
 //
-//   jsonArgs, err := EncodeArgs(1, "hello", true, StructuredArg{42, 12.6})
+//   jsonArgs, err := MarshalArgs(1, "hello", true, StructuredArg{42, 12.6})
 //
 func MarshalArgs(args ...interface{}) ([]json.RawMessage, error) {
 	var out []json.RawMessage
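The doc-comment fix above renames the example call to MarshalArgs so it matches the function being documented. As a sketch of what such a helper does per that comment (each argument becomes its own json.RawMessage; this mirrors the documented behavior, not necessarily the exact implementation):

package main

import (
	"encoding/json"
	"fmt"
)

// marshalArgs JSON-encodes each argument into its own raw message.
func marshalArgs(args ...interface{}) ([]json.RawMessage, error) {
	var out []json.RawMessage
	for _, a := range args {
		b, err := json.Marshal(a)
		if err != nil {
			return nil, err
		}
		out = append(out, json.RawMessage(b))
	}
	return out, nil
}

func main() {
	msgs, err := marshalArgs(1, "hello", true)
	if err != nil {
		panic(err)
	}
	for _, m := range msgs {
		fmt.Println(string(m)) // 1, "hello", true
	}
}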
diff --git a/internal/lsp/completion.go b/internal/lsp/completion.go
index 4bec6cd..5c88ed0 100644
--- a/internal/lsp/completion.go
+++ b/internal/lsp/completion.go
@@ -16,6 +16,7 @@
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/lsp/source/completion"
 	"golang.org/x/tools/internal/lsp/template"
+	"golang.org/x/tools/internal/lsp/work"
 	"golang.org/x/tools/internal/span"
 )
 
@@ -27,13 +28,24 @@
 	}
 	var candidates []completion.CompletionItem
 	var surrounding *completion.Selection
-	switch fh.Kind() {
+	switch snapshot.View().FileKind(fh) {
 	case source.Go:
 		candidates, surrounding, err = completion.Completion(ctx, snapshot, fh, params.Position, params.Context)
 	case source.Mod:
 		candidates, surrounding = nil, nil
+	case source.Work:
+		cl, err := work.Completion(ctx, snapshot, fh, params.Position)
+		if err != nil {
+			break
+		}
+		return cl, nil
 	case source.Tmpl:
-		candidates, surrounding, err = template.Completion(ctx, snapshot, fh, params.Position, params.Context)
+		var cl *protocol.CompletionList
+		cl, err = template.Completion(ctx, snapshot, fh, params.Position, params.Context)
+		if err != nil {
+			break // use common error handling, candidates==nil
+		}
+		return cl, nil
 	}
 	if err != nil {
 		event.Error(ctx, "no completions found", err, tag.Position.Of(params.Position))
diff --git a/internal/lsp/debug/buildinfo_go1.12.go b/internal/lsp/debug/buildinfo_go1.12.go
new file mode 100644
index 0000000..2f360db
--- /dev/null
+++ b/internal/lsp/debug/buildinfo_go1.12.go
@@ -0,0 +1,29 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package debug
+
+import (
+	"runtime"
+	"runtime/debug"
+)
+
+type BuildInfo struct {
+	debug.BuildInfo
+	GoVersion string // Version of Go that produced this binary
+}
+
+func readBuildInfo() (*BuildInfo, bool) {
+	rinfo, ok := debug.ReadBuildInfo()
+	if !ok {
+		return nil, false
+	}
+	return &BuildInfo{
+		GoVersion: runtime.Version(),
+		BuildInfo: *rinfo,
+	}, true
+}
diff --git a/internal/lsp/debug/buildinfo_go1.18.go b/internal/lsp/debug/buildinfo_go1.18.go
new file mode 100644
index 0000000..4121c4b
--- /dev/null
+++ b/internal/lsp/debug/buildinfo_go1.18.go
@@ -0,0 +1,19 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package debug
+
+import (
+	"runtime/debug"
+)
+
+type BuildInfo debug.BuildInfo
+
+func readBuildInfo() (*BuildInfo, bool) {
+	info, ok := debug.ReadBuildInfo()
+	return (*BuildInfo)(info), ok
+}
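The two buildinfo files above split the implementation on a build constraint: with Go 1.18 the standard debug.BuildInfo already includes GoVersion, while the !go1.18 variant wraps it and fills GoVersion from runtime.Version. A standalone check of the 1.18 path (it assumes a module-built binary, so build info is available):

package main

import (
	"fmt"
	"runtime/debug"
)

func main() {
	if info, ok := debug.ReadBuildInfo(); ok {
		fmt.Println("path:", info.Main.Path)
		fmt.Println("go:  ", info.GoVersion) // field available since Go 1.18
	}
}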
diff --git a/internal/lsp/debug/info.go b/internal/lsp/debug/info.go
index 3533b36..bcc2f4f 100644
--- a/internal/lsp/debug/info.go
+++ b/internal/lsp/debug/info.go
@@ -7,9 +7,11 @@
 
 import (
 	"context"
+	"encoding/json"
 	"fmt"
 	"io"
 	"reflect"
+	"runtime"
 	"runtime/debug"
 	"sort"
 	"strings"
@@ -23,6 +25,7 @@
 	PlainText = PrintMode(iota)
 	Markdown
 	HTML
+	JSON
 )
 
 // Version is a manually-updated mechanism for tracking versions.
@@ -31,8 +34,8 @@
 // ServerVersion is the format used by gopls to report its version to the
 // client. This format is structured so that the client can parse it easily.
 type ServerVersion struct {
-	Module
-	Deps []*Module `json:"deps,omitempty"`
+	*BuildInfo
+	Version string
 }
 
 type Module struct {
@@ -50,47 +53,24 @@
 // built in module mode, we return a GOPATH-specific message with the
 // hardcoded version.
 func VersionInfo() *ServerVersion {
-	if info, ok := debug.ReadBuildInfo(); ok {
+	if info, ok := readBuildInfo(); ok {
 		return getVersion(info)
 	}
-	path := "gopls, built in GOPATH mode"
+	buildInfo := &BuildInfo{}
+	// In go1.17 or earlier, parts of BuildInfo are embedded fields.
+	buildInfo.Path = "gopls, built in GOPATH mode"
+	buildInfo.GoVersion = runtime.Version()
 	return &ServerVersion{
-		Module: Module{
-			ModuleVersion: ModuleVersion{
-				Path:    path,
-				Version: Version,
-			},
-		},
+		Version:   Version,
+		BuildInfo: buildInfo,
 	}
 }
 
-func getVersion(info *debug.BuildInfo) *ServerVersion {
-	serverVersion := ServerVersion{
-		Module: Module{
-			ModuleVersion: ModuleVersion{
-				Path:    info.Main.Path,
-				Version: info.Main.Version,
-				Sum:     info.Main.Sum,
-			},
-		},
+func getVersion(info *BuildInfo) *ServerVersion {
+	return &ServerVersion{
+		Version:   Version,
+		BuildInfo: info,
 	}
-	for _, d := range info.Deps {
-		m := &Module{
-			ModuleVersion: ModuleVersion{
-				Path:    d.Path,
-				Version: d.Version,
-				Sum:     d.Sum,
-			},
-		}
-		if d.Replace != nil {
-			m.Replace = &ModuleVersion{
-				Path:    d.Replace.Path,
-				Version: d.Replace.Version,
-			}
-		}
-		serverVersion.Deps = append(serverVersion.Deps, m)
-	}
-	return &serverVersion
 }
 
 // PrintServerInfo writes HTML debug info to w for the Instance.
@@ -111,15 +91,29 @@
 // PrintVersionInfo writes version information to w, using the output format
 // specified by mode. verbose controls whether additional information is
 // written, including section headers.
-func PrintVersionInfo(ctx context.Context, w io.Writer, verbose bool, mode PrintMode) {
+func PrintVersionInfo(_ context.Context, w io.Writer, verbose bool, mode PrintMode) error {
 	info := VersionInfo()
+	if mode == JSON {
+		return printVersionInfoJSON(w, info)
+	}
+
 	if !verbose {
 		printBuildInfo(w, info, false, mode)
-		return
+		return nil
 	}
 	section(w, mode, "Build info", func() {
 		printBuildInfo(w, info, true, mode)
 	})
+	return nil
+}
+
+func printVersionInfoJSON(w io.Writer, info *ServerVersion) error {
+	js, err := json.MarshalIndent(info, "", "\t")
+	if err != nil {
+		return err
+	}
+	_, err = fmt.Fprint(w, string(js))
+	return err
 }
 
 func section(w io.Writer, mode PrintMode, title string, body func()) {
@@ -141,16 +135,17 @@
 
 func printBuildInfo(w io.Writer, info *ServerVersion, verbose bool, mode PrintMode) {
 	fmt.Fprintf(w, "%v %v\n", info.Path, Version)
-	printModuleInfo(w, &info.Module, mode)
+	printModuleInfo(w, info.Main, mode)
 	if !verbose {
 		return
 	}
 	for _, dep := range info.Deps {
-		printModuleInfo(w, dep, mode)
+		printModuleInfo(w, *dep, mode)
 	}
+	fmt.Fprintf(w, "go: %v\n", info.GoVersion)
 }
 
-func printModuleInfo(w io.Writer, m *Module, mode PrintMode) {
+func printModuleInfo(w io.Writer, m debug.Module, _ PrintMode) {
 	fmt.Fprintf(w, "    %s@%s", m.Path, m.Version)
 	if m.Sum != "" {
 		fmt.Fprintf(w, " %s", m.Sum)
@@ -183,10 +178,15 @@
 	}
 }
 
-func showOptions(o *source.Options) []string {
-	// non-breaking spaces for indenting current and defaults when they are on a separate line
-	const indent = "\u00a0\u00a0\u00a0\u00a0\u00a0"
-	var ans strings.Builder
+type sessionOption struct {
+	Name    string
+	Type    string
+	Current string
+	Default string
+}
+
+func showOptions(o *source.Options) []sessionOption {
+	var out []sessionOption
 	t := reflect.TypeOf(*o)
 	swalk(t, []int{}, "")
 	v := reflect.ValueOf(*o)
@@ -195,17 +195,26 @@
 		val := v.FieldByIndex(f.index)
 		def := do.FieldByIndex(f.index)
 		tx := t.FieldByIndex(f.index)
-		prefix := fmt.Sprintf("%s (type is %s): ", tx.Name, tx.Type)
 		is := strVal(val)
 		was := strVal(def)
-		if len(is) < 30 && len(was) < 30 {
-			fmt.Fprintf(&ans, "%s current:%s, default:%s\n", prefix, is, was)
-		} else {
-			fmt.Fprintf(&ans, "%s\n%scurrent:%s\n%sdefault:%s\n", prefix, indent, is, indent, was)
-		}
+		out = append(out, sessionOption{
+			Name:    tx.Name,
+			Type:    tx.Type.String(),
+			Current: is,
+			Default: was,
+		})
 	}
-	return strings.Split(ans.String(), "\n")
+	sort.Slice(out, func(i, j int) bool {
+		rd := out[i].Current == out[i].Default
+		ld := out[j].Current == out[j].Default
+		if rd != ld {
+			return ld
+		}
+		return out[i].Name < out[j].Name
+	})
+	return out
 }
+
 func strVal(val reflect.Value) string {
 	switch val.Kind() {
 	case reflect.Bool:
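showOptions now returns structured sessionOption values and orders them so that options changed from their defaults come first, then alphabetically by name. The comparison can be replayed in isolation (the sample option names and values below are invented):

package main

import (
	"fmt"
	"sort"
)

type opt struct{ Name, Current, Default string }

func main() {
	out := []opt{
		{"verboseOutput", "false", "false"},
		{"completionBudget", "10s", "100ms"},
		{"gofumpt", "true", "false"},
	}
	sort.Slice(out, func(i, j int) bool {
		rd := out[i].Current == out[i].Default
		ld := out[j].Current == out[j].Default
		if rd != ld {
			return ld // non-default (modified) options sort first
		}
		return out[i].Name < out[j].Name
	})
	for _, o := range out {
		fmt.Printf("%s: current=%s default=%s\n", o.Name, o.Current, o.Default)
	}
	// Order: completionBudget, gofumpt (both modified), then verboseOutput.
}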
diff --git a/internal/lsp/debug/info_test.go b/internal/lsp/debug/info_test.go
new file mode 100644
index 0000000..5a53628
--- /dev/null
+++ b/internal/lsp/debug/info_test.go
@@ -0,0 +1,47 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package debug exports debug information for gopls.
+package debug
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"runtime"
+	"testing"
+)
+
+func TestPrintVersionInfoJSON(t *testing.T) {
+	buf := new(bytes.Buffer)
+	if err := PrintVersionInfo(context.Background(), buf, true, JSON); err != nil {
+		t.Fatalf("PrintVersionInfo failed: %v", err)
+	}
+	res := buf.Bytes()
+
+	var got ServerVersion
+	if err := json.Unmarshal(res, &got); err != nil {
+		t.Fatalf("unexpected output: %v\n%s", err, res)
+	}
+	if g, w := got.GoVersion, runtime.Version(); g != w {
+		t.Errorf("go version = %v, want %v", g, w)
+	}
+	if g, w := got.Version, Version; g != w {
+		t.Errorf("gopls version = %v, want %v", g, w)
+	}
+	// Other fields of BuildInfo may not be available during test.
+}
+
+func TestPrintVersionInfoPlainText(t *testing.T) {
+	buf := new(bytes.Buffer)
+	if err := PrintVersionInfo(context.Background(), buf, true, PlainText); err != nil {
+		t.Fatalf("PrintVersionInfo failed: %v", err)
+	}
+	res := buf.Bytes()
+
+	// Other fields of BuildInfo may not be available during test.
+	if !bytes.Contains(res, []byte(Version)) || !bytes.Contains(res, []byte(runtime.Version())) {
+		t.Errorf("plaintext output = %q,\nwant (version: %v, go: %v)", res, Version, runtime.Version())
+	}
+}
diff --git a/internal/lsp/debug/serve.go b/internal/lsp/debug/serve.go
index b3699e1..b6dba60 100644
--- a/internal/lsp/debug/serve.go
+++ b/internal/lsp/debug/serve.go
@@ -791,7 +791,7 @@
 		}
 		return s
 	},
-	"options": func(s *cache.Session) []string {
+	"options": func(s *cache.Session) []sessionOption {
 		return showOptions(s.Options())
 	},
 })
@@ -919,7 +919,11 @@
 <h2>Overlays</h2>
 <ul>{{range .Overlays}}<li>{{template "filelink" .}}</li>{{end}}</ul>
 <h2>Options</h2>
-{{range options .}}<p>{{.}}{{end}}
+{{range options .}}
+<p><b>{{.Name}}</b> {{.Type}}</p>
+<p><i>default:</i> {{.Default}}</p>
+{{if ne .Default .Current}}<p><i>current:</i> {{.Current}}</p>{{end}}
+{{end}}
 {{end}}
 `))
 
diff --git a/internal/lsp/definition.go b/internal/lsp/definition.go
index f8932d9..599228a 100644
--- a/internal/lsp/definition.go
+++ b/internal/lsp/definition.go
@@ -13,13 +13,12 @@
 )
 
 func (s *Server) definition(ctx context.Context, params *protocol.DefinitionParams) ([]protocol.Location, error) {
-	kind := source.DetectLanguage("", params.TextDocument.URI.SpanURI().Filename())
-	snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, kind)
+	snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.UnknownKind)
 	defer release()
 	if !ok {
 		return nil, err
 	}
-	if fh.Kind() == source.Tmpl {
+	if snapshot.View().FileKind(fh) == source.Tmpl {
 		return template.Definition(snapshot, fh, params.Position)
 	}
 	ident, err := source.Identifier(ctx, snapshot, fh, params.Position)
diff --git a/internal/lsp/diagnostics.go b/internal/lsp/diagnostics.go
index d931f51..3bf8122 100644
--- a/internal/lsp/diagnostics.go
+++ b/internal/lsp/diagnostics.go
@@ -21,6 +21,7 @@
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/lsp/template"
+	"golang.org/x/tools/internal/lsp/work"
 	"golang.org/x/tools/internal/span"
 	"golang.org/x/tools/internal/xcontext"
 	errors "golang.org/x/xerrors"
@@ -35,6 +36,7 @@
 	analysisSource
 	typeCheckSource
 	orphanedSource
+	workSource
 )
 
 // A diagnosticReport holds results for a single diagnostic source.
@@ -153,7 +155,7 @@
 		if snapshot.IsBuiltin(ctx, uri) {
 			continue
 		}
-		pkgs, err := snapshot.PackagesForFile(ctx, uri, source.TypecheckFull)
+		pkgs, err := snapshot.PackagesForFile(ctx, uri, source.TypecheckFull, false)
 		if err != nil {
 			// TODO (findleyr): we should probably do something with the error here,
 			// but as of now this can fail repeatedly if load fails, so can be too
@@ -210,6 +212,23 @@
 		s.storeDiagnostics(snapshot, id.URI, modSource, diags)
 	}
 
+	// Diagnose the go.work file, if it exists.
+	workReports, workErr := work.Diagnostics(ctx, snapshot)
+	if ctx.Err() != nil {
+		log.Trace.Log(ctx, "diagnose cancelled")
+		return
+	}
+	if workErr != nil {
+		event.Error(ctx, "warning: diagnose go.work", workErr, tag.Directory.Of(snapshot.View().Folder().Filename()), tag.Snapshot.Of(snapshot.ID()))
+	}
+	for id, diags := range workReports {
+		if id.URI == "" {
+			event.Error(ctx, "missing URI for work file diagnostics", fmt.Errorf("empty URI"), tag.Directory.Of(snapshot.View().Folder().Filename()))
+			continue
+		}
+		s.storeDiagnostics(snapshot, id.URI, workSource, diags)
+	}
+
 	// Diagnose all of the packages in the workspace.
 	wsPkgs, err := snapshot.ActivePackages(ctx)
 	if s.shouldIgnoreError(ctx, snapshot, err) {
@@ -288,7 +307,11 @@
 		return
 	}
 	for _, cgf := range pkg.CompiledGoFiles() {
-		s.storeDiagnostics(snapshot, cgf.URI, typeCheckSource, pkgDiagnostics[cgf.URI])
+		// builtin.go exists only for documentation purposes, and is not valid Go code.
+		// Don't report distracting errors
+		if !snapshot.IsBuiltin(ctx, cgf.URI) {
+			s.storeDiagnostics(snapshot, cgf.URI, typeCheckSource, pkgDiagnostics[cgf.URI])
+		}
 	}
 	if includeAnalysis && !pkg.HasListOrParseErrors() {
 		reports, err := source.Analyze(ctx, snapshot, pkg, false)
@@ -416,14 +439,14 @@
 // If they cannot and the workspace is not otherwise unloaded, it also surfaces
 // a warning, suggesting that the user check the file for build tags.
 func (s *Server) checkForOrphanedFile(ctx context.Context, snapshot source.Snapshot, fh source.VersionedFileHandle) *source.Diagnostic {
-	if fh.Kind() != source.Go {
+	if snapshot.View().FileKind(fh) != source.Go {
 		return nil
 	}
 	// builtin files won't have a package, but they are never orphaned.
 	if snapshot.IsBuiltin(ctx, fh.URI()) {
 		return nil
 	}
-	pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), source.TypecheckWorkspace)
+	pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), source.TypecheckWorkspace, false)
 	if len(pkgs) > 0 || err == nil {
 		return nil
 	}
@@ -439,6 +462,10 @@
 	if err != nil {
 		return nil
 	}
+	// If the file no longer has a name ending in .go, this diagnostic is wrong
+	if filepath.Ext(fh.URI().Filename()) != ".go" {
+		return nil
+	}
 	// TODO(rstambler): We should be able to parse the build tags in the
 	// file and show a more specific error message. For now, put the diagnostic
 	// on the package declaration.
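The hunk above feeds go.work results into the same per-URI, per-source diagnostic bookkeeping already used for go.mod and type-check output, keyed by the new workSource constant, and drops reports with an empty URI. A minimal standalone sketch of that pattern follows; the types and names are illustrative stand-ins, not the gopls internals.

package main

import "fmt"

// diagnosticSource distinguishes producers, mirroring the iota block in
// diagnostics.go (modSource, workSource, typeCheckSource, ...).
type diagnosticSource int

const (
	modSource diagnosticSource = iota
	workSource
	typeCheckSource
)

type diagnostic struct{ Message string }

// reports maps file URI -> producer -> that producer's latest diagnostics,
// so updating one source never clobbers another source's results.
type reports map[string]map[diagnosticSource][]diagnostic

func (r reports) store(uri string, src diagnosticSource, diags []diagnostic) {
	if uri == "" {
		// Mirror the empty-URI guard in the patch: drop the report rather
		// than publishing diagnostics nobody can navigate to.
		return
	}
	if r[uri] == nil {
		r[uri] = make(map[diagnosticSource][]diagnostic)
	}
	r[uri][src] = diags
}

func main() {
	r := make(reports)
	r.store("file:///proj/go.work", workSource, []diagnostic{{Message: "directory ./missing does not exist"}})
	r.store("file:///proj/go.mod", modSource, []diagnostic{{Message: "example.com/dep can be upgraded"}})
	fmt.Println(len(r), "files have diagnostics")
}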
diff --git a/internal/lsp/fake/client.go b/internal/lsp/fake/client.go
index 331b9bd..fdc67a6 100644
--- a/internal/lsp/fake/client.go
+++ b/internal/lsp/fake/client.go
@@ -115,14 +115,14 @@
 }
 
 // ApplyEdit applies edits sent from the server.
-func (c *Client) ApplyEdit(ctx context.Context, params *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResponse, error) {
+func (c *Client) ApplyEdit(ctx context.Context, params *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) {
 	if len(params.Edit.Changes) != 0 {
-		return &protocol.ApplyWorkspaceEditResponse{FailureReason: "Edit.Changes is unsupported"}, nil
+		return &protocol.ApplyWorkspaceEditResult{FailureReason: "Edit.Changes is unsupported"}, nil
 	}
 	for _, change := range params.Edit.DocumentChanges {
 		if err := c.editor.applyProtocolEdit(ctx, change); err != nil {
 			return nil, err
 		}
 	}
-	return &protocol.ApplyWorkspaceEditResponse{Applied: true}, nil
+	return &protocol.ApplyWorkspaceEditResult{Applied: true}, nil
 }
diff --git a/internal/lsp/fake/editor.go b/internal/lsp/fake/editor.go
index c9780b8..5bce560 100644
--- a/internal/lsp/fake/editor.go
+++ b/internal/lsp/fake/editor.go
@@ -9,6 +9,7 @@
 	"context"
 	"fmt"
 	"os"
+	"path"
 	"path/filepath"
 	"regexp"
 	"strings"
@@ -114,6 +115,18 @@
 	// Whether to edit files with windows line endings.
 	WindowsLineEndings bool
 
+	// Map of language ID -> regexp to match, used to set the file type of new
+	// buffers. Applied as an overlay on top of the following defaults:
+	//  "go" -> ".*\.go"
+	//  "go.mod" -> "go\.mod"
+	//  "go.sum" -> "go\.sum"
+	//  "gotmpl" -> ".*tmpl"
+	FileAssociations map[string]string
+
+	// Settings holds arbitrary additional settings to apply to the gopls config.
+	// TODO(rfindley): replace existing EditorConfig fields with Settings.
+	Settings map[string]interface{}
+
 	ImportShortcut                 string
 	DirectoryFilters               []string
 	VerboseOutput                  bool
@@ -223,6 +236,10 @@
 		"completionBudget":        "10s",
 	}
 
+	for k, v := range e.Config.Settings {
+		config[k] = v
+	}
+
 	if e.Config.BuildFlags != nil {
 		config["buildFlags"] = e.Config.BuildFlags
 	}
@@ -287,6 +304,14 @@
 	}
 
 	params.Capabilities.TextDocument.Completion.CompletionItem.SnippetSupport = true
+	params.Capabilities.TextDocument.SemanticTokens.Requests.Full = true
+	// copied from lsp/semantic.go to avoid import cycle in tests
+	params.Capabilities.TextDocument.SemanticTokens.TokenTypes = []string{
+		"namespace", "type", "class", "enum", "interface",
+		"struct", "typeParameter", "parameter", "variable", "property", "enumMember",
+		"event", "function", "method", "macro", "keyword", "modifier", "comment",
+		"string", "number", "regexp", "operator",
+	}
 
 	// This is a bit of a hack, since the fake editor doesn't actually support
 	// watching changed files that match a specific glob pattern. However, the
@@ -370,21 +395,6 @@
 	return e.createBuffer(ctx, path, false, content)
 }
 
-func textDocumentItem(wd *Workdir, buf buffer) protocol.TextDocumentItem {
-	uri := wd.URI(buf.path)
-	languageID := ""
-	if strings.HasSuffix(buf.path, ".go") {
-		// TODO: what about go.mod files? What is their language ID?
-		languageID = "go"
-	}
-	return protocol.TextDocumentItem{
-		URI:        uri,
-		LanguageID: languageID,
-		Version:    int32(buf.version),
-		Text:       buf.text(),
-	}
-}
-
 // CreateBuffer creates a new unsaved buffer corresponding to the workdir path,
 // containing the given textual content.
 func (e *Editor) CreateBuffer(ctx context.Context, path, content string) error {
@@ -402,7 +412,13 @@
 	e.mu.Lock()
 	defer e.mu.Unlock()
 	e.buffers[path] = buf
-	item := textDocumentItem(e.sandbox.Workdir, buf)
+
+	item := protocol.TextDocumentItem{
+		URI:        e.sandbox.Workdir.URI(buf.path),
+		LanguageID: e.languageID(buf.path),
+		Version:    int32(buf.version),
+		Text:       buf.text(),
+	}
 
 	if e.Server != nil {
 		if err := e.Server.DidOpen(ctx, &protocol.DidOpenTextDocumentParams{
@@ -417,6 +433,30 @@
 	return nil
 }
 
+var defaultFileAssociations = map[string]*regexp.Regexp{
+	"go":      regexp.MustCompile(`^.*\.go$`), // '$' is important: don't match .gotmpl!
+	"go.mod":  regexp.MustCompile(`^go\.mod$`),
+	"go.sum":  regexp.MustCompile(`^go(\.work)?\.sum$`),
+	"go.work": regexp.MustCompile(`^go\.work$`),
+	"gotmpl":  regexp.MustCompile(`^.*tmpl$`),
+}
+
+func (e *Editor) languageID(p string) string {
+	base := path.Base(p)
+	for lang, re := range e.Config.FileAssociations {
+		re := regexp.MustCompile(re)
+		if re.MatchString(base) {
+			return lang
+		}
+	}
+	for lang, re := range defaultFileAssociations {
+		if re.MatchString(base) {
+			return lang
+		}
+	}
+	return ""
+}
+
 // lines returns line-ending agnostic line representation of content.
 func lines(content string) []string {
 	lines := strings.Split(content, "\n")
@@ -1068,6 +1108,16 @@
 	}, item.AdditionalTextEdits...)))
 }
 
+// Symbols executes a workspace/symbols request on the server.
+func (e *Editor) Symbols(ctx context.Context, sym string) ([]protocol.SymbolInformation, error) {
+	if e.Server == nil {
+		return nil, nil
+	}
+	params := &protocol.WorkspaceSymbolParams{Query: sym}
+	ans, err := e.Server.Symbol(ctx, params)
+	return ans, err
+}
+
 // References executes a reference request on the server.
 func (e *Editor) References(ctx context.Context, path string, pos Pos) ([]protocol.Location, error) {
 	if e.Server == nil {
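The fake editor now derives each buffer's LanguageID from its base name, checking the test-supplied FileAssociations overrides before the defaults added above. A self-contained sketch of that lookup, with the override map as a purely hypothetical example rather than anything from the test suite:

package main

import (
	"fmt"
	"path"
	"regexp"
)

// defaults copies the defaultFileAssociations table from the patch.
var defaults = map[string]*regexp.Regexp{
	"go":      regexp.MustCompile(`^.*\.go$`), // '$' keeps .gotmpl from matching
	"go.mod":  regexp.MustCompile(`^go\.mod$`),
	"go.sum":  regexp.MustCompile(`^go(\.work)?\.sum$`),
	"go.work": regexp.MustCompile(`^go\.work$`),
	"gotmpl":  regexp.MustCompile(`^.*tmpl$`),
}

// languageID mirrors the lookup order: user overrides first, defaults second,
// empty string for anything unrecognized.
func languageID(p string, overrides map[string]string) string {
	base := path.Base(p)
	for lang, re := range overrides {
		if regexp.MustCompile(re).MatchString(base) {
			return lang
		}
	}
	for lang, re := range defaults {
		if re.MatchString(base) {
			return lang
		}
	}
	return ""
}

func main() {
	overrides := map[string]string{"gotmpl": `^.*\.tmpl$`} // hypothetical override
	fmt.Println(languageID("web/index.tmpl", overrides))   // "gotmpl"
	fmt.Println(languageID("go.work", nil))                // "go.work"
	fmt.Println(languageID("main.go", nil))                // "go"
}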
diff --git a/internal/lsp/fake/workdir.go b/internal/lsp/fake/workdir.go
index d836deb..0be1d8f 100644
--- a/internal/lsp/fake/workdir.go
+++ b/internal/lsp/fake/workdir.go
@@ -12,8 +12,10 @@
 	"io/ioutil"
 	"os"
 	"path/filepath"
+	"runtime"
 	"strings"
 	"sync"
+	"time"
 
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/span"
@@ -49,16 +51,6 @@
 	return filepath.ToSlash(fp)
 }
 
-func writeTxtar(txt string, rel RelativeTo) error {
-	files := UnpackTxt(txt)
-	for name, data := range files {
-		if err := WriteFileData(name, data, rel); err != nil {
-			return errors.Errorf("writing to workdir: %w", err)
-		}
-	}
-	return nil
-}
-
 // WriteFileData writes content to the relative path, replacing the special
 // token $SANDBOX_WORKDIR with the relative root given by rel.
 func WriteFileData(path string, content []byte, rel RelativeTo) error {
@@ -67,12 +59,25 @@
 	if err := os.MkdirAll(filepath.Dir(fp), 0755); err != nil {
 		return errors.Errorf("creating nested directory: %w", err)
 	}
-	if err := ioutil.WriteFile(fp, []byte(content), 0644); err != nil {
-		return errors.Errorf("writing %q: %w", path, err)
+	backoff := 1 * time.Millisecond
+	for {
+		err := ioutil.WriteFile(fp, []byte(content), 0644)
+		if err != nil {
+			if isWindowsErrLockViolation(err) {
+				time.Sleep(backoff)
+				backoff *= 2
+				continue
+			}
+			return errors.Errorf("writing %q: %w", path, err)
+		}
+		return nil
 	}
-	return nil
 }
 
+// isWindowsErrLockViolation reports whether err is ERROR_LOCK_VIOLATION
+// on Windows.
+var isWindowsErrLockViolation = func(err error) bool { return false }
+
 // Workdir is a temporary working directory for tests. It exposes file
 // operations in terms of relative paths, and fakes file watching by triggering
 // events on file operations.
@@ -138,11 +143,21 @@
 
 // ReadFile reads a text file specified by a workdir-relative path.
 func (w *Workdir) ReadFile(path string) (string, error) {
-	b, err := ioutil.ReadFile(w.AbsPath(path))
-	if err != nil {
-		return "", err
+	backoff := 1 * time.Millisecond
+	for {
+		b, err := ioutil.ReadFile(w.AbsPath(path))
+		if err != nil {
+			if runtime.GOOS == "plan9" && strings.HasSuffix(err.Error(), " exclusive use file already open") {
+				// Plan 9 enforces exclusive access to locked files.
+				// Give the owner time to unlock it and retry.
+				time.Sleep(backoff)
+				backoff *= 2
+				continue
+			}
+			return "", err
+		}
+		return string(b), nil
 	}
-	return string(b), nil
 }
 
 func (w *Workdir) RegexpRange(path, re string) (Pos, Pos, error) {
diff --git a/internal/lsp/fake/workdir_windows.go b/internal/lsp/fake/workdir_windows.go
new file mode 100644
index 0000000..ed2b4bb
--- /dev/null
+++ b/internal/lsp/fake/workdir_windows.go
@@ -0,0 +1,20 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package fake
+
+import (
+	"syscall"
+
+	errors "golang.org/x/xerrors"
+)
+
+func init() {
+	// from https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
+	const ERROR_LOCK_VIOLATION syscall.Errno = 33
+
+	isWindowsErrLockViolation = func(err error) bool {
+		return errors.Is(err, ERROR_LOCK_VIOLATION)
+	}
+}
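The two hunks above give workdir file I/O a retry loop with exponential backoff, gated by a platform predicate: ERROR_LOCK_VIOLATION via the init hook on Windows, an error-string check for Plan 9's exclusive-use files on reads. A generic standalone sketch of that shape; the predicate, the absence of a retry cap, and the file name are illustrative only, not the gopls code.

package main

import (
	"errors"
	"fmt"
	"os"
	"strings"
	"time"
)

// retry keeps calling op while shouldRetry reports its error as transient,
// doubling the sleep between attempts. The patch applies this shape to
// WriteFileData and Workdir.ReadFile with platform-specific predicates.
func retry(op func() error, shouldRetry func(error) bool) error {
	backoff := 1 * time.Millisecond
	for {
		err := op()
		if err == nil || !shouldRetry(err) {
			return err
		}
		time.Sleep(backoff)
		backoff *= 2
	}
}

func main() {
	attempts := 0
	err := retry(
		func() error {
			attempts++
			if attempts < 3 {
				return errors.New("transient: file locked") // stand-in for ERROR_LOCK_VIOLATION
			}
			return os.WriteFile("example.txt", []byte("ok"), 0o644)
		},
		func(err error) bool { return strings.Contains(err.Error(), "transient") },
	)
	fmt.Printf("attempts=%d err=%v\n", attempts, err)
}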
diff --git a/internal/lsp/format.go b/internal/lsp/format.go
index 62b25d8..19736af 100644
--- a/internal/lsp/format.go
+++ b/internal/lsp/format.go
@@ -10,6 +10,7 @@
 	"golang.org/x/tools/internal/lsp/mod"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
+	"golang.org/x/tools/internal/lsp/work"
 )
 
 func (s *Server) formatting(ctx context.Context, params *protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) {
@@ -18,11 +19,13 @@
 	if !ok {
 		return nil, err
 	}
-	switch fh.Kind() {
+	switch snapshot.View().FileKind(fh) {
 	case source.Mod:
 		return mod.Format(ctx, snapshot, fh)
 	case source.Go:
 		return source.Format(ctx, snapshot, fh)
+	case source.Work:
+		return work.Format(ctx, snapshot, fh)
 	}
 	return nil, nil
 }
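Formatting (and, below, hover, highlight, definition, and link) now asks the View for the file's kind rather than the file handle, and picks up a source.Work case backed by the new work package. A toy sketch of that dispatch shape with stand-in types, not the gopls ones:

package main

import "fmt"

type fileKind int

const (
	goKind fileKind = iota
	modKind
	workKind
	tmplKind
)

// format dispatches on the kind the view reports for the file; unknown or
// unsupported kinds fall through and return nothing, mirroring the final
// nil, nil return in the patch.
func format(kind fileKind, name string) (string, error) {
	switch kind {
	case modKind:
		return "formatted go.mod: " + name, nil
	case goKind:
		return "formatted Go source: " + name, nil
	case workKind:
		return "formatted go.work: " + name, nil
	}
	return "", nil
}

func main() {
	out, _ := format(workKind, "go.work")
	fmt.Println(out)
}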
diff --git a/internal/lsp/fuzzy/symbol.go b/internal/lsp/fuzzy/symbol.go
index 062f491..df9fbd5 100644
--- a/internal/lsp/fuzzy/symbol.go
+++ b/internal/lsp/fuzzy/symbol.go
@@ -49,11 +49,6 @@
 //
 // Currently this matcher only accepts case-insensitive fuzzy patterns.
 //
-// TODO(rfindley):
-//  - implement smart-casing
-//  - implement space-separated groups
-//  - implement ', ^, and $ modifiers
-//
 // An empty pattern matches no input.
 func NewSymbolMatcher(pattern string) *SymbolMatcher {
 	m := &SymbolMatcher{}
@@ -176,7 +171,12 @@
 	//   1. 1.0 if the character starts a segment, .8 if the character start a
 	//      mid-segment word, otherwise 0.6. This carries over to immediately
 	//      following characters.
-	//   2. 1.0 if the character is part of the last segment, otherwise
+	//   2. For the final character match, the multiplier from (1) is reduced to
+	//     .8 if the next character in the input is a mid-segment word, or 0.6 if
+	//      the next character in the input is not a word or segment start. This
+	//      ensures that we favor whole-word or whole-segment matches over prefix
+	//      matches.
+	//   3. 1.0 if the character is part of the last segment, otherwise
 	//      1.0-.2*<segments from the right>, with a max segment count of 3.
 	//
 	// This is a very naive algorithm, but it is fast. There's lots of prior art
@@ -211,8 +211,20 @@
 			case m.roles[ii]&wordStart != 0 && wordStreak > streakBonus:
 				streakBonus = wordStreak
 			}
+			finalChar := pi >= m.patternLen
+			// finalCost := 1.0
+			if finalChar && streakBonus > noStreak {
+				switch {
+				case ii == inputLen-1 || m.roles[ii+1]&segmentStart != 0:
+					// Full segment: no reduction
+				case m.roles[ii+1]&wordStart != 0:
+					streakBonus = wordStreak
+				default:
+					streakBonus = noStreak
+				}
+			}
 			totScore += streakBonus * (1.0 - float64(m.segments[ii])*perSegment)
-			if pi >= m.patternLen {
+			if finalChar {
 				break
 			}
 		} else {
diff --git a/internal/lsp/fuzzy/symbol_test.go b/internal/lsp/fuzzy/symbol_test.go
index 9dc710e..cb28160 100644
--- a/internal/lsp/fuzzy/symbol_test.go
+++ b/internal/lsp/fuzzy/symbol_test.go
@@ -43,8 +43,9 @@
 		"atest",
 		"thebest",
 		"test.foo",
-		"tTest",
 		"test.foo",
+		"tTest",
+		"testage",
 		"foo.test",
 		"test",
 	}
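The scoring change above (exercised by the new "testage" test input) adjusts only the bonus for the final matched pattern character: a streak ending flush with a segment keeps its full bonus, one ending at a mid-segment word boundary is reduced, and one stopping mid-word gets the minimum, so an exact "test" now outranks the "test" prefix inside "testage". A toy illustration of just that demotion rule, leaving out the matcher's roles/segments bookkeeping:

package main

import "fmt"

const (
	segmentBonus = 1.0
	wordBonus    = 0.8
	noStreak     = 0.6
)

// adjustFinalBonus demotes the streak bonus for the last matched pattern
// character when the match stops in the middle of a word: full credit only
// if the input ends or the next character starts a new segment, partial
// credit if it starts a new word, minimal credit otherwise.
func adjustFinalBonus(streak float64, atInputEnd, nextStartsSegment, nextStartsWord bool) float64 {
	if streak <= noStreak {
		return streak
	}
	switch {
	case atInputEnd || nextStartsSegment:
		return streak // whole-segment match keeps its full bonus
	case nextStartsWord:
		return wordBonus
	default:
		return noStreak
	}
}

func main() {
	// "test" matching all of "test" vs. the first four letters of "testage".
	fmt.Println(adjustFinalBonus(segmentBonus, true, false, false))  // 1.0
	fmt.Println(adjustFinalBonus(segmentBonus, false, false, false)) // 0.6
}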
diff --git a/internal/lsp/general.go b/internal/lsp/general.go
index 3c409d3..a3662ef 100644
--- a/internal/lsp/general.go
+++ b/internal/lsp/general.go
@@ -103,7 +103,7 @@
 		if dep.Path == "github.com/sergi/go-diff" && dep.Version == "v1.2.0" {
 			if err := s.eventuallyShowMessage(ctx, &protocol.ShowMessageParams{
 				Message: `It looks like you have a bad gopls installation.
-Please reinstall gopls by running 'GO111MODULE=on go get golang.org/x/tools/gopls@latest'.
+Please reinstall gopls by running 'GO111MODULE=on go install golang.org/x/tools/gopls@latest'.
 See https://github.com/golang/go/issues/45732 for more information.`,
 				Type: protocol.Error,
 			}); err != nil {
@@ -149,8 +149,8 @@
 					IncludeText: false,
 				},
 			},
-			Workspace: protocol.Workspace5Gn{
-				WorkspaceFolders: protocol.WorkspaceFolders4Gn{
+			Workspace: protocol.Workspace6Gn{
+				WorkspaceFolders: protocol.WorkspaceFolders5Gn{
 					Supported:           true,
 					ChangeNotifications: "workspace/didChangeWorkspaceFolders",
 				},
@@ -188,20 +188,17 @@
 	}
 	s.pendingFolders = nil
 
+	var registrations []protocol.Registration
 	if options.ConfigurationSupported && options.DynamicConfigurationSupported {
-		registrations := []protocol.Registration{
-			{
-				ID:     "workspace/didChangeConfiguration",
-				Method: "workspace/didChangeConfiguration",
-			},
-			{
-				ID:     "workspace/didChangeWorkspaceFolders",
-				Method: "workspace/didChangeWorkspaceFolders",
-			},
-		}
-		if options.SemanticTokens {
-			registrations = append(registrations, semanticTokenRegistration(options.SemanticTypes, options.SemanticMods))
-		}
+		registrations = append(registrations, protocol.Registration{
+			ID:     "workspace/didChangeConfiguration",
+			Method: "workspace/didChangeConfiguration",
+		})
+	}
+	if options.SemanticTokens && options.DynamicRegistrationSemanticTokensSupported {
+		registrations = append(registrations, semanticTokenRegistration(options.SemanticTypes, options.SemanticMods))
+	}
+	if len(registrations) > 0 {
 		if err := s.client.RegisterCapability(ctx, &protocol.RegistrationParams{
 			Registrations: registrations,
 		}); err != nil {
@@ -235,6 +232,9 @@
 		}
 		work := s.progress.Start(ctx, "Setting up workspace", "Loading packages...", nil, nil)
 		snapshot, release, err := s.addView(ctx, folder.Name, uri)
+		if err == source.ErrViewExists {
+			continue
+		}
 		if err != nil {
 			viewErrors[uri] = err
 			work.End(fmt.Sprintf("Error loading packages: %s", err))
@@ -466,7 +466,8 @@
 		release()
 		return nil, nil, false, func() {}, err
 	}
-	if expectKind != source.UnknownKind && fh.Kind() != expectKind {
+	kind := snapshot.View().FileKind(fh)
+	if expectKind != source.UnknownKind && kind != expectKind {
 		// Wrong kind of file. Nothing to do.
 		release()
 		return nil, nil, false, func() {}, nil
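general.go now accumulates dynamic registrations per capability and issues a single client/registerCapability call only when the list is non-empty, so semantic-token registration no longer depends on configuration support. A standalone sketch of that control flow; the capability struct and the semantic-token registration literal are simplified stand-ins for source.Options and semanticTokenRegistration:

package main

import "fmt"

type registration struct{ ID, Method string }

type caps struct {
	Configuration        bool
	DynamicConfiguration bool
	SemanticTokens       bool
	DynamicSemanticToks  bool
}

// buildRegistrations mirrors the reshaped logic: each capability contributes
// its registration independently of the others.
func buildRegistrations(c caps) []registration {
	var regs []registration
	if c.Configuration && c.DynamicConfiguration {
		regs = append(regs, registration{ID: "workspace/didChangeConfiguration", Method: "workspace/didChangeConfiguration"})
	}
	if c.SemanticTokens && c.DynamicSemanticToks {
		regs = append(regs, registration{ID: "textDocument/semanticTokens", Method: "textDocument/semanticTokens"})
	}
	return regs
}

func main() {
	regs := buildRegistrations(caps{SemanticTokens: true, DynamicSemanticToks: true})
	if len(regs) > 0 {
		// The real server would call client.RegisterCapability here.
		fmt.Println("registering", len(regs), "capability(ies)")
	}
}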
diff --git a/internal/lsp/helper/helper.go b/internal/lsp/helper/helper.go
index e9e71e6..cadda02 100644
--- a/internal/lsp/helper/helper.go
+++ b/internal/lsp/helper/helper.go
@@ -158,6 +158,9 @@
 					nm := ""
 					if len(f.Names) > 0 {
 						nm = f.Names[0].String()
+						if nm == "_" {
+							nm = "_gen"
+						}
 					}
 					fx.paramnames = append(fx.paramnames, nm)
 				}
diff --git a/internal/lsp/highlight.go b/internal/lsp/highlight.go
index a350dd5..5dc636e 100644
--- a/internal/lsp/highlight.go
+++ b/internal/lsp/highlight.go
@@ -21,7 +21,7 @@
 		return nil, err
 	}
 
-	if fh.Kind() == source.Tmpl {
+	if snapshot.View().FileKind(fh) == source.Tmpl {
 		return template.Highlight(ctx, snapshot, fh, params.Position)
 	}
 
diff --git a/internal/lsp/hover.go b/internal/lsp/hover.go
index 1e118bc..d59f5db 100644
--- a/internal/lsp/hover.go
+++ b/internal/lsp/hover.go
@@ -11,6 +11,7 @@
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/lsp/template"
+	"golang.org/x/tools/internal/lsp/work"
 )
 
 func (s *Server) hover(ctx context.Context, params *protocol.HoverParams) (*protocol.Hover, error) {
@@ -19,13 +20,15 @@
 	if !ok {
 		return nil, err
 	}
-	switch fh.Kind() {
+	switch snapshot.View().FileKind(fh) {
 	case source.Mod:
 		return mod.Hover(ctx, snapshot, fh, params.Position)
 	case source.Go:
 		return source.Hover(ctx, snapshot, fh, params.Position)
 	case source.Tmpl:
 		return template.Hover(ctx, snapshot, fh, params.Position)
+	case source.Work:
+		return work.Hover(ctx, snapshot, fh, params.Position)
 	}
 	return nil, nil
 }
diff --git a/internal/lsp/link.go b/internal/lsp/link.go
index 87692fa..86c59fc 100644
--- a/internal/lsp/link.go
+++ b/internal/lsp/link.go
@@ -30,7 +30,7 @@
 	if !ok {
 		return nil, err
 	}
-	switch fh.Kind() {
+	switch snapshot.View().FileKind(fh) {
 	case source.Mod:
 		links, err = modLinks(ctx, snapshot, fh)
 	case source.Go:
diff --git a/internal/lsp/lsp_test.go b/internal/lsp/lsp_test.go
index f095489..ca0985a 100644
--- a/internal/lsp/lsp_test.go
+++ b/internal/lsp/lsp_test.go
@@ -53,7 +53,7 @@
 	tests.DefaultOptions(options)
 	session.SetOptions(options)
 	options.SetEnvSlice(datum.Config.Env)
-	view, snapshot, release, err := session.NewView(ctx, datum.Config.Dir, span.URIFromPath(datum.Config.Dir), "", options)
+	view, snapshot, release, err := session.NewView(ctx, datum.Config.Dir, span.URIFromPath(datum.Config.Dir), options)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -71,8 +71,7 @@
 
 	var modifications []source.FileModification
 	for filename, content := range datum.Config.Overlay {
-		kind := source.DetectLanguage("", filename)
-		if kind != source.Go {
+		if filepath.Ext(filename) != ".go" {
 			continue
 		}
 		modifications = append(modifications, source.FileModification{
@@ -117,13 +116,13 @@
 	return nil
 }
 
-func (c testClient) ApplyEdit(ctx context.Context, params *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResponse, error) {
+func (c testClient) ApplyEdit(ctx context.Context, params *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) {
 	res, err := applyTextDocumentEdits(c.runner, params.Edit.DocumentChanges)
 	if err != nil {
 		return nil, err
 	}
 	c.runner.editRecv <- res
-	return &protocol.ApplyWorkspaceEditResponse{Applied: true}, nil
+	return &protocol.ApplyWorkspaceEditResult{Applied: true}, nil
 }
 
 func (r *runner) CallHierarchy(t *testing.T, spn span.Span, expectedCalls *tests.CallHierarchyResult) {
@@ -187,7 +186,7 @@
 }
 
 func (r *runner) CodeLens(t *testing.T, uri span.URI, want []protocol.CodeLens) {
-	if source.DetectLanguage("", uri.Filename()) != source.Mod {
+	if !strings.HasSuffix(uri.Filename(), "go.mod") {
 		return
 	}
 	got, err := r.server.codeLens(r.ctx, &protocol.CodeLensParams{
@@ -718,12 +717,14 @@
 	didSomething := false
 	if hover != nil {
 		didSomething = true
-		tag := fmt.Sprintf("%s-hover", d.Name)
+		tag := fmt.Sprintf("%s-hoverdef", d.Name)
 		expectHover := string(r.data.Golden(tag, d.Src.URI().Filename(), func() ([]byte, error) {
 			return []byte(hover.Contents.Value), nil
 		}))
-		if hover.Contents.Value != expectHover {
-			t.Errorf("%s:\n%s", d.Src, tests.Diff(t, expectHover, hover.Contents.Value))
+		got := tests.StripSubscripts(hover.Contents.Value)
+		expectHover = tests.StripSubscripts(expectHover)
+		if got != expectHover {
+			t.Errorf("%s:\n%s", d.Src, tests.Diff(t, expectHover, got))
 		}
 	}
 	if !d.OnlyHover {
@@ -840,6 +841,43 @@
 	}
 }
 
+func (r *runner) Hover(t *testing.T, src span.Span, text string) {
+	m, err := r.data.Mapper(src.URI())
+	if err != nil {
+		t.Fatal(err)
+	}
+	loc, err := m.Location(src)
+	if err != nil {
+		t.Fatalf("failed for %v", err)
+	}
+	tdpp := protocol.TextDocumentPositionParams{
+		TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI},
+		Position:     loc.Range.Start,
+	}
+	params := &protocol.HoverParams{
+		TextDocumentPositionParams: tdpp,
+	}
+	hover, err := r.server.Hover(r.ctx, params)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if text == "" {
+		if hover != nil {
+			t.Errorf("want nil, got %v\n", hover)
+		}
+	} else {
+		if hover == nil {
+			t.Fatalf("want hover result to include %s, but got nil", text)
+		}
+		if got := hover.Contents.Value; got != text {
+			t.Errorf("want %v, got %v\n", text, got)
+		}
+		if want, got := loc.Range, hover.Range; want != got {
+			t.Errorf("want range %v, got %v instead", want, got)
+		}
+	}
+}
+
 func (r *runner) References(t *testing.T, src span.Span, itemList []span.Span) {
 	sm, err := r.data.Mapper(src.URI())
 	if err != nil {
@@ -980,17 +1018,20 @@
 		}
 		return
 	}
-	if got.Start == got.End {
+	if got.Range.Start == got.Range.End {
 		// Special case for 0-length ranges. Marks can't specify a 0-length range,
 		// so just compare the start.
-		if got.Start != want.Range.Start {
-			t.Errorf("prepare rename failed: incorrect point, got %v want %v", got.Start, want.Range.Start)
+		if got.Range.Start != want.Range.Start {
+			t.Errorf("prepare rename failed: incorrect point, got %v want %v", got.Range.Start, want.Range.Start)
 		}
 	} else {
-		if protocol.CompareRange(*got, want.Range) != 0 {
-			t.Errorf("prepare rename failed: incorrect range got %v want %v", *got, want.Range)
+		if protocol.CompareRange(got.Range, want.Range) != 0 {
+			t.Errorf("prepare rename failed: incorrect range got %v want %v", got.Range, want.Range)
 		}
 	}
+	if got.Placeholder != want.Text {
+		t.Errorf("prepare rename failed: incorrect text got %v want %v", got.Placeholder, want.Text)
+	}
 }
 
 func applyTextDocumentEdits(r *runner, edits []protocol.TextDocumentEdit) (map[span.URI]string, error) {
diff --git a/internal/lsp/lsprpc/lsprpc_test.go b/internal/lsp/lsprpc/lsprpc_test.go
index 24decbe..795c887 100644
--- a/internal/lsp/lsprpc/lsprpc_test.go
+++ b/internal/lsp/lsprpc/lsprpc_test.go
@@ -107,7 +107,7 @@
 	return &protocol.Hover{}, nil
 }
 
-func (s WaitableServer) Resolve(_ context.Context, item *protocol.CompletionItem) (*protocol.CompletionItem, error) {
+func (s WaitableServer) ResolveCompletionItem(_ context.Context, item *protocol.CompletionItem) (*protocol.CompletionItem, error) {
 	return item, nil
 }
 
@@ -294,7 +294,7 @@
 func TestEnvForwarding(t *testing.T) {
 	testenv.NeedsGo1Point(t, 13)
 	server := &initServer{}
-	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
 	defer cancel()
 	_, tsForwarded, cleanup := setupForwarding(ctx, t, server)
 	defer cleanup()
diff --git a/internal/lsp/mod/code_lens.go b/internal/lsp/mod/code_lens.go
index f18aaf7..b26bae7 100644
--- a/internal/lsp/mod/code_lens.go
+++ b/internal/lsp/mod/code_lens.go
@@ -14,7 +14,6 @@
 	"golang.org/x/tools/internal/lsp/command"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
-	"golang.org/x/tools/internal/span"
 )
 
 // LensFuncs returns the supported lensFuncs for go.mod files.
@@ -129,7 +128,7 @@
 		return protocol.Range{}, fmt.Errorf("no module statement in %s", fh.URI())
 	}
 	syntax := pm.File.Module.Syntax
-	return lineToRange(pm.Mapper, fh.URI(), syntax.Start, syntax.End)
+	return source.LineToRange(pm.Mapper, fh.URI(), syntax.Start, syntax.End)
 }
 
 // firstRequireRange returns the range for the first "require" in the given
@@ -150,19 +149,5 @@
 	if start.Byte == 0 || firstRequire.Start.Byte < start.Byte {
 		start, end = firstRequire.Start, firstRequire.End
 	}
-	return lineToRange(pm.Mapper, fh.URI(), start, end)
-}
-
-func lineToRange(m *protocol.ColumnMapper, uri span.URI, start, end modfile.Position) (protocol.Range, error) {
-	line, col, err := m.Converter.ToPosition(start.Byte)
-	if err != nil {
-		return protocol.Range{}, err
-	}
-	s := span.NewPoint(line, col, start.Byte)
-	line, col, err = m.Converter.ToPosition(end.Byte)
-	if err != nil {
-		return protocol.Range{}, err
-	}
-	e := span.NewPoint(line, col, end.Byte)
-	return m.Range(span.New(uri, s, e))
+	return source.LineToRange(pm.Mapper, fh.URI(), start, end)
 }
diff --git a/internal/lsp/mod/diagnostics.go b/internal/lsp/mod/diagnostics.go
index 4b4d0cb..9c49d8b 100644
--- a/internal/lsp/mod/diagnostics.go
+++ b/internal/lsp/mod/diagnostics.go
@@ -61,7 +61,7 @@
 		if !ok || req.Mod.Version == ver {
 			continue
 		}
-		rng, err := lineToRange(pm.Mapper, fh.URI(), req.Syntax.Start, req.Syntax.End)
+		rng, err := source.LineToRange(pm.Mapper, fh.URI(), req.Syntax.Start, req.Syntax.End)
 		if err != nil {
 			return nil, err
 		}
diff --git a/internal/lsp/mod/hover.go b/internal/lsp/mod/hover.go
index 82ba20f..0837e2a 100644
--- a/internal/lsp/mod/hover.go
+++ b/internal/lsp/mod/hover.go
@@ -15,7 +15,6 @@
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
-	"golang.org/x/tools/internal/span"
 	errors "golang.org/x/xerrors"
 )
 
@@ -85,20 +84,10 @@
 	}
 
 	// Get the range to highlight for the hover.
-	line, col, err := pm.Mapper.Converter.ToPosition(startPos)
+	rng, err := source.ByteOffsetsToRange(pm.Mapper, fh.URI(), startPos, endPos)
 	if err != nil {
 		return nil, err
 	}
-	start := span.NewPoint(line, col, startPos)
-
-	line, col, err = pm.Mapper.Converter.ToPosition(endPos)
-	if err != nil {
-		return nil, err
-	}
-	end := span.NewPoint(line, col, endPos)
-
-	spn = span.New(fh.URI(), start, end)
-	rng, err := pm.Mapper.Range(spn)
 	if err != nil {
 		return nil, err
 	}
diff --git a/internal/lsp/mod/mod_test.go b/internal/lsp/mod/mod_test.go
index 3298910..b2d257c 100644
--- a/internal/lsp/mod/mod_test.go
+++ b/internal/lsp/mod/mod_test.go
@@ -45,7 +45,7 @@
 	if err != nil {
 		t.Fatal(err)
 	}
-	_, _, release, err := session.NewView(ctx, "diagnostics_test", span.URIFromPath(folder), "", options)
+	_, _, release, err := session.NewView(ctx, "diagnostics_test", span.URIFromPath(folder), options)
 	release()
 	if err != nil {
 		t.Fatal(err)
diff --git a/internal/lsp/protocol/tsclient.go b/internal/lsp/protocol/tsclient.go
index 9a88f33..004cad9 100644
--- a/internal/lsp/protocol/tsclient.go
+++ b/internal/lsp/protocol/tsclient.go
@@ -2,14 +2,14 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+// Code generated (see typescript/README.md) DO NOT EDIT.
+
 package protocol
 
-// Package protocol contains data types and code for LSP jsonrpcs
+// Package protocol contains data types and code for LSP json rpcs
 // generated automatically from vscode-languageserver-node
-// commit: 0cb3812e7d540ef3a904e96df795bc37a21de9b0
-// last fetched Mon Aug 02 2021 10:08:19 GMT-0400 (Eastern Daylight Time)
-
-// Code generated (see typescript/README.md) DO NOT EDIT.
+// commit: 696f9285bf849b73745682fdb1c1feac73eb8772
+// last fetched Fri Mar 04 2022 14:48:10 GMT-0500 (Eastern Standard Time)
 
 import (
 	"context"
@@ -26,13 +26,13 @@
 	PublishDiagnostics(context.Context, *PublishDiagnosticsParams) error
 	Progress(context.Context, *ProgressParams) error
 	WorkspaceFolders(context.Context) ([]WorkspaceFolder /*WorkspaceFolder[] | null*/, error)
-	Configuration(context.Context, *ParamConfiguration) ([]interface{}, error)
+	Configuration(context.Context, *ParamConfiguration) ([]LSPAny, error)
 	WorkDoneProgressCreate(context.Context, *WorkDoneProgressCreateParams) error
 	ShowDocument(context.Context, *ShowDocumentParams) (*ShowDocumentResult, error)
 	RegisterCapability(context.Context, *RegistrationParams) error
 	UnregisterCapability(context.Context, *UnregistrationParams) error
 	ShowMessageRequest(context.Context, *ShowMessageRequestParams) (*MessageActionItem /*MessageActionItem | null*/, error)
-	ApplyEdit(context.Context, *ApplyWorkspaceEditParams) (*ApplyWorkspaceEditResponse, error)
+	ApplyEdit(context.Context, *ApplyWorkspaceEditParams) (*ApplyWorkspaceEditResult, error)
 }
 
 func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, r jsonrpc2.Request) (bool, error) {
@@ -160,8 +160,8 @@
 	return result, nil
 }
 
-func (s *clientDispatcher) Configuration(ctx context.Context, params *ParamConfiguration) ([]interface{}, error) {
-	var result []interface{}
+func (s *clientDispatcher) Configuration(ctx context.Context, params *ParamConfiguration) ([]LSPAny, error) {
+	var result []LSPAny
 	if err := s.sender.Call(ctx, "workspace/configuration", params, &result); err != nil {
 		return nil, err
 	}
@@ -196,8 +196,8 @@
 	return result, nil
 }
 
-func (s *clientDispatcher) ApplyEdit(ctx context.Context, params *ApplyWorkspaceEditParams) (*ApplyWorkspaceEditResponse, error) {
-	var result *ApplyWorkspaceEditResponse
+func (s *clientDispatcher) ApplyEdit(ctx context.Context, params *ApplyWorkspaceEditParams) (*ApplyWorkspaceEditResult, error) {
+	var result *ApplyWorkspaceEditResult
 	if err := s.sender.Call(ctx, "workspace/applyEdit", params, &result); err != nil {
 		return nil, err
 	}
diff --git a/internal/lsp/protocol/tsprotocol.go b/internal/lsp/protocol/tsprotocol.go
index fe0e749..2438d40 100644
--- a/internal/lsp/protocol/tsprotocol.go
+++ b/internal/lsp/protocol/tsprotocol.go
@@ -2,14 +2,14 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// Package protocol contains data types and code for LSP jsonrpcs
-// generated automatically from vscode-languageserver-node
-// commit: 0cb3812e7d540ef3a904e96df795bc37a21de9b0
-// last fetched Mon Aug 02 2021 10:08:19 GMT-0400 (Eastern Daylight Time)
-package protocol
-
 // Code generated (see typescript/README.md) DO NOT EDIT.
 
+// Package protocol contains data types and code for LSP json rpcs
+// generated automatically from vscode-languageserver-node
+// commit: 696f9285bf849b73745682fdb1c1feac73eb8772
+// last fetched Fri Mar 04 2022 14:48:10 GMT-0500 (Eastern Standard Time)
+package protocol
+
 import "encoding/json"
 
 /**
@@ -42,9 +42,11 @@
 }
 
 /**
- * A response returned from the apply workspace edit request.
+ * The result returned from the apply workspace edit request.
+ *
+ * @since 3.17 renamed from ApplyWorkspaceEditResponse
  */
-type ApplyWorkspaceEditResponse struct {
+type ApplyWorkspaceEditResult struct {
 	/**
 	 * Indicates whether the edit was applied or not.
 	 */
@@ -143,7 +145,7 @@
 	 * A data entry field that is preserved between a call hierarchy prepare and
 	 * incoming calls or outgoing calls requests.
 	 */
-	Data interface{} `json:"data,omitempty"`
+	Data LSPAny `json:"data,omitempty"`
 }
 
 /**
@@ -244,7 +246,7 @@
 	/**
 	 * The workspace client capabilities
 	 */
-	Workspace Workspace2Gn `json:"workspace,omitempty"`
+	Workspace Workspace3Gn `json:"workspace,omitempty"`
 	/**
 	 * Text document specific client capabilities.
 	 */
@@ -357,7 +359,7 @@
 	 *
 	 * @since 3.16.0
 	 */
-	Data interface{} `json:"data,omitempty"`
+	Data LSPAny `json:"data,omitempty"`
 }
 
 /**
@@ -454,6 +456,12 @@
 	 * can omit computing them.
 	 */
 	Only []CodeActionKind `json:"only,omitempty"`
+	/**
+	 * The reason why code actions were requested.
+	 *
+	 * @since 3.17.0
+	 */
+	TriggerKind CodeActionTriggerKind `json:"triggerKind,omitempty"`
 }
 
 /**
@@ -503,6 +511,13 @@
 }
 
 /**
+ * The reason why code actions were requested.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type CodeActionTriggerKind float64
+
+/**
  * Structure to capture a description for an error code.
  *
  * @since 3.16.0
@@ -535,7 +550,7 @@
 	 * a [CodeLensRequest](#CodeLensRequest) and a [CodeLensResolveRequest]
 	 * (#CodeLensResolveRequest)
 	 */
-	Data interface{} `json:"data,omitempty"`
+	Data LSPAny `json:"data,omitempty"`
 }
 
 /**
@@ -725,10 +740,6 @@
 		 */
 		PreselectSupport bool `json:"preselectSupport,omitempty"`
 		/**
-		 * Client supports to kee
-		 */
-
-		/**
 		 * Client supports the tag property on a completion item. Clients supporting
 		 * tags have to handle unknown tags gracefully. Clients especially need to
 		 * preserve unknown tags when sending a completion item back to the server in
@@ -798,7 +809,7 @@
 	 * when accepting a completion item that uses multi line
 	 * text in either `insertText` or `textEdit`.
 	 *
-	 * @since 3.17.0
+	 * @since 3.17.0 - proposed state
 	 */
 	InsertTextMode InsertTextMode `json:"insertTextMode,omitempty"`
 	/**
@@ -806,6 +817,25 @@
 	 * `textDocument/completion` request.
 	 */
 	ContextSupport bool `json:"contextSupport,omitempty"`
+	/**
+	 * The client supports the following `CompletionList` specific
+	 * capabilities.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	CompletionList struct {
+		/**
+		 * The client supports the following itemDefaults on
+		 * a completion list.
+		 *
+		 * The value lists the supported property names of the
+		 * `CompletionList.itemDefaults` object. If omitted
+		 * no properties are supported.
+		 *
+		 * @since 3.17.0 - proposed state
+		 */
+		ItemDefaults []string `json:"itemDefaults,omitempty"`
+	} `json:"completionList,omitempty"`
 }
 
 /**
@@ -906,6 +936,8 @@
 	 * The format of the insert text. The format applies to both the `insertText` property
 	 * and the `newText` property of a provided `textEdit`. If omitted defaults to
 	 * `InsertTextFormat.PlainText`.
+	 *
+	 * Please note that the insertTextFormat doesn't apply to `additionalTextEdits`.
 	 */
 	InsertTextFormat InsertTextFormat `json:"insertTextFormat,omitempty"`
 	/**
@@ -961,7 +993,7 @@
 	 * A data entry field that is preserved on a completion item between a
 	 * [CompletionRequest](#CompletionRequest) and a [CompletionResolveRequest](#CompletionResolveRequest).
 	 */
-	Data interface{} `json:"data,omitempty"`
+	Data LSPAny `json:"data,omitempty"`
 }
 
 /**
@@ -976,12 +1008,12 @@
  */
 type CompletionItemLabelDetails struct {
 	/**
-	 * An optional string which is rendered less prominently directly after {@link CompletionItemLabel.label label},
+	 * An optional string which is rendered less prominently directly after {@link CompletionItem.label label},
 	 * without any spacing. Should be used for function signatures or type annotations.
 	 */
 	Detail string `json:"detail,omitempty"`
 	/**
-	 * An optional string which is rendered less prominently after {@link CompletionItemLabel.detail}. Should be used
+	 * An optional string which is rendered less prominently after {@link CompletionItem.detail}. Should be used
 	 * for fully qualified names or file path.
 	 */
 	Description string `json:"description,omitempty"`
@@ -1005,6 +1037,47 @@
 	 */
 	IsIncomplete bool `json:"isIncomplete"`
 	/**
+	 * In many cases the items of an actual completion result share the same
+	 * value for properties like `commitCharacters` or the range of a text
+	 * edit. A completion list can therefore define item defaults which will
+	 * be used if a completion item itself doesn't specify the value.
+	 *
+	 * If a completion list specifies a default value and a completion item
+	 * also specifies a corresponding value the one from the item is used.
+	 *
+	 * Servers are only allowed to return default values if the client
+	 * signals support for this via the `completionList.itemDefaults`
+	 * capability.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	ItemDefaults struct {
+		/**
+		 * A default commit character set.
+		 *
+		 * @since 3.17.0 - proposed state
+		 */
+		CommitCharacters []string `json:"commitCharacters,omitempty"`
+		/**
+		 * A default edit range
+		 *
+		 * @since 3.17.0 - proposed state
+		 */
+		EditRange Range/*Range | { insert: Range; replace: Range; }*/ `json:"editRange,omitempty"`
+		/**
+		 * A default insert text format
+		 *
+		 * @since 3.17.0 - proposed state
+		 */
+		InsertTextFormat InsertTextFormat `json:"insertTextFormat,omitempty"`
+		/**
+		 * A default insert text mode
+		 *
+		 * @since 3.17.0 - proposed state
+		 */
+		InsertTextMode InsertTextMode `json:"insertTextMode,omitempty"`
+	} `json:"itemDefaults,omitempty"`
+	/**
 	 * The completion items.
 	 */
 	Items []CompletionItem `json:"items"`
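The itemDefaults comment above describes a simple precedence rule. A pared-down sketch for one default (commitCharacters), using stand-in types rather than the generated protocol structs: the item's own value wins, the list-level default fills the gap, and servers should only send defaults when the client advertised completionList.itemDefaults support.

package main

import "fmt"

type completionItem struct {
	Label            string
	CommitCharacters []string // nil means "use the list default, if any"
}

type completionList struct {
	DefaultCommitCharacters []string
	Items                   []completionItem
}

// effectiveCommitCharacters applies the precedence rule: item value first,
// then the list default.
func effectiveCommitCharacters(list completionList, item completionItem) []string {
	if item.CommitCharacters != nil {
		return item.CommitCharacters
	}
	return list.DefaultCommitCharacters
}

func main() {
	list := completionList{
		DefaultCommitCharacters: []string{"(", "."},
		Items: []completionItem{
			{Label: "Println"},
			{Label: "Printf", CommitCharacters: []string{"("}},
		},
	}
	for _, it := range list.Items {
		fmt.Println(it.Label, effectiveCommitCharacters(list, it))
	}
}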
@@ -1083,7 +1156,7 @@
 	/**
 	 * The workspace client capabilities
 	 */
-	Workspace Workspace3Gn `json:"workspace,omitempty"`
+	Workspace Workspace4Gn `json:"workspace,omitempty"`
 }
 
 type ConfigurationItem struct {
@@ -1321,6 +1394,7 @@
 	Code interface{}/*integer | string*/ `json:"code,omitempty"`
 	/**
 	 * An optional property to describe the error code.
+	 * Requires the code field (above) to be present/not null.
 	 *
 	 * @since 3.16.0
 	 */
@@ -1352,7 +1426,7 @@
 	 *
 	 * @since 3.16.0
 	 */
-	Data interface{} `json:"data,omitempty"`
+	Data LSPAny `json:"data,omitempty"`
 }
 
 /**
@@ -1397,7 +1471,38 @@
 	/**
 	 * The actual changed settings
 	 */
-	Settings interface{} `json:"settings"`
+	Settings LSPAny `json:"settings"`
+}
+
+/**
+ * The params sent in a change notebook document notification.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type DidChangeNotebookDocumentParams = struct {
+	/**
+	 * The notebook document that did change. The version number points
+	 * to the version after all provided changes have been applied. If
+	 * only the text document content of a cell changes the notebook version
+	 * doesn't necessarily have to change.
+	 */
+	NotebookDocument VersionedNotebookDocumentIdentifier `json:"notebookDocument"`
+	/**
+	 * The actual changes to the notebook document.
+	 *
+	 * The changes describe single state changes to the notebook document.
+	 * So if there are two changes c1 (at array index 0) and c2 (at array
+	 * index 1) for a notebook in state S then c1 moves the notebook from
+	 * S to S' and c2 from S' to S''. So c1 is computed on the state S and
+	 * c2 is computed on the state S'.
+	 *
+	 * To mirror the content of a notebook using change events use the following approach:
+	 * - start with the same initial content
+	 * - apply the 'notebookDocument/didChange' notifications in the order you receive them.
+	 * - apply the `NotebookChangeEvent`s in a single notification in the order
+	 *   you receive them.
+	 */
+	Change NotebookDocumentChangeEvent `json:"change"`
 }
 
 /**
@@ -1466,6 +1571,23 @@
 }
 
 /**
+ * The params sent in a close notebook document notification.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type DidCloseNotebookDocumentParams = struct {
+	/**
+	 * The notebook document that got closed.
+	 */
+	NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"`
+	/**
+	 * The text documents that represent the content
+	 * of a notebook cell that got closed.
+	 */
+	CellTextDocuments []TextDocumentIdentifier `json:"cellTextDocuments"`
+}
+
+/**
  * The parameters send in a close text document notification
  */
 type DidCloseTextDocumentParams struct {
@@ -1476,6 +1598,23 @@
 }
 
 /**
+ * The params sent in a open notebook document notification.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type DidOpenNotebookDocumentParams = struct {
+	/**
+	 * The notebook document that got opened.
+	 */
+	NotebookDocument NotebookDocument `json:"notebookDocument"`
+	/**
+	 * The text documents that represent the content
+	 * of a notebook cell.
+	 */
+	CellTextDocuments []TextDocumentItem `json:"cellTextDocuments"`
+}
+
+/**
  * The parameters send in a open text document notification
  */
 type DidOpenTextDocumentParams struct {
@@ -1486,6 +1625,18 @@
 }
 
 /**
+ * The params sent in a save notebook document notification.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type DidSaveNotebookDocumentParams = struct {
+	/**
+	 * The notebook document that got saved.
+	 */
+	NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"`
+}
+
+/**
  * The parameters send in a save text document notification
  */
 type DidSaveTextDocumentParams struct {
@@ -1538,6 +1689,15 @@
  */
 type DocumentDiagnosticParams struct {
 	/**
+	 * An optional token that a server can use to report work done progress.
+	 */
+	WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
+	/**
+	 * An optional token that a server can use to report partial results (e.g. streaming) to
+	 * the client.
+	 */
+	PartialResultToken ProgressToken `json:"partialResultToken,omitempty"`
+	/**
 	 * The text document.
 	 */
 	TextDocument TextDocumentIdentifier `json:"textDocument"`
@@ -1549,8 +1709,6 @@
 	 * The result id of a previous response if provided.
 	 */
 	PreviousResultID string `json:"previousResultId,omitempty"`
-	WorkDoneProgressParams
-	PartialResultParams
 }
 
 /**
@@ -1565,29 +1723,12 @@
 type DocumentDiagnosticReport = interface{} /*RelatedFullDocumentDiagnosticReport | RelatedUnchangedDocumentDiagnosticReport*/
 
 /**
- * A document filter denotes a document by different properties like
- * the [language](#TextDocument.languageId), the [scheme](#Uri.scheme) of
- * its resource, or a glob-pattern that is applied to the [path](#TextDocument.fileName).
+ * A document filter describes a top level text document or
+ * a notebook cell document.
  *
- * Glob patterns can have the following syntax:
- * - `*` to match one or more characters in a path segment
- * - `?` to match on one character in a path segment
- * - `**` to match any number of path segments, including none
- * - `{}` to group sub patterns into an OR expression. (e.g. `**​/*.{ts,js}` matches all TypeScript and JavaScript files)
- * - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
- * - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
- *
- * @sample A language filter that applies to typescript files on disk: `{ language: 'typescript', scheme: 'file' }`
- * @sample A language filter that applies to all package.json paths: `{ language: 'json', pattern: '**package.json' }`
+ * @since 3.17.0 - proposed support for NotebookCellTextDocumentFilter.
  */
-type DocumentFilter = struct {
-	/** A language id, like `typescript`. */
-	Language string `json:"language"`
-	/** A Uri [scheme](#Uri.scheme), like `file` or `untitled`. */
-	Scheme string `json:"scheme,omitempty"`
-	/** A glob pattern, like `*.{ts,js}`. */
-	Pattern string `json:"pattern,omitempty"`
-}
+type DocumentFilter = interface{} /*TextDocumentFilter | NotebookCellTextDocumentFilter*/
 
 /**
  * Client capabilities of a [DocumentFormattingRequest](#DocumentFormattingRequest).
@@ -1695,7 +1836,7 @@
 	 * A data entry field that is preserved on a document link between a
 	 * DocumentLinkRequest and a DocumentLinkResolveRequest.
 	 */
-	Data interface{} `json:"data,omitempty"`
+	Data LSPAny `json:"data,omitempty"`
 }
 
 /**
@@ -1849,7 +1990,7 @@
 	 */
 	Kind SymbolKind `json:"kind"`
 	/**
-	 * Tags for this completion item.
+	 * Tags for this document symbol.
 	 *
 	 * @since 3.16.0
 	 */
@@ -1994,6 +2135,20 @@
 	WorkDoneProgressParams
 }
 
+type ExecutionSummary = struct {
+	/**
+	 * A strict monotonically increasing value
+	 * indicating the execution order of a cell
+	 * inside a notebook.
+	 */
+	ExecutionOrder uint32 `json:"executionOrder"`
+	/**
+	 * Whether the execution was successful or
+	 * not if known by the client.
+	 */
+	Success bool `json:"success,omitempty"`
+}
+
 type FailureHandlingKind string
 
 /**
@@ -2337,7 +2492,7 @@
  *
  * @since 3.17.0 - proposed state
  */
-type FullDocumentDiagnosticReport struct {
+type FullDocumentDiagnosticReport = struct {
 	/**
 	 * A full document diagnostic report.
 	 */
@@ -2376,7 +2531,7 @@
 		/**
 		 * The list of requests for which the client
 		 * will retry the request if it receives a
-		 * response with error code `ContentModified``
+		 * response with error code `ContentModified`
 		 */
 		RetryOnContentModified []string `json:"retryOnContentModified"`
 	} `json:"staleRequestSupport,omitempty"`
@@ -2528,11 +2683,11 @@
 	/**
 	 * User provided initialization options.
 	 */
-	InitializationOptions interface{} `json:"initializationOptions,omitempty"`
+	InitializationOptions LSPAny `json:"initializationOptions,omitempty"`
 	/**
 	 * The initial trace setting. If omitted trace is disabled ('off').
 	 */
-	Trace string/*'off' | 'messages' | 'verbose'*/ `json:"trace,omitempty"`
+	Trace string/* 'off' | 'messages' | 'compact' | 'verbose' */ `json:"trace,omitempty"`
 	/**
 	 * The actual configured workspace folders.
 	 */
@@ -2568,6 +2723,355 @@
 }
 
 /**
+ * Inlay hint information.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlayHint = struct {
+	/**
+	 * The position of this hint.
+	 */
+	Position *Position `json:"position"`
+	/**
+	 * The label of this hint. A human readable string or an array of
+	 * InlayHintLabelPart label parts.
+	 *
+	 * *Note* that neither the string nor the label part can be empty.
+	 */
+	Label []InlayHintLabelPart/*string | InlayHintLabelPart[]*/ `json:"label"`
+	/**
+	 * The kind of this hint. Can be omitted in which case the client
+	 * should fall back to a reasonable default.
+	 */
+	Kind InlayHintKind `json:"kind,omitempty"`
+	/**
+	 * The tooltip text when you hover over this item.
+	 */
+	Tooltip string/*string | MarkupContent*/ `json:"tooltip,omitempty"`
+	/**
+	 * Render padding before the hint.
+	 *
+	 * Note: Padding should use the editor's background color, not the
+	 * background color of the hint itself. That means padding can be used
+	 * to visually align/separate an inlay hint.
+	 */
+	PaddingLeft bool `json:"paddingLeft,omitempty"`
+	/**
+	 * Render padding after the hint.
+	 *
+	 * Note: Padding should use the editor's background color, not the
+	 * background color of the hint itself. That means padding can be used
+	 * to visually align/separate an inlay hint.
+	 */
+	PaddingRight bool `json:"paddingRight,omitempty"`
+}
+
+/**
+ * Inlay hint client capabilities
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlayHintClientCapabilities = struct {
+	/**
+	 * Whether inlay hints support dynamic registration.
+	 */
+	DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
+	/**
+	 * Indicates which properties a client can resolve lazily on an inlay
+	 * hint.
+	 */
+	ResolveSupport struct {
+		/**
+		 * The properties that a client can resolve lazily.
+		 */
+		Properties []string `json:"properties"`
+	} `json:"resolveSupport,omitempty"`
+}
+
+/**
+ * Inlay hint kinds.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlayHintKind float64
+
+/**
+ * An inlay hint label part allows for interactive and composite labels
+ * of inlay hints.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlayHintLabelPart = struct {
+	/**
+	 * The value of this label part.
+	 */
+	Value string `json:"value"`
+	/**
+	 * The tooltip text when you hover over this label part. Depending on
+	 * the client capability `inlayHint.resolveSupport` clients might resolve
+	 * this property late using the resolve request.
+	 */
+	Tooltip string/*string | MarkupContent*/ `json:"tooltip,omitempty"`
+	/**
+	 * An optional source code location that represents this
+	 * label part.
+	 *
+	 * The editor will use this location for the hover and for code navigation
+	 * features: This part will become a clickable link that resolves to the
+	 * definition of the symbol at the given location (not necessarily the
+	 * location itself), it shows the hover that shows at the given location,
+	 * and it shows a context menu with further code navigation commands.
+	 *
+	 * Depending on the client capability `inlayHint.resolveSupport` clients
+	 * might resolve this property late using the resolve request.
+	 */
+	Location *Location `json:"location,omitempty"`
+	/**
+	 * An optional command for this label part.
+	 *
+	 * Depending on the client capability `inlayHint.resolveSupport` clients
+	 * might resolve this property late using the resolve request.
+	 */
+	Command *Command `json:"command,omitempty"`
+}
+
+/**
+ * Inlay hint options used during static registration.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlayHintOptions struct {
+	WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
+	/**
+	 * The server provides support to resolve additional
+	 * information for an inlay hint item.
+	 */
+	ResolveProvider bool `json:"resolveProvider,omitempty"`
+}
+
+/**
+ * A parameter literal used in inlay hints requests.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlayHintParams struct {
+	/**
+	 * An optional token that a server can use to report work done progress.
+	 */
+	WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
+	/**
+	 * The text document.
+	 */
+	TextDocument TextDocumentIdentifier `json:"textDocument"`
+	/**
+	 * The visible document range for which inlay hints should be computed.
+	 */
+	ViewPort Range `json:"viewPort"`
+}
+
+/**
+ * Inlay hint options used during static or dynamic registration.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlayHintRegistrationOptions struct {
+	WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
+	/**
+	 * The server provides support to resolve additional
+	 * information for an inlay hint item.
+	 */
+	ResolveProvider bool `json:"resolveProvider,omitempty"`
+	/**
+	 * A document selector to identify the scope of the registration. If set to null
+	 * the document selector provided on the client side will be used.
+	 */
+	DocumentSelector DocumentSelector/*DocumentSelector | null*/ `json:"documentSelector"`
+	/**
+	 * The id used to register the request. The id can be used to deregister
+	 * the request again. See also Registration#id.
+	 */
+	ID string `json:"id,omitempty"`
+}
+
+/**
+ * Client workspace capabilities specific to inlay hints.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlayHintWorkspaceClientCapabilities = struct {
+	/**
+	 * Whether the client implementation supports a refresh request sent from
+	 * the server to the client.
+	 *
+	 * Note that this event is global and will force the client to refresh all
+	 * inlay hints currently shown. It should be used with absolute care and
+	 * is useful for situation where a server for example detects a project wide
+	 * is useful for situations where a server for example detects a project wide
+	 */
+	RefreshSupport bool `json:"refreshSupport,omitempty"`
+}
+
+/**
+ * Inline value information can be provided by different means:
+ * - directly as a text value (class InlineValueText).
+ * - as a name to use for a variable lookup (class InlineValueVariableLookup)
+ * - as an evaluatable expression (class InlineValueEvaluatableExpression)
+ * The InlineValue type combines all inline value types into one type.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValue = interface{} /* InlineValueText | InlineValueVariableLookup | InlineValueEvaluatableExpression*/
+
+/**
+ * Client capabilities specific to inline values.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueClientCapabilities = struct {
+	/**
+	 * Whether implementation supports dynamic registration for inline value providers.
+	 */
+	DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
+}
+
+/**
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueContext = struct {
+	/**
+	 * The document range where execution has stopped.
+	 * Typically the end position of the range denotes the line where the inline values are shown.
+	 */
+	StoppedLocation *Range `json:"stoppedLocation"`
+}
+
+/**
+ * Provide an inline value through an expression evaluation.
+ * If only a range is specified, the expression will be extracted from the underlying document.
+ * An optional expression can be used to override the extracted expression.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueEvaluatableExpression = struct {
+	/**
+	 * The document range for which the inline value applies.
+	 * The range is used to extract the evaluatable expression from the underlying document.
+	 */
+	Range *Range `json:"range"`
+	/**
+	 * If specified the expression overrides the extracted expression.
+	 */
+	Expression string `json:"expression,omitempty"`
+}
+
+/**
+ * Inline value options used during static registration.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueOptions = WorkDoneProgressOptions
+
+/**
+ * A parameter literal used in inline value requests.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueParams struct {
+	/**
+	 * An optional token that a server can use to report work done progress.
+	 */
+	WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
+	/**
+	 * The text document.
+	 */
+	TextDocument TextDocumentIdentifier `json:"textDocument"`
+	/**
+	 * The visible document range for which inline values should be computed.
+	 */
+	ViewPort Range `json:"viewPort"`
+	/**
+	 * Additional information about the context in which inline values were
+	 * requested.
+	 */
+	Context InlineValueContext `json:"context"`
+}
+
+/**
+ * Inline value options used during static or dynamic registration.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueRegistrationOptions struct {
+	/**
+	 * A document selector to identify the scope of the registration. If set to null
+	 * the document selector provided on the client side will be used.
+	 */
+	DocumentSelector DocumentSelector/*DocumentSelector | null*/ `json:"documentSelector"`
+	/**
+	 * The id used to register the request. The id can be used to deregister
+	 * the request again. See also Registration#id.
+	 */
+	ID string `json:"id,omitempty"`
+}
+
+/**
+ * Provide inline value as text.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueText = struct {
+	/**
+	 * The document range for which the inline value applies.
+	 */
+	Range *Range `json:"range"`
+	/**
+	 * The text of the inline value.
+	 */
+	Text string `json:"text"`
+}
+
+/**
+ * Provide inline value through a variable lookup.
+ * If only a range is specified, the variable name will be extracted from the underlying document.
+ * An optional variable name can be used to override the extracted name.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueVariableLookup = struct {
+	/**
+	 * The document range for which the inline value applies.
+	 * The range is used to extract the variable name from the underlying document.
+	 */
+	Range *Range `json:"range"`
+	/**
+	 * If specified the name of the variable to look up.
+	 */
+	VariableName string `json:"variableName,omitempty"`
+	/**
+	 * How to perform the lookup.
+	 */
+	CaseSensitiveLookup bool `json:"caseSensitiveLookup"`
+}
+
+/**
+ * Client workspace capabilities specific to inline values.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type InlineValueWorkspaceClientCapabilities = struct {
+	/**
+	 * Whether the client implementation supports a refresh request sent from the
+	 * server to the client.
+	 *
+	 * Note that this event is global and will force the client to refresh all
+	 * inline values currently shown. It should be used with absolute care and is
+	 * useful for situations where a server for example detects a project wide
+	 * change that requires such a calculation.
+	 */
+	RefreshSupport bool `json:"refreshSupport,omitempty"`
+}
+
+/**
  * A special text edit to provide an insert and a replace operation.
  *
  * @since 3.16.0
@@ -2602,6 +3106,27 @@
 type InsertTextMode float64
 
 /**
+ * The LSP any type
+ *
+ * @since 3.17.0
+ */
+type LSPAny = interface{} /* LSPObject | LSPArray | string | int32 | uint32 | Decimal | bool | float64*/
+
+/**
+ * LSP arrays.
+ *
+ * @since 3.17.0
+ */
+type LSPArray = []LSPAny
+
+/**
+ * LSP object definition.
+ *
+ * @since 3.17.0
+ */
+type LSPObject = map[string]interface{} /*[key: string]: LSPAny*/
+
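LSPAny is an alias for interface{}, so fields migrated from interface{} to LSPAny (Data, Settings, InitializationOptions, ...) still decode to whatever JSON value the peer sent. A small demonstration under that assumption; the codeLens struct and command name below are hypothetical stand-ins, not the generated protocol types:

package main

import (
	"encoding/json"
	"fmt"
)

// LSPAny mirrors the alias introduced above: any JSON value.
type LSPAny = interface{}

type codeLens struct {
	Command string `json:"command"`
	Data    LSPAny `json:"data,omitempty"`
}

func main() {
	raw := []byte(`{"command":"example.showDetails","data":{"uri":"file:///p/a_test.go","version":3}}`)
	var lens codeLens
	if err := json.Unmarshal(raw, &lens); err != nil {
		panic(err)
	}
	// JSON objects decode to map[string]interface{}, numbers to float64, etc.
	fmt.Printf("%s -> %#v\n", lens.Command, lens.Data)
}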
+/**
  * Client capabilities for the linked editing range request.
  *
  * @since 3.16.0
@@ -2720,6 +3245,13 @@
 	 * The version of the parser.
 	 */
 	Version string `json:"version,omitempty"`
+	/**
+	 * A list of HTML tags that the client allows / supports in
+	 * Markdown.
+	 *
+	 * @since 3.17.0
+	 */
+	AllowedTags []string `json:"allowedTags,omitempty"`
 }
 
 /**
@@ -2856,6 +3388,191 @@
 }
 
 /**
+ * A notebook cell.
+ *
+ * A cell's document URI must be unique across ALL notebook
+ * cells and can therefore be used to uniquely identify a
+ * notebook cell or the cell's text document.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type NotebookCell = struct {
+	/**
+	 * The cell's kind
+	 */
+	Kind NotebookCellKind `json:"kind"`
+	/**
+	 * The URI of the cell's text document
+	 * content.
+	 */
+	Document DocumentURI `json:"document"`
+	/**
+	 * Additional metadata stored with the cell.
+	 */
+	Metadata LSPObject `json:"metadata,omitempty"`
+	/**
+	 * Additional execution summary information
+	 * if supported by the client.
+	 */
+	ExecutionSummary ExecutionSummary `json:"executionSummary,omitempty"`
+}
+
+/**
+ * A change describing how to move a `NotebookCell`
+ * array from state S to S'.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type NotebookCellArrayChange = struct {
+	/**
+	 * The start offset of the cell that changed.
+	 */
+	Start uint32 `json:"start"`
+	/**
+	 * The deleted cells
+	 */
+	DeleteCount uint32 `json:"deleteCount"`
+	/**
+	 * The new cells, if any
+	 */
+	Cells []NotebookCell `json:"cells,omitempty"`
+}
+
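// Editor's sketch (illustrative only, not part of this patch): a
// NotebookCellArrayChange is a splice: starting at Start, DeleteCount cells are
// removed and Cells are inserted in their place. One way a client could apply
// it (bounds checking omitted for brevity):
func applyCellArrayChange(cells []NotebookCell, c NotebookCellArrayChange) []NotebookCell {
	out := append([]NotebookCell{}, cells[:c.Start]...)
	out = append(out, c.Cells...)
	return append(out, cells[c.Start+c.DeleteCount:]...)
}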
+/**
+ * A notebook cell kind.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type NotebookCellKind float64
+
+/**
+ * A notebook cell text document filter denotes a cell text
+ * document by different properties.
+ *
+ * @since 3.17.0 - proposed state.
+ */
+type NotebookCellTextDocumentFilter = struct {
+	/**
+	 * A filter that matches against the notebook
+	 * containing the notebook cell.
+	 */
+	NotebookDocument NotebookDocumentFilter `json:"notebookDocument"`
+	/**
+	 * A language id like `python`.
+	 *
+	 * Will be matched against the language id of the
+	 * notebook cell document.
+	 */
+	CellLanguage string `json:"cellLanguage,omitempty"`
+}
+
+/**
+ * A notebook document.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type NotebookDocument = struct {
+	/**
+	 * The notebook document's uri.
+	 */
+	URI URI `json:"uri"`
+	/**
+	 * The type of the notebook.
+	 */
+	NotebookType string `json:"notebookType"`
+	/**
+	 * The version number of this document (it will increase after each
+	 * change, including undo/redo).
+	 */
+	Version int32 `json:"version"`
+	/**
+	 * Additional metadata stored with the notebook
+	 * document.
+	 */
+	Metadata LSPObject `json:"metadata,omitempty"`
+	/**
+	 * The cells of a notebook.
+	 */
+	Cells []NotebookCell `json:"cells"`
+}
+
+/**
+ * A change event for a notebook document.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type NotebookDocumentChangeEvent = struct {
+	/**
+	 * The changed meta data if any.
+	 */
+	Metadata LSPObject `json:"metadata,omitempty"`
+	/**
+	 * Changes to cells
+	 */
+	Cells struct {
+		/**
+		 * Changes to the cell structure to add or
+		 * remove cells.
+		 */
+		Structure struct {
+			/**
+			 * The change to the cell array.
+			 */
+			Array NotebookCellArrayChange `json:"array"`
+			/**
+			 * Additional opened cell text documents.
+			 */
+			DidOpen []TextDocumentItem `json:"didOpen,omitempty"`
+			/**
+			 * Additional closed cell text documents.
+			 */
+			DidClose []TextDocumentIdentifier `json:"didClose,omitempty"`
+		} `json:"structure,omitempty"`
+		/**
+		 * Changes to notebook cells properties like its
+		 * kind, execution summary or metadata.
+		 */
+		Data []NotebookCell `json:"data,omitempty"`
+		/**
+		 * Changes to the text content of notebook cells.
+		 */
+		TextContent []struct {
+			Document VersionedTextDocumentIdentifier  `json:"document"`
+			Changes  []TextDocumentContentChangeEvent `json:"changes"`
+		} `json:"textContent,omitempty"`
+	} `json:"cells,omitempty"`
+}
+
+/**
+ * A notebook document filter denotes a notebook document by
+ * different properties.
+ *
+ * @since 3.17.0 - proposed state.
+ */
+type NotebookDocumentFilter = struct {
+	/** The type of the enclosing notebook. */
+	NotebookType string `json:"notebookType"`
+	/** A Uri [scheme](#Uri.scheme), like `file` or `untitled`.
+	 * Will be matched against the URI of the notebook. */
+	Scheme string `json:"scheme,omitempty"`
+	/** A glob pattern, like `*.ipynb`.
+	 * Will be matched against the notebook's URI path section. */
+	Pattern string `json:"pattern,omitempty"`
+}
+
+/**
+ * A literal to identify a notebook document in the client.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type NotebookDocumentIdentifier = struct {
+	/**
+	 * The notebook document's uri.
+	 */
+	URI URI `json:"uri"`
+}
+
+/**
  * A text document identifier to optionally denote a specific version of a text document.
  */
 type OptionalVersionedTextDocumentIdentifier struct {
@@ -3105,7 +3822,7 @@
 	/**
 	 * Options necessary for the registration.
 	 */
-	RegisterOptions interface{} `json:"registerOptions,omitempty"`
+	RegisterOptions LSPAny `json:"registerOptions,omitempty"`
 }
 
 type RegistrationParams struct {
@@ -3143,8 +3860,7 @@
 	 *
 	 * @since 3.17.0 - proposed state
 	 */
-	RelatedDocuments map[string]interface{}/*[uri: string ** DocumentUri *]: FullDocumentDiagnosticReport | UnchangedDocumentDiagnosticReport;*/ `json:"relatedDocuments,omitempty"`
-	FullDocumentDiagnosticReport
+	RelatedDocuments map[string]interface{} /*[uri: string ** DocumentUri *]: FullDocumentDiagnosticReport | UnchangedDocumentDiagnosticReport;*/ `json:"relatedDocuments,omitempty"`
 }
 
 /**
@@ -3162,8 +3878,7 @@
 	 *
 	 * @since 3.17.0 - proposed state
 	 */
-	RelatedDocuments map[string]interface{}/*[uri: string ** DocumentUri *]: FullDocumentDiagnosticReport | UnchangedDocumentDiagnosticReport;*/ `json:"relatedDocuments,omitempty"`
-	UnchangedDocumentDiagnosticReport
+	RelatedDocuments map[string]interface{} /*[uri: string ** DocumentUri *]: FullDocumentDiagnosticReport | UnchangedDocumentDiagnosticReport;*/ `json:"relatedDocuments,omitempty"`
 }
 
 type RenameClientCapabilities struct {
@@ -3430,6 +4145,28 @@
 	 * Whether the client supports tokens that can span multiple lines.
 	 */
 	MultilineTokenSupport bool `json:"multilineTokenSupport,omitempty"`
+	/**
+	 * Whether the client allows the server to actively cancel a
+	 * semantic token request, e.g. supports returning
+	 * LSPErrorCodes.ServerCancelled. If a server does, the client
+	 * needs to retrigger the request.
+	 *
+	 * @since 3.17.0
+	 */
+	ServerCancelSupport bool `json:"serverCancelSupport,omitempty"`
+	/**
+	 * Whether the client uses semantic tokens to augment existing
+	 * syntax tokens. If set to `true` client side created syntax
+	 * tokens and semantic tokens are both used for colorization. If
+	 * set to `false` the client only uses the returned semantic tokens
+	 * for colorization.
+	 *
+	 * If the value is `undefined` then the client behavior is not
+	 * specified.
+	 *
+	 * @since 3.17.0
+	 */
+	AugmentsSyntaxTokens bool `json:"augmentsSyntaxTokens,omitempty"`
 }
 
 /**
@@ -3684,7 +4421,7 @@
 	/**
 	 * The workspace server capabilities
 	 */
-	Workspace Workspace5Gn `json:"workspace,omitempty"`
+	Workspace Workspace6Gn `json:"workspace,omitempty"`
 	/**
 	 * The server provides moniker support.
 	 *
@@ -3692,6 +4429,24 @@
 	 */
 	MonikerProvider interface{}/* bool | MonikerOptions | MonikerRegistrationOptions*/ `json:"monikerProvider,omitempty"`
 	/**
+	 * The server provides type hierarchy support.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	TypeHierarchyProvider interface{}/* bool | TypeHierarchyOptions | TypeHierarchyRegistrationOptions*/ `json:"typeHierarchyProvider,omitempty"`
+	/**
+	 * The server provides inline values.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	InlineValueProvider interface{}/* bool | InlineValueOptions | InlineValueRegistrationOptions*/ `json:"inlineValueProvider,omitempty"`
+	/**
+	 * The server provides inlay hints.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	InlayHintProvider interface{}/* bool | InlayHintOptions | InlayHintRegistrationOptions*/ `json:"inlayHintProvider,omitempty"`
+	/**
 	 * Experimental server capabilities.
 	 */
 	Experimental interface{} `json:"experimental,omitempty"`
@@ -3815,15 +4570,27 @@
 	 */
 	Signatures []SignatureInformation `json:"signatures"`
 	/**
-	 * The active signature. Set to `null` if no
-	 * signatures exist.
+	 * The active signature. If omitted or the value lies outside the
+	 * range of `signatures` the value defaults to zero or is ignored if
+	 * the `SignatureHelp` has no signatures.
+	 *
+	 * Whenever possible implementors should make an active decision about
+	 * the active signature and shouldn't rely on a default value.
+	 *
+	 * In future versions of the protocol this property might become
+	 * mandatory to better express this.
 	 */
-	ActiveSignature uint32/*uinteger | null*/ `json:"activeSignature"`
+	ActiveSignature uint32 `json:"activeSignature,omitempty"`
 	/**
-	 * The active parameter of the active signature. Set to `null`
-	 * if the active signature has no parameters.
+	 * The active parameter of the active signature. If omitted or the value
+	 * lies outside the range of `signatures[activeSignature].parameters`
+	 * defaults to 0 if the active signature has parameters. If
+	 * the active signature has no parameters it is ignored.
+	 * In future versions of the protocol this property might become
+	 * mandatory to better express the active parameter if the
+	 * active signature does have any.
 	 */
-	ActiveParameter uint32/*uinteger | null*/ `json:"activeParameter"`
+	ActiveParameter uint32 `json:"activeParameter,omitempty"`
 }
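// Editor's sketch (illustrative only, not part of this patch): per the comments
// above, out-of-range values should fall back to zero. Assuming the
// SignatureInformation type in this file exposes a Parameters slice, a client
// might normalize a received SignatureHelp like this:
func normalizeSignatureHelp(sh *SignatureHelp) {
	if len(sh.Signatures) == 0 {
		return
	}
	if int(sh.ActiveSignature) >= len(sh.Signatures) {
		sh.ActiveSignature = 0
	}
	if int(sh.ActiveParameter) >= len(sh.Signatures[sh.ActiveSignature].Parameters) {
		sh.ActiveParameter = 0
	}
}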
 
 /**
@@ -4174,6 +4941,24 @@
 	 * @since 3.16.0
 	 */
 	Moniker MonikerClientCapabilities `json:"moniker,omitempty"`
+	/**
+	 * Capabilities specific to the various type hierarchy requests.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	TypeHierarchy TypeHierarchyClientCapabilities `json:"typeHierarchy,omitempty"`
+	/**
+	 * Capabilities specific to the `textDocument/inlineValue` request.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	InlineValue InlineValueClientCapabilities `json:"inlineValue,omitempty"`
+	/**
+	 * Capabilities specific to the `textDocument/inlayHint` request.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	InlayHint InlayHintClientCapabilities `json:"inlayHint,omitempty"`
 }
 
 /**
@@ -4218,6 +5003,33 @@
 }
 
 /**
+ * A document filter denotes a document by different properties like
+ * the [language](#TextDocument.languageId), the [scheme](#Uri.scheme) of
+ * its resource, or a glob-pattern that is applied to the [path](#TextDocument.fileName).
+ *
+ * Glob patterns can have the following syntax:
+ * - `*` to match one or more characters in a path segment
+ * - `?` to match on one character in a path segment
+ * - `**` to match any number of path segments, including none
+ * - `{}` to group sub patterns into an OR expression. (e.g. `**​/*.{ts,js}` matches all TypeScript and JavaScript files)
+ * - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
+ * - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
+ *
+ * @sample A language filter that applies to typescript files on disk: `{ language: 'typescript', scheme: 'file' }`
+ * @sample A language filter that applies to all package.json paths: `{ language: 'json', pattern: '**package.json' }`
+ *
+ * @since 3.17.0 - proposed state.
+ */
+type TextDocumentFilter = struct {
+	/** A language id, like `typescript`. */
+	Language string `json:"language"`
+	/** A Uri [scheme](#Uri.scheme), like `file` or `untitled`. */
+	Scheme string `json:"scheme,omitempty"`
+	/** A glob pattern, like `*.{ts,js}`. */
+	Pattern string `json:"pattern,omitempty"`
+}
+
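// Editor's sketch (illustrative only, not part of this patch): concrete filter
// literals in the spirit of the @sample annotations above; the language id,
// scheme and pattern values are invented for illustration.
var (
	typescriptOnDisk = TextDocumentFilter{Language: "typescript", Scheme: "file"}
	tsOrJSAnywhere   = TextDocumentFilter{Language: "typescript", Pattern: "**/*.{ts,js}"}
)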
+/**
  * A literal to identify a text document in the client.
  */
 type TextDocumentIdentifier struct {
@@ -4355,7 +5167,7 @@
 
 type TokenFormat = string
 
-type TraceValues = string /*'off' | 'messages' | 'verbose'*/
+type TraceValues = string /* 'off' | 'messages' | 'compact' | 'verbose' */
 
 /**
  * Since 3.6.0
@@ -4392,6 +5204,143 @@
 }
 
 /**
+ * @since 3.17.0 - proposed state
+ */
+type TypeHierarchyClientCapabilities = struct {
+	/**
+	 * Whether implementation supports dynamic registration. If this is set to `true`
+	 * the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
+	 * return value for the corresponding server capability as well.
+	 */
+	DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
+}
+
+/**
+ * @since 3.17.0 - proposed state
+ */
+type TypeHierarchyItem = struct {
+	/**
+	 * The name of this item.
+	 */
+	Name string `json:"name"`
+	/**
+	 * The kind of this item.
+	 */
+	Kind SymbolKind `json:"kind"`
+	/**
+	 * Tags for this item.
+	 */
+	Tags []SymbolTag `json:"tags,omitempty"`
+	/**
+	 * More detail for this item, e.g. the signature of a function.
+	 */
+	Detail string `json:"detail,omitempty"`
+	/**
+	 * The resource identifier of this item.
+	 */
+	URI DocumentURI `json:"uri"`
+	/**
+	 * The range enclosing this symbol not including leading/trailing whitespace
+	 * but everything else, e.g. comments and code.
+	 */
+	Range *Range `json:"range"`
+	/**
+	 * The range that should be selected and revealed when this symbol is being
+	 * picked, e.g. the name of a function. Must be contained by the
+	 * [`range`](#TypeHierarchyItem.range).
+	 */
+	SelectionRange *Range `json:"selectionRange"`
+	/**
+	 * A data entry field that is preserved between a type hierarchy prepare and
+	 * supertypes or subtypes requests. It could also be used to identify the
+	 * type hierarchy in the server, helping to improve performance when
+	 * resolving supertypes and subtypes.
+	 */
+	Data LSPAny `json:"data,omitempty"`
+}
+
+/**
+ * Type hierarchy options used during static registration.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type TypeHierarchyOptions = WorkDoneProgressOptions
+
+/**
+ * The parameter of a `textDocument/prepareTypeHierarchy` request.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type TypeHierarchyPrepareParams struct {
+	/**
+	 * The text document.
+	 */
+	TextDocument TextDocumentIdentifier `json:"textDocument"`
+	/**
+	 * The position inside the text document.
+	 */
+	Position Position `json:"position"`
+	/**
+	 * An optional token that a server can use to report work done progress.
+	 */
+	WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
+}
+
+/**
+ * Type hierarchy options used during static or dynamic registration.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type TypeHierarchyRegistrationOptions struct {
+	/**
+	 * A document selector to identify the scope of the registration. If set to null
+	 * the document selector provided on the client side will be used.
+	 */
+	DocumentSelector DocumentSelector/*DocumentSelector | null*/ `json:"documentSelector"`
+	/**
+	 * The id used to register the request. The id can be used to deregister
+	 * the request again. See also Registration#id.
+	 */
+	ID string `json:"id,omitempty"`
+}
+
+/**
+ * The parameter of a `typeHierarchy/subtypes` request.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type TypeHierarchySubtypesParams struct {
+	/**
+	 * An optional token that a server can use to report work done progress.
+	 */
+	WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
+	/**
+	 * An optional token that a server can use to report partial results (e.g. streaming) to
+	 * the client.
+	 */
+	PartialResultToken ProgressToken     `json:"partialResultToken,omitempty"`
+	Item               TypeHierarchyItem `json:"item"`
+}
+
+/**
+ * The parameter of a `typeHierarchy/supertypes` request.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type TypeHierarchySupertypesParams struct {
+	/**
+	 * An optional token that a server can use to report work done progress.
+	 */
+	WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
+	/**
+	 * An optional token that a server can use to report partial results (e.g. streaming) to
+	 * the client.
+	 */
+	PartialResultToken ProgressToken     `json:"partialResultToken,omitempty"`
+	Item               TypeHierarchyItem `json:"item"`
+}
+
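// Editor's sketch (illustrative only, not part of this patch): the types above
// support a prepare-then-navigate flow. A client first calls
// textDocument/prepareTypeHierarchy to obtain items, then feeds an item back
// into typeHierarchy/supertypes or typeHierarchy/subtypes. This assumes the
// Server interface methods added to tsserver.go in this change and a context
// import.
func exampleSupertypes(ctx context.Context, srv Server, doc TextDocumentIdentifier, pos Position) ([]TypeHierarchyItem, error) {
	items, err := srv.PrepareTypeHierarchy(ctx, &TypeHierarchyPrepareParams{TextDocument: doc, Position: pos})
	if err != nil || len(items) == 0 {
		return nil, err
	}
	return srv.Supertypes(ctx, &TypeHierarchySupertypesParams{Item: items[0]})
}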
+/**
  * A tagging type for string properties that are actually URIs
  *
  * @since 3.16.0
@@ -4404,7 +5353,7 @@
  *
  * @since 3.17.0 - proposed state
  */
-type UnchangedDocumentDiagnosticReport struct {
+type UnchangedDocumentDiagnosticReport = struct {
 	/**
 	 * A document diagnostic report indicating
 	 * no changes to the last result. A server can
@@ -4446,6 +5395,22 @@
 }
 
 /**
+ * A versioned notebook document identifier.
+ *
+ * @since 3.17.0 - proposed state
+ */
+type VersionedNotebookDocumentIdentifier = struct {
+	/**
+	 * The version number of this notebook document.
+	 */
+	Version int32 `json:"version"`
+	/**
+	 * The notebook document's uri.
+	 */
+	URI URI `json:"uri"`
+}
+
+/**
  * A text document identifier to denote a specific version of a text document.
  */
 type VersionedTextDocumentIdentifier struct {
@@ -4645,6 +5610,20 @@
 	 * Since 3.16.0
 	 */
 	FileOperations FileOperationClientCapabilities `json:"fileOperations,omitempty"`
+	/**
+	 * Capabilities specific to the inline values requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.17.0.
+	 */
+	InlineValue InlineValueWorkspaceClientCapabilities `json:"inlineValue,omitempty"`
+	/**
+	 * Capabilities specific to the inlay hints requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.17.0.
+	 */
+	InlayHint InlayHintWorkspaceClientCapabilities `json:"inlayHint,omitempty"`
 }
 
 /**
@@ -4654,6 +5633,15 @@
  */
 type WorkspaceDiagnosticParams struct {
 	/**
+	 * An optional token that a server can use to report work done progress.
+	 */
+	WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
+	/**
+	 * An optional token that a server can use to report partial results (e.g. streaming) to
+	 * the client.
+	 */
+	PartialResultToken ProgressToken `json:"partialResultToken,omitempty"`
+	/**
 	 * The additional identifier provided during registration.
 	 */
 	Identifier string `json:"identifier,omitempty"`
@@ -4662,8 +5650,6 @@
 	 * previous result ids.
 	 */
 	PreviousResultIds []PreviousResultID `json:"previousResultIds"`
-	WorkDoneProgressParams
-	PartialResultParams
 }
 
 /**
@@ -4671,7 +5657,7 @@
  *
  * @since 3.17.0 - proposed state
  */
-type WorkspaceDiagnosticReport struct {
+type WorkspaceDiagnosticReport = struct {
 	Items []WorkspaceDocumentDiagnosticReport `json:"items"`
 }
 
@@ -4700,7 +5686,7 @@
 	/**
 	 * Holds changes to existing resources.
 	 */
-	Changes map[string][]TextEdit/*[uri: string]: TextEdit[];*/ `json:"changes,omitempty"`
+	Changes map[DocumentURI][]TextEdit/*[uri: DocumentUri]: TextEdit[]*/ `json:"changes,omitempty"`
 	/**
 	 * Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes
 	 * are either an array of `TextDocumentEdit`s to express changes to n different text documents
@@ -4722,7 +5708,7 @@
 	 *
 	 * @since 3.16.0
 	 */
-	ChangeAnnotations map[string]ChangeAnnotationIdentifier/*[id: string * ChangeAnnotationIdentifier *]: ChangeAnnotation;*/ `json:"changeAnnotations,omitempty"`
+	ChangeAnnotations map[string]ChangeAnnotationIdentifier/*[id: ChangeAnnotationIdentifier]: ChangeAnnotation;*/ `json:"changeAnnotations,omitempty"`
 }
 
 type WorkspaceEditClientCapabilities struct {
@@ -4800,7 +5786,7 @@
 	/**
 	 * The workspace client capabilities
 	 */
-	Workspace Workspace6Gn `json:"workspace,omitempty"`
+	Workspace Workspace7Gn `json:"workspace,omitempty"`
 }
 
 type WorkspaceFoldersInitializeParams struct {
@@ -4814,7 +5800,7 @@
 	/**
 	 * The workspace server capabilities
 	 */
-	Workspace Workspace8Gn `json:"workspace,omitempty"`
+	Workspace Workspace9Gn `json:"workspace,omitempty"`
 }
 
 /**
@@ -4832,7 +5818,25 @@
 	 * If the document is not marked as open `null` can be provided.
 	 */
 	Version int32/*integer | null*/ `json:"version"`
-	FullDocumentDiagnosticReport
+}
+
+/**
+ * A special workspace symbol that supports locations without a range
+ *
+ * @since 3.17.0 - proposed state
+ */
+type WorkspaceSymbol struct {
+	/**
+	 * The location of the symbol.
+	 *
+	 * See SymbolInformation#location for more details.
+	 */
+	Location Location/*Location | { uri: DocumentUri }*/ `json:"location"`
+	/**
+	 * A data entry field that is preserved on a workspace symbol between a
+	 * workspace symbol request and a workspace symbol resolve request.
+	 */
+	Data LSPAny `json:"data,omitempty"`
 }
 
 /**
@@ -4871,12 +5875,33 @@
 		 */
 		ValueSet []SymbolTag `json:"valueSet"`
 	} `json:"tagSupport,omitempty"`
+	/**
+	 * The client supports partial workspace symbols. The client will send the
+	 * request `workspaceSymbol/resolve` to the server to resolve additional
+	 * properties.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	ResolveSupport struct {
+		/**
+		 * The properties that a client can resolve lazily. Usually
+		 * `location.range`
+		 */
+		Properties []string `json:"properties"`
+	} `json:"resolveSupport,omitempty"`
 }
 
 /**
  * Server capabilities for a [WorkspaceSymbolRequest](#WorkspaceSymbolRequest).
  */
 type WorkspaceSymbolOptions struct {
+	/**
+	 * The server provides support to resolve additional
+	 * information for a workspace symbol.
+	 *
+	 * @since 3.17.0 - proposed state
+	 */
+	ResolveProvider bool `json:"resolveProvider,omitempty"`
 	WorkDoneProgressOptions
 }
 
@@ -4908,7 +5933,6 @@
 	 * If the document is not marked as open `null` can be provided.
 	 */
 	Version int32/*integer | null*/ `json:"version"`
-	UnchangedDocumentDiagnosticReport
 }
 
 const (
@@ -4987,32 +6011,45 @@
 	 * @since 3.15.0
 	 */
 
-	SourceFixAll            CodeActionKind     = "source.fixAll"
-	TextCompletion          CompletionItemKind = 1
-	MethodCompletion        CompletionItemKind = 2
-	FunctionCompletion      CompletionItemKind = 3
-	ConstructorCompletion   CompletionItemKind = 4
-	FieldCompletion         CompletionItemKind = 5
-	VariableCompletion      CompletionItemKind = 6
-	ClassCompletion         CompletionItemKind = 7
-	InterfaceCompletion     CompletionItemKind = 8
-	ModuleCompletion        CompletionItemKind = 9
-	PropertyCompletion      CompletionItemKind = 10
-	UnitCompletion          CompletionItemKind = 11
-	ValueCompletion         CompletionItemKind = 12
-	EnumCompletion          CompletionItemKind = 13
-	KeywordCompletion       CompletionItemKind = 14
-	SnippetCompletion       CompletionItemKind = 15
-	ColorCompletion         CompletionItemKind = 16
-	FileCompletion          CompletionItemKind = 17
-	ReferenceCompletion     CompletionItemKind = 18
-	FolderCompletion        CompletionItemKind = 19
-	EnumMemberCompletion    CompletionItemKind = 20
-	ConstantCompletion      CompletionItemKind = 21
-	StructCompletion        CompletionItemKind = 22
-	EventCompletion         CompletionItemKind = 23
-	OperatorCompletion      CompletionItemKind = 24
-	TypeParameterCompletion CompletionItemKind = 25
+	SourceFixAll CodeActionKind = "source.fixAll"
+	/**
+	 * Code actions were explicitly requested by the user or by an extension.
+	 */
+
+	CodeActionInvoked CodeActionTriggerKind = 1
+	/**
+	 * Code actions were requested automatically.
+	 *
+	 * This typically happens when the current selection in a file changes, but can
+	 * also be triggered when file content changes.
+	 */
+
+	CodeActionAutomatic     CodeActionTriggerKind = 2
+	TextCompletion          CompletionItemKind    = 1
+	MethodCompletion        CompletionItemKind    = 2
+	FunctionCompletion      CompletionItemKind    = 3
+	ConstructorCompletion   CompletionItemKind    = 4
+	FieldCompletion         CompletionItemKind    = 5
+	VariableCompletion      CompletionItemKind    = 6
+	ClassCompletion         CompletionItemKind    = 7
+	InterfaceCompletion     CompletionItemKind    = 8
+	ModuleCompletion        CompletionItemKind    = 9
+	PropertyCompletion      CompletionItemKind    = 10
+	UnitCompletion          CompletionItemKind    = 11
+	ValueCompletion         CompletionItemKind    = 12
+	EnumCompletion          CompletionItemKind    = 13
+	KeywordCompletion       CompletionItemKind    = 14
+	SnippetCompletion       CompletionItemKind    = 15
+	ColorCompletion         CompletionItemKind    = 16
+	FileCompletion          CompletionItemKind    = 17
+	ReferenceCompletion     CompletionItemKind    = 18
+	FolderCompletion        CompletionItemKind    = 19
+	EnumMemberCompletion    CompletionItemKind    = 20
+	ConstantCompletion      CompletionItemKind    = 21
+	StructCompletion        CompletionItemKind    = 22
+	EventCompletion         CompletionItemKind    = 23
+	OperatorCompletion      CompletionItemKind    = 24
+	TypeParameterCompletion CompletionItemKind    = 25
 	/**
 	 * Render a completion as obsolete, usually using a strike-out.
 	 */
@@ -5155,6 +6192,16 @@
 
 	UnknownProtocolVersion InitializeError = 1
 	/**
+	 * An inlay hint that is for a type annotation.
+	 */
+
+	Type InlayHintKind = 1
+	/**
+	 * An inlay hint that is for a parameter.
+	 */
+
+	Parameter InlayHintKind = 2
+	/**
 	 * The primary text to be inserted is treated as a plain string.
 	 */
 
@@ -5235,6 +6282,16 @@
 	 */
 	Local MonikerKind = "local"
 	/**
+	 * A markup-cell is formatted source that is used for display.
+	 */
+
+	Markup NotebookCellKind = 1
+	/**
+	 * A code-cell is source code.
+	 */
+
+	Code NotebookCellKind = 2
+	/**
 	 * Supports creating new files and folders.
 	 */
 
@@ -5375,75 +6432,9 @@
 	InitializeParams
 	WorkDoneProgressParams
 }
-type Workspace2Gn struct {
-	/**
-	 * The client supports applying batch edits
-	 * to the workspace by supporting the request
-	 * 'workspace/applyEdit'
-	 */
-	ApplyEdit bool `json:"applyEdit,omitempty"`
-
-	/**
-	 * Capabilities specific to `WorkspaceEdit`s
-	 */
-	WorkspaceEdit *WorkspaceEditClientCapabilities `json:"workspaceEdit,omitempty"`
-
-	/**
-	 * Capabilities specific to the `workspace/didChangeConfiguration` notification.
-	 */
-	DidChangeConfiguration DidChangeConfigurationClientCapabilities `json:"didChangeConfiguration,omitempty"`
-
-	/**
-	 * Capabilities specific to the `workspace/didChangeWatchedFiles` notification.
-	 */
-	DidChangeWatchedFiles DidChangeWatchedFilesClientCapabilities `json:"didChangeWatchedFiles,omitempty"`
-
-	/**
-	 * Capabilities specific to the `workspace/symbol` request.
-	 */
-	Symbol *WorkspaceSymbolClientCapabilities `json:"symbol,omitempty"`
-
-	/**
-	 * Capabilities specific to the `workspace/executeCommand` request.
-	 */
-	ExecuteCommand ExecuteCommandClientCapabilities `json:"executeCommand,omitempty"`
-
-	/**
-	 * Capabilities specific to the semantic token requests scoped to the
-	 * workspace.
-	 *
-	 * @since 3.16.0.
-	 */
-	SemanticTokens SemanticTokensWorkspaceClientCapabilities `json:"semanticTokens,omitempty"`
-
-	/**
-	 * Capabilities specific to the code lens requests scoped to the
-	 * workspace.
-	 *
-	 * @since 3.16.0.
-	 */
-	CodeLens CodeLensWorkspaceClientCapabilities `json:"codeLens,omitempty"`
-
-	/**
-	 * The client has support for file notifications/requests for user operations on files.
-	 *
-	 * Since 3.16.0
-	 */
-	FileOperations *FileOperationClientCapabilities `json:"fileOperations,omitempty"`
-
-	/**
-	 * The client has support for workspace folders
-	 *
-	 * @since 3.6.0
-	 */
-	WorkspaceFolders bool `json:"workspaceFolders,omitempty"`
-
-	/**
-	 * The client supports `workspace/configuration` requests.
-	 *
-	 * @since 3.6.0
-	 */
-	Configuration bool `json:"configuration,omitempty"`
+type PrepareRename2Gn struct {
+	Range       Range  `json:"range"`
+	Placeholder string `json:"placeholder"`
 }
 type Workspace3Gn struct {
 	/**
@@ -5502,6 +6493,22 @@
 	FileOperations *FileOperationClientCapabilities `json:"fileOperations,omitempty"`
 
 	/**
+	 * Capabilities specific to the inline values requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.17.0.
+	 */
+	InlineValue InlineValueWorkspaceClientCapabilities `json:"inlineValue,omitempty"`
+
+	/**
+	 * Capabilities specific to the inlay hints requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.17.0.
+	 */
+	InlayHint InlayHintWorkspaceClientCapabilities `json:"inlayHint,omitempty"`
+
+	/**
 	 * The client has support for workspace folders
 	 *
 	 * @since 3.6.0
@@ -5515,34 +6522,7 @@
 	 */
 	Configuration bool `json:"configuration,omitempty"`
 }
-type WorkspaceFolders4Gn struct {
-	/**
-	 * The Server has support for workspace folders
-	 */
-	Supported bool `json:"supported,omitempty"`
-
-	/**
-	 * Whether the server wants to receive workspace folder
-	 * change notifications.
-	 *
-	 * If a strings is provided the string is treated as a ID
-	 * under which the notification is registered on the client
-	 * side. The ID can be used to unregister for these events
-	 * using the `client/unregisterCapability` request.
-	 */
-	ChangeNotifications string/*string | boolean*/ `json:"changeNotifications,omitempty"`
-}
-type Workspace5Gn struct {
-	/**
-	* The server is interested in notifications/requests for operations on files.
-	*
-	* @since 3.16.0
-	 */
-	FileOperations *FileOperationOptions `json:"fileOperations,omitempty"`
-
-	WorkspaceFolders WorkspaceFolders4Gn `json:"workspaceFolders,omitempty"`
-}
-type Workspace6Gn struct {
+type Workspace4Gn struct {
 	/**
 	 * The client supports applying batch edits
 	 * to the workspace by supporting the request
@@ -5599,6 +6579,22 @@
 	FileOperations *FileOperationClientCapabilities `json:"fileOperations,omitempty"`
 
 	/**
+	 * Capabilities specific to the inline values requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.17.0.
+	 */
+	InlineValue InlineValueWorkspaceClientCapabilities `json:"inlineValue,omitempty"`
+
+	/**
+	 * Capabilities specific to the inlay hints requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.17.0.
+	 */
+	InlayHint InlayHintWorkspaceClientCapabilities `json:"inlayHint,omitempty"`
+
+	/**
 	 * The client has support for workspace folders
 	 *
 	 * @since 3.6.0
@@ -5612,7 +6608,7 @@
 	 */
 	Configuration bool `json:"configuration,omitempty"`
 }
-type WorkspaceFolders7Gn struct {
+type WorkspaceFolders5Gn struct {
 	/**
 	 * The Server has support for workspace folders
 	 */
@@ -5629,7 +6625,7 @@
 	 */
 	ChangeNotifications string/*string | boolean*/ `json:"changeNotifications,omitempty"`
 }
-type Workspace8Gn struct {
+type Workspace6Gn struct {
 	/**
 	* The server is interested in notifications/requests for operations on files.
 	*
@@ -5637,5 +6633,118 @@
 	 */
 	FileOperations *FileOperationOptions `json:"fileOperations,omitempty"`
 
-	WorkspaceFolders WorkspaceFolders7Gn `json:"workspaceFolders,omitempty"`
+	WorkspaceFolders WorkspaceFolders5Gn `json:"workspaceFolders,omitempty"`
+}
+type Workspace7Gn struct {
+	/**
+	 * The client supports applying batch edits
+	 * to the workspace by supporting the request
+	 * 'workspace/applyEdit'
+	 */
+	ApplyEdit bool `json:"applyEdit,omitempty"`
+
+	/**
+	 * Capabilities specific to `WorkspaceEdit`s
+	 */
+	WorkspaceEdit *WorkspaceEditClientCapabilities `json:"workspaceEdit,omitempty"`
+
+	/**
+	 * Capabilities specific to the `workspace/didChangeConfiguration` notification.
+	 */
+	DidChangeConfiguration DidChangeConfigurationClientCapabilities `json:"didChangeConfiguration,omitempty"`
+
+	/**
+	 * Capabilities specific to the `workspace/didChangeWatchedFiles` notification.
+	 */
+	DidChangeWatchedFiles DidChangeWatchedFilesClientCapabilities `json:"didChangeWatchedFiles,omitempty"`
+
+	/**
+	 * Capabilities specific to the `workspace/symbol` request.
+	 */
+	Symbol *WorkspaceSymbolClientCapabilities `json:"symbol,omitempty"`
+
+	/**
+	 * Capabilities specific to the `workspace/executeCommand` request.
+	 */
+	ExecuteCommand ExecuteCommandClientCapabilities `json:"executeCommand,omitempty"`
+
+	/**
+	 * Capabilities specific to the semantic token requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.16.0.
+	 */
+	SemanticTokens SemanticTokensWorkspaceClientCapabilities `json:"semanticTokens,omitempty"`
+
+	/**
+	 * Capabilities specific to the code lens requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.16.0.
+	 */
+	CodeLens CodeLensWorkspaceClientCapabilities `json:"codeLens,omitempty"`
+
+	/**
+	 * The client has support for file notifications/requests for user operations on files.
+	 *
+	 * Since 3.16.0
+	 */
+	FileOperations *FileOperationClientCapabilities `json:"fileOperations,omitempty"`
+
+	/**
+	 * Capabilities specific to the inline values requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.17.0.
+	 */
+	InlineValue InlineValueWorkspaceClientCapabilities `json:"inlineValue,omitempty"`
+
+	/**
+	 * Capabilities specific to the inlay hints requests scoped to the
+	 * workspace.
+	 *
+	 * @since 3.17.0.
+	 */
+	InlayHint InlayHintWorkspaceClientCapabilities `json:"inlayHint,omitempty"`
+
+	/**
+	 * The client has support for workspace folders
+	 *
+	 * @since 3.6.0
+	 */
+	WorkspaceFolders bool `json:"workspaceFolders,omitempty"`
+
+	/**
+	 * The client supports `workspace/configuration` requests.
+	 *
+	 * @since 3.6.0
+	 */
+	Configuration bool `json:"configuration,omitempty"`
+}
+type WorkspaceFolders8Gn struct {
+	/**
+	 * The Server has support for workspace folders
+	 */
+	Supported bool `json:"supported,omitempty"`
+
+	/**
+	 * Whether the server wants to receive workspace folder
+	 * change notifications.
+	 *
+	 * If a string is provided the string is treated as an ID
+	 * under which the notification is registered on the client
+	 * side. The ID can be used to unregister for these events
+	 * using the `client/unregisterCapability` request.
+	 */
+	ChangeNotifications string/*string | boolean*/ `json:"changeNotifications,omitempty"`
+}
+type Workspace9Gn struct {
+	/**
+	* The server is interested in notifications/requests for operations on files.
+	*
+	* @since 3.16.0
+	 */
+	FileOperations *FileOperationOptions `json:"fileOperations,omitempty"`
+
+	WorkspaceFolders WorkspaceFolders8Gn `json:"workspaceFolders,omitempty"`
 }
diff --git a/internal/lsp/protocol/tsserver.go b/internal/lsp/protocol/tsserver.go
index b274eb1..db345b3 100644
--- a/internal/lsp/protocol/tsserver.go
+++ b/internal/lsp/protocol/tsserver.go
@@ -2,14 +2,14 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+// Code generated (see typescript/README.md) DO NOT EDIT.
+
 package protocol
 
-// Package protocol contains data types and code for LSP jsonrpcs
+// Package protocol contains data types and code for LSP json rpcs
 // generated automatically from vscode-languageserver-node
-// commit: 0cb3812e7d540ef3a904e96df795bc37a21de9b0
-// last fetched Mon Aug 02 2021 10:08:19 GMT-0400 (Eastern Daylight Time)
-
-// Code generated (see typescript/README.md) DO NOT EDIT.
+// commit: 696f9285bf849b73745682fdb1c1feac73eb8772
+// last fetched Fri Mar 04 2022 14:48:10 GMT-0500 (Eastern Standard Time)
 
 import (
 	"context"
@@ -34,6 +34,10 @@
 	DidSave(context.Context, *DidSaveTextDocumentParams) error
 	WillSave(context.Context, *WillSaveTextDocumentParams) error
 	DidChangeWatchedFiles(context.Context, *DidChangeWatchedFilesParams) error
+	DidOpenNotebookDocument(context.Context, *DidOpenNotebookDocumentParams) error
+	DidChangeNotebookDocument(context.Context, *DidChangeNotebookDocumentParams) error
+	DidSaveNotebookDocument(context.Context, *DidSaveNotebookDocumentParams) error
+	DidCloseNotebookDocument(context.Context, *DidCloseNotebookDocumentParams) error
 	SetTrace(context.Context, *SetTraceParams) error
 	LogTrace(context.Context, *LogTraceParams) error
 	Implementation(context.Context, *ImplementationParams) (Definition /*Definition | DefinitionLink[] | null*/, error)
@@ -55,11 +59,19 @@
 	WillRenameFiles(context.Context, *RenameFilesParams) (*WorkspaceEdit /*WorkspaceEdit | null*/, error)
 	WillDeleteFiles(context.Context, *DeleteFilesParams) (*WorkspaceEdit /*WorkspaceEdit | null*/, error)
 	Moniker(context.Context, *MonikerParams) ([]Moniker /*Moniker[] | null*/, error)
+	PrepareTypeHierarchy(context.Context, *TypeHierarchyPrepareParams) ([]TypeHierarchyItem /*TypeHierarchyItem[] | null*/, error)
+	Supertypes(context.Context, *TypeHierarchySupertypesParams) ([]TypeHierarchyItem /*TypeHierarchyItem[] | null*/, error)
+	Subtypes(context.Context, *TypeHierarchySubtypesParams) ([]TypeHierarchyItem /*TypeHierarchyItem[] | null*/, error)
+	InlineValue(context.Context, *InlineValueParams) ([]InlineValue /*InlineValue[] | null*/, error)
+	InlineValueRefresh(context.Context) error
+	InlayHint(context.Context, *InlayHintParams) ([]InlayHint /*InlayHint[] | null*/, error)
+	Resolve(context.Context, *InlayHint) (*InlayHint, error)
+	InlayHintRefresh(context.Context) error
 	Initialize(context.Context, *ParamInitialize) (*InitializeResult, error)
 	Shutdown(context.Context) error
 	WillSaveWaitUntil(context.Context, *WillSaveTextDocumentParams) ([]TextEdit /*TextEdit[] | null*/, error)
 	Completion(context.Context, *CompletionParams) (*CompletionList /*CompletionItem[] | CompletionList | null*/, error)
-	Resolve(context.Context, *CompletionItem) (*CompletionItem, error)
+	ResolveCompletionItem(context.Context, *CompletionItem) (*CompletionItem, error)
 	Hover(context.Context, *HoverParams) (*Hover /*Hover | null*/, error)
 	SignatureHelp(context.Context, *SignatureHelpParams) (*SignatureHelp /*SignatureHelp | null*/, error)
 	Definition(context.Context, *DefinitionParams) (Definition /*Definition | DefinitionLink[] | null*/, error)
@@ -68,7 +80,8 @@
 	DocumentSymbol(context.Context, *DocumentSymbolParams) ([]interface{} /*SymbolInformation[] | DocumentSymbol[] | null*/, error)
 	CodeAction(context.Context, *CodeActionParams) ([]CodeAction /*(Command | CodeAction)[] | null*/, error)
 	ResolveCodeAction(context.Context, *CodeAction) (*CodeAction, error)
-	Symbol(context.Context, *WorkspaceSymbolParams) ([]SymbolInformation /*SymbolInformation[] | null*/, error)
+	Symbol(context.Context, *WorkspaceSymbolParams) ([]SymbolInformation /*SymbolInformation[] | WorkspaceSymbol[] | null*/, error)
+	ResolveWorkspaceSymbol(context.Context, *WorkspaceSymbol) (*WorkspaceSymbol, error)
 	CodeLens(context.Context, *CodeLensParams) ([]CodeLens /*CodeLens[] | null*/, error)
 	ResolveCodeLens(context.Context, *CodeLens) (*CodeLens, error)
 	CodeLensRefresh(context.Context) error
@@ -78,8 +91,8 @@
 	RangeFormatting(context.Context, *DocumentRangeFormattingParams) ([]TextEdit /*TextEdit[] | null*/, error)
 	OnTypeFormatting(context.Context, *DocumentOnTypeFormattingParams) ([]TextEdit /*TextEdit[] | null*/, error)
 	Rename(context.Context, *RenameParams) (*WorkspaceEdit /*WorkspaceEdit | null*/, error)
-	PrepareRename(context.Context, *PrepareRenameParams) (*Range /*Range | { range: Range, placeholder: string } | { defaultBehavior: boolean } | null*/, error)
-	ExecuteCommand(context.Context, *ExecuteCommandParams) (interface{} /*any | null*/, error)
+	PrepareRename(context.Context, *PrepareRenameParams) (*PrepareRename2Gn /*Range | { range: Range; placeholder: string } | { defaultBehavior: boolean } | null*/, error)
+	ExecuteCommand(context.Context, *ExecuteCommandParams) (interface{} /* LSPAny | void | float64*/, error)
 	Diagnostic(context.Context, *string) (*string, error)
 	DiagnosticWorkspace(context.Context, *WorkspaceDiagnosticParams) (*WorkspaceDiagnosticReport, error)
 	DiagnosticRefresh(context.Context) error
@@ -182,6 +195,34 @@
 		}
 		err := server.DidChangeWatchedFiles(ctx, &params)
 		return true, reply(ctx, nil, err)
+	case "notebookDocument/didOpen": // notif
+		var params DidOpenNotebookDocumentParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		err := server.DidOpenNotebookDocument(ctx, &params)
+		return true, reply(ctx, nil, err)
+	case "notebookDocument/didChange": // notif
+		var params DidChangeNotebookDocumentParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		err := server.DidChangeNotebookDocument(ctx, &params)
+		return true, reply(ctx, nil, err)
+	case "notebookDocument/didSave": // notif
+		var params DidSaveNotebookDocumentParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		err := server.DidSaveNotebookDocument(ctx, &params)
+		return true, reply(ctx, nil, err)
+	case "notebookDocument/didClose": // notif
+		var params DidCloseNotebookDocumentParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		err := server.DidCloseNotebookDocument(ctx, &params)
+		return true, reply(ctx, nil, err)
 	case "$/setTrace": // notif
 		var params SetTraceParams
 		if err := json.Unmarshal(r.Params(), &params); err != nil {
@@ -328,6 +369,60 @@
 		}
 		resp, err := server.Moniker(ctx, &params)
 		return true, reply(ctx, resp, err)
+	case "textDocument/prepareTypeHierarchy": // req
+		var params TypeHierarchyPrepareParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		resp, err := server.PrepareTypeHierarchy(ctx, &params)
+		return true, reply(ctx, resp, err)
+	case "typeHierarchy/supertypes": // req
+		var params TypeHierarchySupertypesParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		resp, err := server.Supertypes(ctx, &params)
+		return true, reply(ctx, resp, err)
+	case "typeHierarchy/subtypes": // req
+		var params TypeHierarchySubtypesParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		resp, err := server.Subtypes(ctx, &params)
+		return true, reply(ctx, resp, err)
+	case "textDocument/inlineValue": // req
+		var params InlineValueParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		resp, err := server.InlineValue(ctx, &params)
+		return true, reply(ctx, resp, err)
+	case "workspace/inlineValue/refresh": // req
+		if len(r.Params()) > 0 {
+			return true, reply(ctx, nil, errors.Errorf("%w: expected no params", jsonrpc2.ErrInvalidParams))
+		}
+		err := server.InlineValueRefresh(ctx)
+		return true, reply(ctx, nil, err)
+	case "textDocument/inlayHint": // req
+		var params InlayHintParams
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		resp, err := server.InlayHint(ctx, &params)
+		return true, reply(ctx, resp, err)
+	case "inlayHint/resolve": // req
+		var params InlayHint
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		resp, err := server.Resolve(ctx, &params)
+		return true, reply(ctx, resp, err)
+	case "workspace/inlayHint/refresh": // req
+		if len(r.Params()) > 0 {
+			return true, reply(ctx, nil, errors.Errorf("%w: expected no params", jsonrpc2.ErrInvalidParams))
+		}
+		err := server.InlayHintRefresh(ctx)
+		return true, reply(ctx, nil, err)
 	case "initialize": // req
 		var params ParamInitialize
 		if err := json.Unmarshal(r.Params(), &params); err != nil {
@@ -362,7 +457,7 @@
 		if err := json.Unmarshal(r.Params(), &params); err != nil {
 			return true, sendParseError(ctx, reply, err)
 		}
-		resp, err := server.Resolve(ctx, &params)
+		resp, err := server.ResolveCompletionItem(ctx, &params)
 		return true, reply(ctx, resp, err)
 	case "textDocument/hover": // req
 		var params HoverParams
@@ -427,6 +522,13 @@
 		}
 		resp, err := server.Symbol(ctx, &params)
 		return true, reply(ctx, resp, err)
+	case "workspaceSymbol/resolve": // req
+		var params WorkspaceSymbol
+		if err := json.Unmarshal(r.Params(), &params); err != nil {
+			return true, sendParseError(ctx, reply, err)
+		}
+		resp, err := server.ResolveWorkspaceSymbol(ctx, &params)
+		return true, reply(ctx, resp, err)
 	case "textDocument/codeLens": // req
 		var params CodeLensParams
 		if err := json.Unmarshal(r.Params(), &params); err != nil {
@@ -585,6 +687,22 @@
 	return s.sender.Notify(ctx, "workspace/didChangeWatchedFiles", params)
 }
 
+func (s *serverDispatcher) DidOpenNotebookDocument(ctx context.Context, params *DidOpenNotebookDocumentParams) error {
+	return s.sender.Notify(ctx, "notebookDocument/didOpen", params)
+}
+
+func (s *serverDispatcher) DidChangeNotebookDocument(ctx context.Context, params *DidChangeNotebookDocumentParams) error {
+	return s.sender.Notify(ctx, "notebookDocument/didChange", params)
+}
+
+func (s *serverDispatcher) DidSaveNotebookDocument(ctx context.Context, params *DidSaveNotebookDocumentParams) error {
+	return s.sender.Notify(ctx, "notebookDocument/didSave", params)
+}
+
+func (s *serverDispatcher) DidCloseNotebookDocument(ctx context.Context, params *DidCloseNotebookDocumentParams) error {
+	return s.sender.Notify(ctx, "notebookDocument/didClose", params)
+}
+
 func (s *serverDispatcher) SetTrace(ctx context.Context, params *SetTraceParams) error {
 	return s.sender.Notify(ctx, "$/setTrace", params)
 }
@@ -740,6 +858,62 @@
 	return result, nil
 }
 
+func (s *serverDispatcher) PrepareTypeHierarchy(ctx context.Context, params *TypeHierarchyPrepareParams) ([]TypeHierarchyItem /*TypeHierarchyItem[] | null*/, error) {
+	var result []TypeHierarchyItem /*TypeHierarchyItem[] | null*/
+	if err := s.sender.Call(ctx, "textDocument/prepareTypeHierarchy", params, &result); err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+func (s *serverDispatcher) Supertypes(ctx context.Context, params *TypeHierarchySupertypesParams) ([]TypeHierarchyItem /*TypeHierarchyItem[] | null*/, error) {
+	var result []TypeHierarchyItem /*TypeHierarchyItem[] | null*/
+	if err := s.sender.Call(ctx, "typeHierarchy/supertypes", params, &result); err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+func (s *serverDispatcher) Subtypes(ctx context.Context, params *TypeHierarchySubtypesParams) ([]TypeHierarchyItem /*TypeHierarchyItem[] | null*/, error) {
+	var result []TypeHierarchyItem /*TypeHierarchyItem[] | null*/
+	if err := s.sender.Call(ctx, "typeHierarchy/subtypes", params, &result); err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+func (s *serverDispatcher) InlineValue(ctx context.Context, params *InlineValueParams) ([]InlineValue /*InlineValue[] | null*/, error) {
+	var result []InlineValue /*InlineValue[] | null*/
+	if err := s.sender.Call(ctx, "textDocument/inlineValue", params, &result); err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+func (s *serverDispatcher) InlineValueRefresh(ctx context.Context) error {
+	return s.sender.Call(ctx, "workspace/inlineValue/refresh", nil, nil)
+}
+
+func (s *serverDispatcher) InlayHint(ctx context.Context, params *InlayHintParams) ([]InlayHint /*InlayHint[] | null*/, error) {
+	var result []InlayHint /*InlayHint[] | null*/
+	if err := s.sender.Call(ctx, "textDocument/inlayHint", params, &result); err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+func (s *serverDispatcher) Resolve(ctx context.Context, params *InlayHint) (*InlayHint, error) {
+	var result *InlayHint
+	if err := s.sender.Call(ctx, "inlayHint/resolve", params, &result); err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+func (s *serverDispatcher) InlayHintRefresh(ctx context.Context) error {
+	return s.sender.Call(ctx, "workspace/inlayHint/refresh", nil, nil)
+}
+
 func (s *serverDispatcher) Initialize(ctx context.Context, params *ParamInitialize) (*InitializeResult, error) {
 	var result *InitializeResult
 	if err := s.sender.Call(ctx, "initialize", params, &result); err != nil {
@@ -768,7 +942,7 @@
 	return result, nil
 }
 
-func (s *serverDispatcher) Resolve(ctx context.Context, params *CompletionItem) (*CompletionItem, error) {
+func (s *serverDispatcher) ResolveCompletionItem(ctx context.Context, params *CompletionItem) (*CompletionItem, error) {
 	var result *CompletionItem
 	if err := s.sender.Call(ctx, "completionItem/resolve", params, &result); err != nil {
 		return nil, err
@@ -840,14 +1014,22 @@
 	return result, nil
 }
 
-func (s *serverDispatcher) Symbol(ctx context.Context, params *WorkspaceSymbolParams) ([]SymbolInformation /*SymbolInformation[] | null*/, error) {
-	var result []SymbolInformation /*SymbolInformation[] | null*/
+func (s *serverDispatcher) Symbol(ctx context.Context, params *WorkspaceSymbolParams) ([]SymbolInformation /*SymbolInformation[] | WorkspaceSymbol[] | null*/, error) {
+	var result []SymbolInformation /*SymbolInformation[] | WorkspaceSymbol[] | null*/
 	if err := s.sender.Call(ctx, "workspace/symbol", params, &result); err != nil {
 		return nil, err
 	}
 	return result, nil
 }
 
+func (s *serverDispatcher) ResolveWorkspaceSymbol(ctx context.Context, params *WorkspaceSymbol) (*WorkspaceSymbol, error) {
+	var result *WorkspaceSymbol
+	if err := s.sender.Call(ctx, "workspaceSymbol/resolve", params, &result); err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
 func (s *serverDispatcher) CodeLens(ctx context.Context, params *CodeLensParams) ([]CodeLens /*CodeLens[] | null*/, error) {
 	var result []CodeLens /*CodeLens[] | null*/
 	if err := s.sender.Call(ctx, "textDocument/codeLens", params, &result); err != nil {
@@ -916,16 +1098,16 @@
 	return result, nil
 }
 
-func (s *serverDispatcher) PrepareRename(ctx context.Context, params *PrepareRenameParams) (*Range /*Range | { range: Range, placeholder: string } | { defaultBehavior: boolean } | null*/, error) {
-	var result *Range /*Range | { range: Range, placeholder: string } | { defaultBehavior: boolean } | null*/
+func (s *serverDispatcher) PrepareRename(ctx context.Context, params *PrepareRenameParams) (*PrepareRename2Gn /*Range | { range: Range; placeholder: string } | { defaultBehavior: boolean } | null*/, error) {
+	var result *PrepareRename2Gn /*Range | { range: Range; placeholder: string } | { defaultBehavior: boolean } | null*/
 	if err := s.sender.Call(ctx, "textDocument/prepareRename", params, &result); err != nil {
 		return nil, err
 	}
 	return result, nil
 }
 
-func (s *serverDispatcher) ExecuteCommand(ctx context.Context, params *ExecuteCommandParams) (interface{} /*any | null*/, error) {
-	var result interface{} /*any | null*/
+func (s *serverDispatcher) ExecuteCommand(ctx context.Context, params *ExecuteCommandParams) (interface{} /* LSPAny | void | float64*/, error) {
+	var result interface{} /* LSPAny | void | float64*/
 	if err := s.sender.Call(ctx, "workspace/executeCommand", params, &result); err != nil {
 		return nil, err
 	}
diff --git a/internal/lsp/protocol/typescript/README.md b/internal/lsp/protocol/typescript/README.md
index 61ba187..74bcd18 100644
--- a/internal/lsp/protocol/typescript/README.md
+++ b/internal/lsp/protocol/typescript/README.md
@@ -48,7 +48,7 @@
 3. (There's a good chance that soon you will be asked to upgrade your new npm. `sudo npm install -g npm` is the command.)
 4. For either system, node and nvm should now be available. Running `node -v` and `npm -v` should produce version numbers.
 5. `npm install typescript`
-    1. This will likely give warning messages that indicate you've failed to set up a project. Ignore them.
+    1. This may give warning messages that indicate you've failed to set up a project. Ignore them.
     2. Your home directory will now have new directories `.npm` and `node_modules` (and a `package_lock.json` file)
     3. The typescript executable `tsc` will be in `node_modules/.bin`, so put that directory in your path.
     4. `tsc -v` should print "Version 4.2.4" (or later). If not you may (as I did) have an obsolete tsc earlier in your path.
diff --git a/internal/lsp/protocol/typescript/code.ts b/internal/lsp/protocol/typescript/code.ts
index a262058..dcb1b67 100644
--- a/internal/lsp/protocol/typescript/code.ts
+++ b/internal/lsp/protocol/typescript/code.ts
@@ -532,6 +532,7 @@
 function sameType(a: ts.TypeNode, b: ts.TypeNode): boolean {
   if (a.kind !== b.kind) return false;
   if (a.kind === ts.SyntaxKind.BooleanKeyword) return true;
+  if (a.kind === ts.SyntaxKind.StringKeyword) return true;
   if (ts.isTypeReferenceNode(a) && ts.isTypeReferenceNode(b) &&
     a.typeName.getText() === b.typeName.getText()) return true;
   if (ts.isArrayTypeNode(a) && ts.isArrayTypeNode(b)) return sameType(a.elementType, b.elementType);
@@ -540,7 +541,7 @@
     if (a.members.length === 1) return a.members[0].name.getText() === b.members[0].name.getText();
     if (loc(a) === loc(b)) return true;
   }
-  throw new Error(`546 sameType? ${strKind(a)} ${strKind(b)}`);
+  throw new Error(`544 sameType? ${strKind(a)} ${strKind(b)} ${a.getText()}`);
 }
 type CreateMutable<Type> = {
   -readonly [Property in keyof Type]: Type[Property];
@@ -595,12 +596,17 @@
       addToProperties(pm, ps.type, name);
     });
   } else if (strKind(tn) === 'TypeLiteral') {
-    if (!ts.isTypeLiteralNode(tn)) new Error(`598 ${strKind(tn)}`);
+    if (!ts.isTypeLiteralNode(tn)) new Error(`599 ${strKind(tn)}`);
     tn.forEachChild((child: ts.Node) => {
-      if (!ts.isPropertySignature(child)) throw new Error(`600 ${strKind(child)}`);
-      const name = `${prefix}.${child.name.getText()}`;
-      propMapSet(pm, name, child);
-      addToProperties(pm, child.type, name);
+      if (ts.isPropertySignature(child)) {
+        const name = `${prefix}.${child.name.getText()}`;
+        propMapSet(pm, name, child);
+        addToProperties(pm, child.type, name);
+      } else if (!ts.isIndexSignatureDeclaration(child)) {
+        // ignoring IndexSignatures, seen as relatedDocument in
+        // RelatedFullDocumentDiagnosticReport
+        throw new Error(`608 ${strKind(child)} ${loc(child)}`);
+      }
     });
   }
 }
@@ -721,6 +727,7 @@
   const f = function (n: ts.ExpressionWithTypeArguments) {
     if (!ts.isIdentifier(n.expression))
       throw new Error(`Interface ${nm} heritage ${strKind(n.expression)} `);
+    if (n.expression.getText() === 'Omit') return;  // Type modification type
     ans = ans.concat(goName(n.expression.getText()), '\n');
   };
   d.as.forEach((n: ts.HeritageClause) => n.types.forEach(f));
@@ -874,9 +881,8 @@
       if (a == 'NumberKeyword' && b == 'StringKeyword') {  // ID
         return `interface{} ${help}`;
       }
-      if (b == 'NullKeyword' || n.types[1].getText() === 'null') {
-        // PJW: fix this. it looks like 'null' is now being parsed as LiteralType
-        // and check the other keyword cases
+      // for null, b is not useful (LiteralType)
+      if (n.types[1].getText() === 'null') {
         if (nm == 'textDocument/codeAction') {
           // (Command | CodeAction)[] | null
           return `[]CodeAction ${help}`;
@@ -896,9 +902,11 @@
         return `*TextEdit ${help}`;
       }
       if (a == 'TypeReference') {
-        if (nm == 'edits') return `${goType(n.types[0], '715')} ${help}`;
+        if (nm == 'edits') return `${goType(n.types[0], '901')} ${help}`;
         if (a == b) return `interface{} ${help}`;
         if (nm == 'code') return `interface{} ${help}`;
+        if (nm == 'editRange') return `${goType(n.types[0], '904')} ${help}`;
+        if (nm === 'location') return `${goType(n.types[0], '905')} ${help}`;
       }
       if (a == 'StringKeyword') return `string ${help}`;
       if (a == 'TypeLiteral' && nm == 'TextDocumentContentChangeEvent') {
@@ -915,7 +923,8 @@
       const aa = strKind(n.types[0]);
       const bb = strKind(n.types[1]);
       const cc = strKind(n.types[2]);
-      if (nm == 'DocumentFilter') {
+      if (nm === 'workspace/symbol') return `${goType(n.types[0], '930')} ${help}`;
+      if (nm == 'DocumentFilter' || nm == 'NotebookDocumentFilter' || nm == 'TextDocumentFilter') {
         // not really a union. the first is enough, up to a missing
         // omitempty but avoid repetitious comments
         return `${goType(n.types[0], 'g')}`;
@@ -941,10 +950,18 @@
     }
     case 4:
       if (nm == 'documentChanges') return `TextDocumentEdit ${help} `;
-      if (nm == 'textDocument/prepareRename') return `Range ${help} `;
-    // eslint-disable-next-line no-fallthrough
+      if (nm == 'textDocument/prepareRename') {
+        // these names have to be made unique
+        const genName = `${goName("prepareRename")}${extraTypes.size}Gn`;
+        extraTypes.set(genName, [`Range       Range  \`json:"range"\`
+          Placeholder string \`json:"placeholder"\``]);
+        return `${genName} ${help} `;
+      }
+      break;
+    case 8: // LSPAny
+      break;
     default:
-      throw new Error(`goUnionType len=${n.types.length} nm=${nm}`);
+      throw new Error(`957 goUnionType len=${n.types.length} nm=${nm} ${n.getText()}`);
   }
 
   // Result will be interface{} with a comment
@@ -1048,7 +1065,7 @@
     case 'TypeReference': {
       if (!ts.isTypeReferenceNode(te)) throw new Error(`1047 impossible ${strKind(te)}`);
       const d = seenTypes.get(goName(te.typeName.getText()));
-      if (d === undefined) return false;
+      if (d === undefined || d.properties.length == 0) return false;
       if (d.properties.length > 1) return true;
       // alias or interface with a single property (The alias is Uinteger, which we ignore later)
       if (d.alias) return false;
@@ -1067,6 +1084,10 @@
     if (ts.isPropertySignature(nx)) {
       let json = u.JSON(nx);
       let typ = goType(nx.type, nx.name.getText());
+      // }/*\n*/`json:v` is not legal, the comment is a newline
+      if (typ.includes('\n') && typ.indexOf('*/') === typ.length - 2) {
+        typ = typ.replace(/\n\t*/g, ' ');
+      }
       const v = getComments(nx) || '';
       starred.forEach(([a, b]) => {
         if (a != nm || b != typ.toLowerCase()) return;
@@ -1080,12 +1101,16 @@
       const comment = nx.getText().replace(/[/]/g, '');
       if (nx.getText() == '[uri: string]: TextEdit[];') {
         res = 'map[string][]TextEdit';
-      } else if (nx.getText() == '[id: string /* ChangeAnnotationIdentifier */]: ChangeAnnotation;') {
+      } else if (nx.getText().startsWith('[id: ChangeAnnotationIdentifier]')) {
         res = 'map[string]ChangeAnnotationIdentifier';
       } else if (nx.getText().startsWith('[uri: string')) {
         res = 'map[string]interface{}';
+      } else if (nx.getText().startsWith('[uri: DocumentUri')) {
+        res = 'map[DocumentURI][]TextEdit';
+      } else if (nx.getText().startsWith('[key: string')) {
+        res = 'map[string]interface{}';
       } else {
-        throw new Error(`1088 handle ${nx.getText()} ${loc(nx)}`);
+        throw new Error(`1100 handle ${nx.getText()} ${loc(nx)}`);
       }
       res += ` /*${comment}*/`;
       ans.push(res);
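
For reference, a minimal sketch of the Go type the generator is now expected to emit for textDocument/prepareRename; the numeric suffix comes from extraTypes.size at generation time, so the exact name (PrepareRename2Gn in the hunks below) depends on generation order. The new index-signature branches translate analogously, e.g. [uri: DocumentUri]: TextEdit[] becomes map[DocumentURI][]TextEdit.

package protocol // sketch only; the real declaration is generated into tsprotocol.go

type PrepareRename2Gn struct {
	Range       Range  `json:"range"`
	Placeholder string `json:"placeholder"`
}
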
diff --git a/internal/lsp/protocol/typescript/util.ts b/internal/lsp/protocol/typescript/util.ts
index a32aab0..9475b26 100644
--- a/internal/lsp/protocol/typescript/util.ts
+++ b/internal/lsp/protocol/typescript/util.ts
@@ -15,7 +15,7 @@
   `${dir}/${srcDir}/protocol/src/browser/main.ts`, `${dir}${srcDir}/types/src/main.ts`,
   `${dir}${srcDir}/jsonrpc/src/node/main.ts`
 ];
-export const gitHash = '0cb3812e7d540ef3a904e96df795bc37a21de9b0';
+export const gitHash = '696f9285bf849b73745682fdb1c1feac73eb8772';
 let outFname = 'tsprotocol.go';
 let fda: number, fdb: number, fde: number;  // file descriptors
 
@@ -72,17 +72,17 @@
 
   `;
   const a =
-    '// Package protocol contains data types and code for LSP jsonrpcs\n' +
+    '// Package protocol contains data types and code for LSP json rpcs\n' +
     '// generated automatically from vscode-languageserver-node\n' +
     `// commit: ${gitHash}\n` +
     `// last fetched ${lastDate}\n`;
   const b = 'package protocol\n';
   const c = '\n// Code generated (see typescript/README.md) DO NOT EDIT.\n\n';
   if (pkgDoc) {
-    return cp + a + b + c;
+    return cp + c + a + b;
   }
   else {
-    return cp + b + a + c;
+    return cp + c + b + a;
   }
 }
 
@@ -112,7 +112,7 @@
   let pref = new Map<string, string>([
     ['DiagnosticSeverity', 'Severity'], ['WatchKind', 'Watch'],
     ['SignatureHelpTriggerKind', 'Sig'], ['CompletionItemTag', 'Compl'],
-    ['Integer', 'INT_'], ['Uinteger', 'UINT_']
+    ['Integer', 'INT_'], ['Uinteger', 'UINT_'], ['CodeActionTriggerKind', 'CodeAction']
   ]);  // typeName->prefix
   let suff = new Map<string, string>([
     ['CompletionItemKind', 'Completion'], ['InsertTextFormat', 'TextFormat'],
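
With the reordering above, a sketch of the expected header of the generated file when pkgDoc is true; the point is that the DO-NOT-EDIT marker now precedes the package documentation and package clause, matching the usual convention for generated Go files (copyright block elided, lastDate is a placeholder):

// Code generated (see typescript/README.md) DO NOT EDIT.

// Package protocol contains data types and code for LSP json rpcs
// generated automatically from vscode-languageserver-node
// commit: 696f9285bf849b73745682fdb1c1feac73eb8772
// last fetched <lastDate>
package protocol
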
diff --git a/internal/lsp/references.go b/internal/lsp/references.go
index d8f2f1e..f96e553 100644
--- a/internal/lsp/references.go
+++ b/internal/lsp/references.go
@@ -18,7 +18,7 @@
 	if !ok {
 		return nil, err
 	}
-	if fh.Kind() == source.Tmpl {
+	if snapshot.View().FileKind(fh) == source.Tmpl {
 		return template.References(ctx, snapshot, fh, params)
 	}
 	references, err := source.References(ctx, snapshot, fh, params.Position, params.Context.IncludeDeclaration)
diff --git a/internal/lsp/regtest/expectation.go b/internal/lsp/regtest/expectation.go
index 8fb6afb..5cf2b6c 100644
--- a/internal/lsp/regtest/expectation.go
+++ b/internal/lsp/regtest/expectation.go
@@ -518,6 +518,23 @@
 	return desc
 }
 
+// NoOutstandingDiagnostics asserts that the workspace has no outstanding
+// diagnostic messages.
+func NoOutstandingDiagnostics() Expectation {
+	check := func(s State) Verdict {
+		for _, diags := range s.diagnostics {
+			if len(diags.Diagnostics) > 0 {
+				return Unmet
+			}
+		}
+		return Met
+	}
+	return SimpleExpectation{
+		check:       check,
+		description: "no outstanding diagnostics",
+	}
+}
+
 // EmptyDiagnostics asserts that empty diagnostics are sent for the
 // workspace-relative path name.
 func EmptyDiagnostics(name string) Expectation {
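
A hedged usage sketch of the new expectation, assuming the package's existing Run helper and Env.Await:

Run(t, files, func(t *testing.T, env *Env) {
	// Wait until the server has cleared diagnostics for every file it
	// previously reported on, not just for one specific path.
	env.Await(NoOutstandingDiagnostics())
})
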
diff --git a/internal/lsp/regtest/regtest.go b/internal/lsp/regtest/regtest.go
index c1df26d..3180623 100644
--- a/internal/lsp/regtest/regtest.go
+++ b/internal/lsp/regtest/regtest.go
@@ -23,11 +23,24 @@
 var (
 	runSubprocessTests       = flag.Bool("enable_gopls_subprocess_tests", false, "run regtests against a gopls subprocess")
 	goplsBinaryPath          = flag.String("gopls_test_binary", "", "path to the gopls binary for use as a remote, for use with the -enable_gopls_subprocess_tests flag")
-	regtestTimeout           = flag.Duration("regtest_timeout", 20*time.Second, "default timeout for each regtest")
+	regtestTimeout           = flag.Duration("regtest_timeout", defaultRegtestTimeout(), "if nonzero, default timeout for each regtest; defaults to GOPLS_REGTEST_TIMEOUT")
 	skipCleanup              = flag.Bool("regtest_skip_cleanup", false, "whether to skip cleaning up temp directories")
 	printGoroutinesOnFailure = flag.Bool("regtest_print_goroutines", false, "whether to print goroutines info on failure")
 )
 
+func defaultRegtestTimeout() time.Duration {
+	s := os.Getenv("GOPLS_REGTEST_TIMEOUT")
+	if s == "" {
+		return 0
+	}
+	d, err := time.ParseDuration(s)
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "invalid GOPLS_REGTEST_TIMEOUT %q: %v\n", s, err)
+		os.Exit(2)
+	}
+	return d
+}
+
 var runner *Runner
 
 type regtestRunner interface {
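
The environment variable takes any form accepted by time.ParseDuration; a minimal sketch of the parsing the default relies on (a -regtest_timeout value passed explicitly still overrides the computed default):

package main

import (
	"fmt"
	"time"
)

func main() {
	// e.g. GOPLS_REGTEST_TIMEOUT=10m; an unset or empty variable means 0
	// (no default timeout), and an unparsable value exits with status 2.
	d, err := time.ParseDuration("10m")
	if err != nil {
		panic(err)
	}
	fmt.Println(d) // 10m0s
}
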
diff --git a/internal/lsp/regtest/runner.go b/internal/lsp/regtest/runner.go
index 05867c4..822a5a3 100644
--- a/internal/lsp/regtest/runner.go
+++ b/internal/lsp/regtest/runner.go
@@ -29,6 +29,7 @@
 	"golang.org/x/tools/internal/lsp/lsprpc"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
+	"golang.org/x/tools/internal/testenv"
 	"golang.org/x/tools/internal/xcontext"
 )
 
@@ -71,20 +72,19 @@
 }
 
 type runConfig struct {
-	editor      fake.EditorConfig
-	sandbox     fake.SandboxConfig
-	modes       Mode
-	timeout     time.Duration
-	debugAddr   string
-	skipLogs    bool
-	skipHooks   bool
-	optionsHook func(*source.Options)
+	editor           fake.EditorConfig
+	sandbox          fake.SandboxConfig
+	modes            Mode
+	noDefaultTimeout bool
+	debugAddr        string
+	skipLogs         bool
+	skipHooks        bool
+	optionsHook      func(*source.Options)
 }
 
 func (r *Runner) defaultConfig() *runConfig {
 	return &runConfig{
 		modes:       r.DefaultModes,
-		timeout:     r.Timeout,
 		optionsHook: r.OptionsHook,
 	}
 }
@@ -100,10 +100,12 @@
 	f(opts)
 }
 
-// Timeout configures a custom timeout for this test run.
-func Timeout(d time.Duration) RunOption {
+// NoDefaultTimeout removes the timeout set by the -regtest_timeout flag, for
+// individual tests that are expected to run longer than is reasonable for
+// ordinary regression tests.
+func NoDefaultTimeout() RunOption {
 	return optionSetter(func(opts *runConfig) {
-		opts.timeout = d
+		opts.noDefaultTimeout = true
 	})
 }
 
@@ -257,8 +259,18 @@
 		}
 
 		t.Run(tc.name, func(t *testing.T) {
-			ctx, cancel := context.WithTimeout(context.Background(), config.timeout)
-			defer cancel()
+			ctx := context.Background()
+			if r.Timeout != 0 && !config.noDefaultTimeout {
+				var cancel context.CancelFunc
+				ctx, cancel = context.WithTimeout(ctx, r.Timeout)
+				defer cancel()
+			} else if d, ok := testenv.Deadline(t); ok {
+				timeout := time.Until(d) * 19 / 20 // Leave an arbitrary 5% for cleanup.
+				var cancel context.CancelFunc
+				ctx, cancel = context.WithTimeout(ctx, timeout)
+				defer cancel()
+			}
+
 			ctx = debug.WithInstance(ctx, "", "off")
 			if config.debugAddr != "" {
 				di := debug.GetInstance(ctx)
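
A worked example of the 19/20 headroom calculation above, assuming the test binary's deadline (from go test -timeout) is ten minutes away:

package main

import (
	"fmt"
	"time"
)

func main() {
	remaining := 10 * time.Minute           // hypothetical time.Until(deadline)
	timeout := remaining * 19 / 20          // context deadline for the test body
	fmt.Println(timeout, remaining-timeout) // 9m30s 30s left for cleanup
}
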
diff --git a/internal/lsp/regtest/wrappers.go b/internal/lsp/regtest/wrappers.go
index 96844e3..9031e71 100644
--- a/internal/lsp/regtest/wrappers.go
+++ b/internal/lsp/regtest/wrappers.go
@@ -358,6 +358,16 @@
 	}
 }
 
+// WorkspaceSymbol calls workspace/symbol
+func (e *Env) WorkspaceSymbol(sym string) []protocol.SymbolInformation {
+	e.T.Helper()
+	ans, err := e.Editor.Symbols(e.Ctx, sym)
+	if err != nil {
+		e.T.Fatal(err)
+	}
+	return ans
+}
+
 // References calls textDocument/references for the given path at the given
 // position.
 func (e *Env) References(path string, pos fake.Pos) []protocol.Location {
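
A hedged usage sketch of the new wrapper inside a regtest body; the query string and logging are illustrative only:

Run(t, files, func(t *testing.T, env *Env) {
	syms := env.WorkspaceSymbol("Foo") // hypothetical fuzzy query
	for _, s := range syms {
		t.Logf("%s (%v) at %v", s.Name, s.Kind, s.Location)
	}
})
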
diff --git a/internal/lsp/rename.go b/internal/lsp/rename.go
index 5f27d23..739ae90 100644
--- a/internal/lsp/rename.go
+++ b/internal/lsp/rename.go
@@ -35,7 +35,7 @@
 	}, nil
 }
 
-func (s *Server) prepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.Range, error) {
+func (s *Server) prepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRename2Gn, error) {
 	snapshot, fh, ok, release, err := s.beginFileRequest(ctx, params.TextDocument.URI, source.Go)
 	defer release()
 	if !ok {
@@ -49,6 +49,8 @@
 		// internal error details.
 		return nil, usererr
 	}
-	// TODO(suzmue): return ident.Name as the placeholder text.
-	return &item.Range, nil
+	return &protocol.PrepareRename2Gn{
+		Range:       item.Range,
+		Placeholder: item.Text,
+	}, nil
 }
diff --git a/internal/lsp/semantic.go b/internal/lsp/semantic.go
index 6bf2338..7c0419c 100644
--- a/internal/lsp/semantic.go
+++ b/internal/lsp/semantic.go
@@ -11,6 +11,7 @@
 	"go/ast"
 	"go/token"
 	"go/types"
+	"log"
 	"path/filepath"
 	"sort"
 	"strings"
@@ -20,6 +21,7 @@
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/lsp/template"
+	"golang.org/x/tools/internal/typeparams"
 	errors "golang.org/x/xerrors"
 )
 
@@ -30,6 +32,10 @@
 // reject full semantic token requests for large files
 const maxFullFileSize int = 100000
 
+// semDebug enables comprehensive logging of decisions (gopls semtok foo.go > /dev/null shows the log output)
+// semDebug should NEVER be true in checked-in code
+const semDebug = false
+
 func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
 	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
 	return ret, err
@@ -64,7 +70,8 @@
 		// the client won't remember the wrong answer
 		return nil, errors.Errorf("semantictokens are disabled")
 	}
-	if fh.Kind() == source.Tmpl {
+	kind := snapshot.View().FileKind(fh)
+	if kind == source.Tmpl {
 		// this is a little cumbersome to avoid both exporting 'encoded' and its methods
 		// and to avoid import cycles
 		e := &encoded{
@@ -81,14 +88,13 @@
 		}
 		return template.SemanticTokens(ctx, snapshot, fh.URI(), add, data)
 	}
-	if fh.Kind() != source.Go {
+	if kind != source.Go {
 		return nil, nil
 	}
 	pkg, err := snapshot.PackageForFile(ctx, fh.URI(), source.TypecheckFull, source.WidestPackage)
 	if err != nil {
 		return nil, err
 	}
-	info := pkg.GetTypesInfo()
 	pgf, err := pkg.File(fh.URI())
 	if err != nil {
 		return nil, err
@@ -103,7 +109,8 @@
 		ctx:      ctx,
 		pgf:      pgf,
 		rng:      rng,
-		ti:       info,
+		ti:       pkg.GetTypesInfo(),
+		pkg:      pkg,
 		fset:     snapshot.FileSet(),
 		tokTypes: s.session.Options().SemanticTypes,
 		tokMods:  s.session.Options().SemanticMods,
@@ -153,9 +160,10 @@
 	tokNamespace tokenType = "namespace"
 	tokType      tokenType = "type"
 	tokInterface tokenType = "interface"
+	tokTypeParam tokenType = "typeParameter"
 	tokParameter tokenType = "parameter"
 	tokVariable  tokenType = "variable"
-	tokMember    tokenType = "member"
+	tokMethod    tokenType = "method"
 	tokFunction  tokenType = "function"
 	tokKeyword   tokenType = "keyword"
 	tokComment   tokenType = "comment"
@@ -170,7 +178,6 @@
 
 	if !start.IsValid() {
 		// This is not worth reporting
-		//e.unexpected("token at token.NoPos")
 		return
 	}
 	if start >= e.end || start+token.Pos(leng) <= e.start {
@@ -220,6 +227,7 @@
 	pgf               *source.ParsedGoFile
 	rng               *protocol.Range
 	ti                *types.Info
+	pkg               source.Package
 	fset              *token.FileSet
 	// allowed starting and ending token.Pos, set by init
 	// used to avoid looking at declarations not in range
@@ -231,28 +239,39 @@
 // convert the stack to a string, for debugging
 func (e *encoded) strStack() string {
 	msg := []string{"["}
-	for _, s := range e.stack {
+	for i := len(e.stack) - 1; i >= 0; i-- {
+		s := e.stack[i]
 		msg = append(msg, fmt.Sprintf("%T", s)[5:])
 	}
 	if len(e.stack) > 0 {
 		loc := e.stack[len(e.stack)-1].Pos()
 		if !source.InRange(e.pgf.Tok, loc) {
 			msg = append(msg, fmt.Sprintf("invalid position %v for %s", loc, e.pgf.URI))
-		} else {
+		} else if locInRange(e.pgf.Tok, loc) {
 			add := e.pgf.Tok.PositionFor(loc, false)
 			nm := filepath.Base(add.Filename)
 			msg = append(msg, fmt.Sprintf("(%s:%d,col:%d)", nm, add.Line, add.Column))
+		} else {
+			msg = append(msg, fmt.Sprintf("(loc %d out of range)", loc))
 		}
 	}
 	msg = append(msg, "]")
 	return strings.Join(msg, " ")
 }
 
+// avoid panic in token.PositionFor() when typing at the end of the file
+func locInRange(f *token.File, loc token.Pos) bool {
+	return f.Base() <= int(loc) && int(loc) < f.Base()+f.Size()
+}
+
 // find the line in the source
 func (e *encoded) srcLine(x ast.Node) string {
 	file := e.pgf.Tok
 	line := file.Line(x.Pos())
-	start := file.Offset(file.LineStart(line))
+	start, err := source.Offset(file, file.LineStart(line))
+	if err != nil {
+		return ""
+	}
 	end := start
 	for ; end < len(e.pgf.Src) && e.pgf.Src[end] != '\n'; end++ {
 
@@ -359,6 +378,7 @@
 	case *ast.IncDecStmt:
 		e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
 	case *ast.IndexExpr:
+	case *typeparams.IndexListExpr: // accommodate generics
 	case *ast.InterfaceType:
 		e.token(x.Interface, len("interface"), tokKeyword, nil)
 	case *ast.KeyValueExpr:
@@ -410,7 +430,7 @@
 	case *ast.Comment, *ast.CommentGroup:
 		pop()
 		return false
-	default: // just to be super safe.
+	default:
 		e.unexpected(fmt.Sprintf("failed to implement %T", x))
 	}
 	return true
@@ -418,29 +438,55 @@
 
 func (e *encoded) ident(x *ast.Ident) {
 	if e.ti == nil {
-		e.unkIdent(x)
+		what, mods := e.unkIdent(x)
+		if what != "" {
+			e.token(x.Pos(), len(x.String()), what, mods)
+		}
+		if semDebug {
+			log.Printf(" nil %s/nil/nil %q %v %s", x.String(), what, mods, e.strStack())
+		}
 		return
 	}
 	def := e.ti.Defs[x]
 	if def != nil {
-		what, mods := e.definitionFor(x)
+		what, mods := e.definitionFor(x, def)
 		if what != "" {
 			e.token(x.Pos(), len(x.String()), what, mods)
 		}
+		if semDebug {
+			log.Printf(" for %s/%T/%T got %s %v (%s)", x.String(), def, def.Type(), what, mods, e.strStack())
+		}
 		return
 	}
 	use := e.ti.Uses[x]
+	tok := func(pos token.Pos, lng int, tok tokenType, mods []string) {
+		e.token(pos, lng, tok, mods)
+		q := "nil"
+		if use != nil {
+			q = fmt.Sprintf("%T", use.Type())
+		}
+		if semDebug {
+			log.Printf(" use %s/%T/%s got %s %v (%s)", x.String(), use, q, tok, mods, e.strStack())
+		}
+	}
+
 	switch y := use.(type) {
 	case nil:
-		e.unkIdent(x)
+		what, mods := e.unkIdent(x)
+		if what != "" {
+			tok(x.Pos(), len(x.String()), what, mods)
+		} else if semDebug {
+			// tok() wasn't called, so didn't log
+			log.Printf(" nil %s/%T/nil %q %v (%s)", x.String(), use, what, mods, e.strStack())
+		}
 		return
 	case *types.Builtin:
-		e.token(x.NamePos, len(x.Name), tokFunction, []string{"defaultLibrary"})
+		tok(x.NamePos, len(x.Name), tokFunction, []string{"defaultLibrary"})
 	case *types.Const:
 		mods := []string{"readonly"}
 		tt := y.Type()
 		if _, ok := tt.(*types.Basic); ok {
-			e.token(x.Pos(), len(x.String()), tokVariable, mods)
+			tok(x.Pos(), len(x.String()), tokVariable, mods)
 			break
 		}
 		if ttx, ok := tt.(*types.Named); ok {
@@ -448,7 +494,7 @@
 				e.unexpected(fmt.Sprintf("iota:%T", ttx))
 			}
 			if _, ok := ttx.Underlying().(*types.Basic); ok {
-				e.token(x.Pos(), len(x.String()), tokVariable, mods)
+				tok(x.Pos(), len(x.String()), tokVariable, mods)
 				break
 			}
 			e.unexpected(fmt.Sprintf("%q/%T", x.String(), tt))
@@ -456,22 +502,31 @@
 		// can this happen? Don't think so
 		e.unexpected(fmt.Sprintf("%s %T %#v", x.String(), tt, tt))
 	case *types.Func:
-		e.token(x.Pos(), len(x.Name), tokFunction, nil)
+		tok(x.Pos(), len(x.Name), tokFunction, nil)
 	case *types.Label:
 		// nothing to map it to
 	case *types.Nil:
 		// nil is a predeclared identifier
-		e.token(x.Pos(), len("nil"), tokVariable, []string{"readonly", "defaultLibrary"})
+		tok(x.Pos(), len("nil"), tokVariable, []string{"readonly", "defaultLibrary"})
 	case *types.PkgName:
-		e.token(x.Pos(), len(x.Name), tokNamespace, nil)
-	case *types.TypeName:
+		tok(x.Pos(), len(x.Name), tokNamespace, nil)
+	case *types.TypeName: // could be a tokTypeParam
 		var mods []string
 		if _, ok := y.Type().(*types.Basic); ok {
 			mods = []string{"defaultLibrary"}
+		} else if _, ok := y.Type().(*typeparams.TypeParam); ok {
+			tok(x.Pos(), len(x.String()), tokTypeParam, mods)
+			break
 		}
-		e.token(x.Pos(), len(x.String()), tokType, mods)
+		tok(x.Pos(), len(x.String()), tokType, mods)
 	case *types.Var:
-		e.token(x.Pos(), len(x.Name), tokVariable, nil)
+		if isSignature(y) {
+			tok(x.Pos(), len(x.Name), tokFunction, nil)
+		} else if _, ok := y.Type().(*typeparams.TypeParam); ok {
+			tok(x.Pos(), len(x.Name), tokTypeParam, nil)
+		} else {
+			tok(x.Pos(), len(x.Name), tokVariable, nil)
+		}
 	default:
 		// can't happen
 		if use == nil {
@@ -486,81 +541,96 @@
 	}
 }
 
-// both e.ti.Defs and e.ti.Uses are nil. use the parse stack
-// a lot of these only happen when the package doesn't compile
-func (e *encoded) unkIdent(x *ast.Ident) {
-	tok := func(tok tokenType, mod []string) {
-		e.token(x.Pos(), len(x.Name), tok, mod)
+func isSignature(use types.Object) bool {
+	if true {
+		return false //PJW: fix after generics seem ok
 	}
+	if _, ok := use.(*types.Var); !ok {
+		return false
+	}
+	v := use.Type()
+	if v == nil {
+		return false
+	}
+	if _, ok := v.(*types.Signature); ok {
+		return true
+	}
+	return false
+}
+
+// both e.ti.Defs and e.ti.Uses are nil. use the parse stack.
+// a lot of these only happen when the package doesn't compile
+// but in that case it is all best-effort from the parse tree
+func (e *encoded) unkIdent(x *ast.Ident) (tokenType, []string) {
 	def := []string{"definition"}
 	n := len(e.stack) - 2 // parent of Ident
 	if n < 0 {
 		e.unexpected("no stack?")
-		return
+		return "", nil
 	}
 	switch nd := e.stack[n].(type) {
 	case *ast.BinaryExpr, *ast.UnaryExpr, *ast.ParenExpr, *ast.StarExpr,
 		*ast.IncDecStmt, *ast.SliceExpr, *ast.ExprStmt, *ast.IndexExpr,
-		*ast.ReturnStmt,
+		*ast.ReturnStmt, *ast.ChanType, *ast.SendStmt,
 		*ast.ForStmt,      // possibly incomplete
 		*ast.IfStmt,       /* condition */
 		*ast.KeyValueExpr: // either key or value
-		tok(tokVariable, nil)
+		return tokVariable, nil
+	case *typeparams.IndexListExpr: // generic?
+		return tokVariable, nil
 	case *ast.Ellipsis:
-		tok(tokType, nil)
+		return tokType, nil
 	case *ast.CaseClause:
 		if n-2 >= 0 {
 			if _, ok := e.stack[n-2].(*ast.TypeSwitchStmt); ok {
-				tok(tokType, nil)
-				return
+				return tokType, nil
 			}
 		}
-		tok(tokVariable, nil)
+		return tokVariable, nil
 	case *ast.ArrayType:
 		if x == nd.Len {
-			tok(tokVariable, nil)
+			// or maybe a Type Param, but we can't tell just from the parse tree
+			return tokVariable, nil
 		} else {
-			tok(tokType, nil)
+			return tokType, nil
 		}
 	case *ast.MapType:
-		tok(tokType, nil)
+		return tokType, nil
 	case *ast.CallExpr:
 		if x == nd.Fun {
-			tok(tokFunction, nil)
-			return
+			return tokFunction, nil
 		}
-		tok(tokVariable, nil)
+		return tokVariable, nil
+	case *ast.SwitchStmt:
+		return tokVariable, nil
 	case *ast.TypeAssertExpr:
 		if x == nd.X {
-			tok(tokVariable, nil)
+			return tokVariable, nil
 		} else if x == nd.Type {
-			tok(tokType, nil)
+			return tokType, nil
 		}
 	case *ast.ValueSpec:
 		for _, p := range nd.Names {
 			if p == x {
-				tok(tokVariable, def)
-				return
+				return tokVariable, def
 			}
 		}
 		for _, p := range nd.Values {
 			if p == x {
-				tok(tokVariable, nil)
-				return
+				return tokVariable, nil
 			}
 		}
-		tok(tokType, nil)
+		return tokType, nil
 	case *ast.SelectorExpr: // e.ti.Selections[nd] is nil, so no help
 		if n-1 >= 0 {
 			if ce, ok := e.stack[n-1].(*ast.CallExpr); ok {
 				// ... CallExpr SelectorExpr Ident (_.x())
 				if ce.Fun == nd && nd.Sel == x {
-					tok(tokFunction, nil)
-					return
+					return tokFunction, nil
 				}
 			}
 		}
-		tok(tokVariable, nil)
+		return tokVariable, nil
 	case *ast.AssignStmt:
 		for _, p := range nd.Lhs {
 			// x := ..., or x = ...
@@ -568,51 +638,48 @@
 				if nd.Tok != token.DEFINE {
 					def = nil
 				}
-				tok(tokVariable, def)
-				return
+				return tokVariable, def
 			}
 		}
 		// RHS, = x
-		tok(tokVariable, nil)
+		return tokVariable, nil
 	case *ast.TypeSpec: // it's a type if it is either the Name or the Type
 		if x == nd.Type {
 			def = nil
 		}
-		tok(tokType, def)
+		return tokType, def
 	case *ast.Field:
 		// ident could be type in a field, or a method in an interface type, or a variable
 		if x == nd.Type {
-			tok(tokType, nil)
-			return
+			return tokType, nil
 		}
 		if n-2 >= 0 {
 			_, okit := e.stack[n-2].(*ast.InterfaceType)
 			_, okfl := e.stack[n-1].(*ast.FieldList)
 			if okit && okfl {
-				tok(tokMember, def)
-				return
+				return tokMethod, def
 			}
 		}
-		tok(tokVariable, nil)
+		return tokVariable, nil
 	case *ast.LabeledStmt, *ast.BranchStmt:
 		// nothing to report
 	case *ast.CompositeLit:
 		if nd.Type == x {
-			tok(tokType, nil)
-			return
+			return tokType, nil
 		}
-		tok(tokVariable, nil)
+		return tokVariable, nil
 	case *ast.RangeStmt:
 		if nd.Tok != token.DEFINE {
 			def = nil
 		}
-		tok(tokVariable, def)
+		return tokVariable, def
 	case *ast.FuncDecl:
-		tok(tokFunction, def)
+		return tokFunction, def
 	default:
 		msg := fmt.Sprintf("%T undexpected: %s %s%q", nd, x.Name, e.strStack(), e.srcLine(x))
 		e.unexpected(msg)
 	}
+	return "", nil
 }
 
 func isDeprecated(n *ast.CommentGroup) bool {
@@ -627,7 +694,9 @@
 	return false
 }
 
-func (e *encoded) definitionFor(x *ast.Ident) (tokenType, []string) {
+func (e *encoded) definitionFor(x *ast.Ident, def types.Object) (tokenType, []string) {
+	// PJW: def == types.Label? probably a nothing
+	// PJW: look into replacing these syntactic tests with types more generally
 	mods := []string{"definition"}
 	for i := len(e.stack) - 1; i >= 0; i-- {
 		s := e.stack[i]
@@ -636,7 +705,7 @@
 			if x.Name == "_" {
 				return "", nil // not really a variable
 			}
-			return "variable", mods
+			return tokVariable, mods
 		case *ast.GenDecl:
 			if isDeprecated(y.Doc) {
 				mods = append(mods, "deprecated")
@@ -652,7 +721,7 @@
 					mods = append(mods, "deprecated")
 				}
 				if y.Recv != nil {
-					return tokMember, mods
+					return tokMethod, mods
 				}
 				return tokFunction, mods
 			}
@@ -662,8 +731,10 @@
 			}
 			// if x < ... < FieldList < FuncType < FuncDecl, this is a param
 			return tokParameter, mods
+		case *ast.FuncType:
+			return tokParameter, mods
 		case *ast.InterfaceType:
-			return tokMember, mods
+			return tokMethod, mods
 		case *ast.TypeSpec:
 			// GenDecl/Typespec/FuncType/FieldList/Field/Ident
 			// (type A func(b uint64)) (err error)
@@ -671,6 +742,10 @@
 			// and in GenDecl/TpeSpec/StructType/FieldList/Field/Ident
 			// (type A struct{b uint64}
 			// but on type B struct{C}), C is a type, but is not being defined.
+			// GenDecl/TypeSpec/FieldList/Field/Ident is a typeParam
+			if _, ok := e.stack[i+1].(*ast.FieldList); ok {
+				return tokTypeParam, mods
+			}
 			fldm := e.stack[len(e.stack)-2]
 			if fld, ok := fldm.(*ast.Field); ok {
 				// if len(fld.names) == 0 this is a tokType, being used
@@ -721,7 +796,7 @@
 	if idx != -1 {
 		return start + token.Pos(idx)
 	}
-	// can't happen
+	// (in unparsable programs: type _ <-<-chan int)
 	e.unexpected(fmt.Sprintf("not found:%s %v", keyword, e.fset.PositionFor(start, false)))
 	return token.NoPos
 }
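
A hypothetical input illustrating the new typeParameter classification in definitionFor above: the declarations of K and V reach it through GenDecl/TypeSpec/FieldList/Field/Ident, with no StructType or FuncType in between, which is the pattern the added check matches.

package p

// K and V are reported as "typeParameter" tokens at their declarations;
// the uses of K and V in the field types go through ident() instead, where
// the new *typeparams.TypeParam branches apply.
type Pair[K comparable, V any] struct {
	Key K
	Val V
}
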
@@ -753,8 +828,12 @@
 	// each semantic token needs five values
 	// (see Integer Encoding for Tokens in the LSP spec)
 	x := make([]uint32, 5*len(e.items))
+	var j int
 	for i := 0; i < len(e.items); i++ {
-		j := 5 * i
+		typ, ok := typeMap[e.items[i].typeStr]
+		if !ok {
+			continue // client doesn't want typeStr
+		}
 		if i == 0 {
 			x[0] = e.items[0].line
 		} else {
@@ -765,35 +844,26 @@
 			x[j+1] = e.items[i].start - e.items[i-1].start
 		}
 		x[j+2] = e.items[i].len
-		typ, ok := typeMap[e.items[i].typeStr]
-		if !ok {
-			continue // client doesn't want typeStr
-		}
 		x[j+3] = uint32(typ)
 		mask := 0
 		for _, s := range e.items[i].mods {
-			// modMpa[s] is 0 if the client doesn't want this modifier
+			// modMap[s] is 0 if the client doesn't want this modifier
 			mask |= modMap[s]
 		}
 		x[j+4] = uint32(mask)
+		j += 5
 	}
-	return x
+	return x[:j]
 }
 
 func (e *encoded) importSpec(d *ast.ImportSpec) {
 	// a local package name or the last component of the Path
 	if d.Name != nil {
 		nm := d.Name.String()
-		// import . x => x is not a namespace
-		// import _ x => x is a namespace
 		if nm != "_" && nm != "." {
 			e.token(d.Name.Pos(), len(nm), tokNamespace, nil)
-			return
 		}
-		if nm == "." {
-			return
-		}
-		// and fall through for _
+		return // don't mark anything for . or _
 	}
 	val := d.Path.Value
 	if len(val) < 2 || val[0] != '"' || val[len(val)-1] != '"' {
@@ -801,15 +871,32 @@
 		return
 	}
 	nm := val[1 : len(val)-1] // remove surrounding "s
-	nm = filepath.Base(nm)
-	// in import "lib/math", 'math' is the package name
-	start := d.Path.End() - token.Pos(1+len(nm))
-	e.token(start, len(nm), tokNamespace, nil)
-	// There may be more cases, as import strings are implementation defined.
+	// Import strings are implementation defined. Try to match with parse information.
+	x, err := e.pkg.GetImport(nm)
+	if err != nil {
+		// unexpected, but impact is that maybe some import is not colored
+		return
+	}
+	// expect that nm is x.PkgPath and that x.Name() is a component of it
+	if x.PkgPath() != nm {
+		// don't know how or what to color (if this can happen at all)
+		return
+	}
+	// this is not a precise test: imagine "github.com/nasty/v/v2"
+	j := strings.LastIndex(nm, x.Name())
+	if j == -1 {
+		// name doesn't show up, for whatever reason, so nothing to report
+		return
+	}
+	start := d.Path.Pos() + 1 + token.Pos(j) // skip the initial quote
+	e.token(start, len(x.Name()), tokNamespace, nil)
 }
 
 // log unexpected state
 func (e *encoded) unexpected(msg string) {
+	if semDebug {
+		panic(msg)
+	}
 	event.Error(e.ctx, e.strStack(), errors.New(msg))
 }
 
@@ -861,7 +948,7 @@
 	semanticTypes = [...]string{
 		"namespace", "type", "class", "enum", "interface",
 		"struct", "typeParameter", "parameter", "variable", "property", "enumMember",
-		"event", "function", "member", "macro", "keyword", "modifier", "comment",
+		"event", "function", "method", "macro", "keyword", "modifier", "comment",
 		"string", "number", "regexp", "operator",
 	}
 	semanticModifiers = [...]string{
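
For reference, a worked example of the five-integer token encoding the loop above produces (see "Integer Encoding for Tokens" in the LSP spec), assuming the full legend in semanticTypes, where "variable" has index 8 and "function" index 12:

package main

import "fmt"

func main() {
	// Two hypothetical tokens on source line 10: "hello", a 5-char variable
	// starting at column 3, then "world", a 5-char function at column 9.
	// Each contributes deltaLine, deltaStart, length, typeIndex, modMask.
	data := []uint32{
		10, 3, 5, 8, 0, // first token: line and start are absolute
		0, 6, 5, 12, 0, // same line, start delta 9-3=6
	}
	fmt.Println(data)
	// If the client's legend omitted "function", the new j counter would skip
	// those five slots entirely and the encoder would return x[:5].
}
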
diff --git a/internal/lsp/server_gen.go b/internal/lsp/server_gen.go
index 75069a0..2062693 100644
--- a/internal/lsp/server_gen.go
+++ b/internal/lsp/server_gen.go
@@ -56,8 +56,12 @@
 	return s.didChange(ctx, params)
 }
 
-func (s *Server) DidChangeConfiguration(ctx context.Context, _ *protocol.DidChangeConfigurationParams) error {
-	return s.didChangeConfiguration(ctx, nil)
+func (s *Server) DidChangeConfiguration(ctx context.Context, _gen *protocol.DidChangeConfigurationParams) error {
+	return s.didChangeConfiguration(ctx, _gen)
+}
+
+func (s *Server) DidChangeNotebookDocument(context.Context, *protocol.DidChangeNotebookDocumentParams) error {
+	return notImplemented("DidChangeNotebookDocument")
 }
 
 func (s *Server) DidChangeWatchedFiles(ctx context.Context, params *protocol.DidChangeWatchedFilesParams) error {
@@ -72,6 +76,10 @@
 	return s.didClose(ctx, params)
 }
 
+func (s *Server) DidCloseNotebookDocument(context.Context, *protocol.DidCloseNotebookDocumentParams) error {
+	return notImplemented("DidCloseNotebookDocument")
+}
+
 func (s *Server) DidCreateFiles(context.Context, *protocol.CreateFilesParams) error {
 	return notImplemented("DidCreateFiles")
 }
@@ -84,6 +92,10 @@
 	return s.didOpen(ctx, params)
 }
 
+func (s *Server) DidOpenNotebookDocument(context.Context, *protocol.DidOpenNotebookDocumentParams) error {
+	return notImplemented("DidOpenNotebookDocument")
+}
+
 func (s *Server) DidRenameFiles(context.Context, *protocol.RenameFilesParams) error {
 	return notImplemented("DidRenameFiles")
 }
@@ -92,6 +104,10 @@
 	return s.didSave(ctx, params)
 }
 
+func (s *Server) DidSaveNotebookDocument(context.Context, *protocol.DidSaveNotebookDocumentParams) error {
+	return notImplemented("DidSaveNotebookDocument")
+}
+
 func (s *Server) DocumentColor(context.Context, *protocol.DocumentColorParams) ([]protocol.ColorInformation, error) {
 	return nil, notImplemented("DocumentColor")
 }
@@ -144,6 +160,22 @@
 	return s.initialized(ctx, params)
 }
 
+func (s *Server) InlayHint(context.Context, *protocol.InlayHintParams) ([]protocol.InlayHint, error) {
+	return nil, notImplemented("InlayHint")
+}
+
+func (s *Server) InlayHintRefresh(context.Context) error {
+	return notImplemented("InlayHintRefresh")
+}
+
+func (s *Server) InlineValue(context.Context, *protocol.InlineValueParams) ([]protocol.InlineValue, error) {
+	return nil, notImplemented("InlineValue")
+}
+
+func (s *Server) InlineValueRefresh(context.Context) error {
+	return notImplemented("InlineValueRefresh")
+}
+
 func (s *Server) LinkedEditingRange(context.Context, *protocol.LinkedEditingRangeParams) (*protocol.LinkedEditingRanges, error) {
 	return nil, notImplemented("LinkedEditingRange")
 }
@@ -172,10 +204,14 @@
 	return s.prepareCallHierarchy(ctx, params)
 }
 
-func (s *Server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.Range, error) {
+func (s *Server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRename2Gn, error) {
 	return s.prepareRename(ctx, params)
 }
 
+func (s *Server) PrepareTypeHierarchy(context.Context, *protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) {
+	return nil, notImplemented("PrepareTypeHierarchy")
+}
+
 func (s *Server) RangeFormatting(context.Context, *protocol.DocumentRangeFormattingParams) ([]protocol.TextEdit, error) {
 	return nil, notImplemented("RangeFormatting")
 }
@@ -188,7 +224,7 @@
 	return s.rename(ctx, params)
 }
 
-func (s *Server) Resolve(context.Context, *protocol.CompletionItem) (*protocol.CompletionItem, error) {
+func (s *Server) Resolve(context.Context, *protocol.InlayHint) (*protocol.InlayHint, error) {
 	return nil, notImplemented("Resolve")
 }
 
@@ -200,10 +236,18 @@
 	return nil, notImplemented("ResolveCodeLens")
 }
 
+func (s *Server) ResolveCompletionItem(context.Context, *protocol.CompletionItem) (*protocol.CompletionItem, error) {
+	return nil, notImplemented("ResolveCompletionItem")
+}
+
 func (s *Server) ResolveDocumentLink(context.Context, *protocol.DocumentLink) (*protocol.DocumentLink, error) {
 	return nil, notImplemented("ResolveDocumentLink")
 }
 
+func (s *Server) ResolveWorkspaceSymbol(context.Context, *protocol.WorkspaceSymbol) (*protocol.WorkspaceSymbol, error) {
+	return nil, notImplemented("ResolveWorkspaceSymbol")
+}
+
 func (s *Server) SelectionRange(context.Context, *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) {
 	return nil, notImplemented("SelectionRange")
 }
@@ -236,6 +280,14 @@
 	return s.signatureHelp(ctx, params)
 }
 
+func (s *Server) Subtypes(context.Context, *protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) {
+	return nil, notImplemented("Subtypes")
+}
+
+func (s *Server) Supertypes(context.Context, *protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) {
+	return nil, notImplemented("Supertypes")
+}
+
 func (s *Server) Symbol(ctx context.Context, params *protocol.WorkspaceSymbolParams) ([]protocol.SymbolInformation, error) {
 	return s.symbol(ctx, params)
 }
diff --git a/internal/lsp/source/api_json.go b/internal/lsp/source/api_json.go
index c752220..14140bb 100755
--- a/internal/lsp/source/api_json.go
+++ b/internal/lsp/source/api_json.go
@@ -6,346 +6,193 @@
 	Options: map[string][]*OptionJSON{
 		"User": {
 			{
-				Name: "buildFlags",
-				Type: "[]string",
-				Doc:  "buildFlags is the set of flags passed on to the build system when invoked.\nIt is applied to queries like `go list`, which is used when discovering files.\nThe most common use is to set `-tags`.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "[]",
-				Status:     "",
-				Hierarchy:  "build",
+				Name:      "buildFlags",
+				Type:      "[]string",
+				Doc:       "buildFlags is the set of flags passed on to the build system when invoked.\nIt is applied to queries like `go list`, which is used when discovering files.\nThe most common use is to set `-tags`.\n",
+				Default:   "[]",
+				Hierarchy: "build",
 			},
 			{
-				Name: "env",
-				Type: "map[string]string",
-				Doc:  "env adds environment variables to external commands run by `gopls`, most notably `go list`.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "{}",
-				Status:     "",
-				Hierarchy:  "build",
+				Name:      "env",
+				Type:      "map[string]string",
+				Doc:       "env adds environment variables to external commands run by `gopls`, most notably `go list`.\n",
+				Default:   "{}",
+				Hierarchy: "build",
 			},
 			{
-				Name: "directoryFilters",
-				Type: "[]string",
-				Doc:  "directoryFilters can be used to exclude unwanted directories from the\nworkspace. By default, all directories are included. Filters are an\noperator, `+` to include and `-` to exclude, followed by a path prefix\nrelative to the workspace folder. They are evaluated in order, and\nthe last filter that applies to a path controls whether it is included.\nThe path prefix can be empty, so an initial `-` excludes everything.\n\nExamples:\n\nExclude node_modules: `-node_modules`\n\nInclude only project_a: `-` (exclude everything), `+project_a`\n\nInclude only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "[]",
-				Status:     "",
-				Hierarchy:  "build",
+				Name:      "directoryFilters",
+				Type:      "[]string",
+				Doc:       "directoryFilters can be used to exclude unwanted directories from the\nworkspace. By default, all directories are included. Filters are an\noperator, `+` to include and `-` to exclude, followed by a path prefix\nrelative to the workspace folder. They are evaluated in order, and\nthe last filter that applies to a path controls whether it is included.\nThe path prefix can be empty, so an initial `-` excludes everything.\n\nExamples:\n\nExclude node_modules: `-node_modules`\n\nInclude only project_a: `-` (exclude everything), `+project_a`\n\nInclude only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`\n",
+				Default:   "[\"-node_modules\"]",
+				Hierarchy: "build",
+			},
+			{
+				Name:      "templateExtensions",
+				Type:      "[]string",
+				Doc:       "templateExtensions gives the extensions of file names that are treateed\nas template files. (The extension\nis the part of the file name after the final dot.)\n",
+				Default:   "[]",
+				Hierarchy: "build",
 			},
 			{
 				Name: "memoryMode",
 				Type: "enum",
 				Doc:  "memoryMode controls the tradeoff `gopls` makes between memory usage and\ncorrectness.\n\nValues other than `Normal` are untested and may break in surprising ways.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
 				EnumValues: []EnumValue{
 					{
 						Value: "\"DegradeClosed\"",
 						Doc:   "`\"DegradeClosed\"`: In DegradeClosed mode, `gopls` will collect less information about\npackages without open files. As a result, features like Find\nReferences and Rename will miss results in such packages.\n",
 					},
-					{
-						Value: "\"Normal\"",
-						Doc:   "",
-					},
+					{Value: "\"Normal\""},
 				},
 				Default:   "\"Normal\"",
 				Status:    "experimental",
 				Hierarchy: "build",
 			},
 			{
-				Name: "expandWorkspaceToModule",
-				Type: "bool",
-				Doc:  "expandWorkspaceToModule instructs `gopls` to adjust the scope of the\nworkspace to find the best available module root. `gopls` first looks for\na go.mod file in any parent directory of the workspace folder, expanding\nthe scope to that directory if it exists. If no viable parent directory is\nfound, gopls will check if there is exactly one child directory containing\na go.mod file, narrowing the scope to that directory if it exists.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "true",
-				Status:     "experimental",
-				Hierarchy:  "build",
+				Name:      "expandWorkspaceToModule",
+				Type:      "bool",
+				Doc:       "expandWorkspaceToModule instructs `gopls` to adjust the scope of the\nworkspace to find the best available module root. `gopls` first looks for\na go.mod file in any parent directory of the workspace folder, expanding\nthe scope to that directory if it exists. If no viable parent directory is\nfound, gopls will check if there is exactly one child directory containing\na go.mod file, narrowing the scope to that directory if it exists.\n",
+				Default:   "true",
+				Status:    "experimental",
+				Hierarchy: "build",
 			},
 			{
-				Name: "experimentalWorkspaceModule",
-				Type: "bool",
-				Doc:  "experimentalWorkspaceModule opts a user into the experimental support\nfor multi-module workspaces.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "experimental",
-				Hierarchy:  "build",
+				Name:      "experimentalWorkspaceModule",
+				Type:      "bool",
+				Doc:       "experimentalWorkspaceModule opts a user into the experimental support\nfor multi-module workspaces.\n",
+				Default:   "false",
+				Status:    "experimental",
+				Hierarchy: "build",
 			},
 			{
-				Name: "experimentalTemplateSupport",
-				Type: "bool",
-				Doc:  "experimentalTemplateSupport opts into the experimental support\nfor template files.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "experimental",
-				Hierarchy:  "build",
+				Name:      "experimentalPackageCacheKey",
+				Type:      "bool",
+				Doc:       "experimentalPackageCacheKey controls whether to use a coarser cache key\nfor package type information to increase cache hits. This setting removes\nthe user's environment, build flags, and working directory from the cache\nkey, which should be a safe change as all relevant inputs into the type\nchecking pass are already hashed into the key. This is temporarily guarded\nby an experiment because caching behavior is subtle and difficult to\ncomprehensively test.\n",
+				Default:   "true",
+				Status:    "experimental",
+				Hierarchy: "build",
 			},
 			{
-				Name: "experimentalPackageCacheKey",
-				Type: "bool",
-				Doc:  "experimentalPackageCacheKey controls whether to use a coarser cache key\nfor package type information to increase cache hits. This setting removes\nthe user's environment, build flags, and working directory from the cache\nkey, which should be a safe change as all relevant inputs into the type\nchecking pass are already hashed into the key. This is temporarily guarded\nby an experiment because caching behavior is subtle and difficult to\ncomprehensively test.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "true",
-				Status:     "experimental",
-				Hierarchy:  "build",
+				Name:      "allowModfileModifications",
+				Type:      "bool",
+				Doc:       "allowModfileModifications disables -mod=readonly, allowing imports from\nout-of-scope modules. This option will eventually be removed.\n",
+				Default:   "false",
+				Status:    "experimental",
+				Hierarchy: "build",
 			},
 			{
-				Name: "allowModfileModifications",
-				Type: "bool",
-				Doc:  "allowModfileModifications disables -mod=readonly, allowing imports from\nout-of-scope modules. This option will eventually be removed.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "experimental",
-				Hierarchy:  "build",
+				Name:      "allowImplicitNetworkAccess",
+				Type:      "bool",
+				Doc:       "allowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module\ndownloads rather than requiring user action. This option will eventually\nbe removed.\n",
+				Default:   "false",
+				Status:    "experimental",
+				Hierarchy: "build",
 			},
 			{
-				Name: "allowImplicitNetworkAccess",
-				Type: "bool",
-				Doc:  "allowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module\ndownloads rather than requiring user action. This option will eventually\nbe removed.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "experimental",
-				Hierarchy:  "build",
-			},
-			{
-				Name: "experimentalUseInvalidMetadata",
-				Type: "bool",
-				Doc:  "experimentalUseInvalidMetadata enables gopls to fall back on outdated\npackage metadata to provide editor features if the go command fails to\nload packages for some reason (like an invalid go.mod file). This will\neventually be the default behavior, and this setting will be removed.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "experimental",
-				Hierarchy:  "build",
+				Name:      "experimentalUseInvalidMetadata",
+				Type:      "bool",
+				Doc:       "experimentalUseInvalidMetadata enables gopls to fall back on outdated\npackage metadata to provide editor features if the go command fails to\nload packages for some reason (like an invalid go.mod file). This will\neventually be the default behavior, and this setting will be removed.\n",
+				Default:   "false",
+				Status:    "experimental",
+				Hierarchy: "build",
 			},
 			{
 				Name: "hoverKind",
 				Type: "enum",
 				Doc:  "hoverKind controls the information that appears in the hover text.\nSingleLine and Structured are intended for use only by authors of editor plugins.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
 				EnumValues: []EnumValue{
-					{
-						Value: "\"FullDocumentation\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"NoDocumentation\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"SingleLine\"",
-						Doc:   "",
-					},
+					{Value: "\"FullDocumentation\""},
+					{Value: "\"NoDocumentation\""},
+					{Value: "\"SingleLine\""},
 					{
 						Value: "\"Structured\"",
 						Doc:   "`\"Structured\"` is an experimental setting that returns a structured hover format.\nThis format separates the signature from the documentation, so that the client\ncan do more manipulation of these fields.\n\nThis should only be used by clients that support this behavior.\n",
 					},
-					{
-						Value: "\"SynopsisDocumentation\"",
-						Doc:   "",
-					},
+					{Value: "\"SynopsisDocumentation\""},
 				},
 				Default:   "\"FullDocumentation\"",
-				Status:    "",
 				Hierarchy: "ui.documentation",
 			},
 			{
-				Name: "linkTarget",
-				Type: "string",
-				Doc:  "linkTarget controls where documentation links go.\nIt might be one of:\n\n* `\"godoc.org\"`\n* `\"pkg.go.dev\"`\n\nIf company chooses to use its own `godoc.org`, its address can be used as well.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "\"pkg.go.dev\"",
-				Status:     "",
-				Hierarchy:  "ui.documentation",
+				Name:      "linkTarget",
+				Type:      "string",
+				Doc:       "linkTarget controls where documentation links go.\nIt might be one of:\n\n* `\"godoc.org\"`\n* `\"pkg.go.dev\"`\n\nIf company chooses to use its own `godoc.org`, its address can be used as well.\n",
+				Default:   "\"pkg.go.dev\"",
+				Hierarchy: "ui.documentation",
 			},
 			{
-				Name: "linksInHover",
-				Type: "bool",
-				Doc:  "linksInHover toggles the presence of links to documentation in hover.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "true",
-				Status:     "",
-				Hierarchy:  "ui.documentation",
+				Name:      "linksInHover",
+				Type:      "bool",
+				Doc:       "linksInHover toggles the presence of links to documentation in hover.\n",
+				Default:   "true",
+				Hierarchy: "ui.documentation",
 			},
 			{
-				Name: "usePlaceholders",
-				Type: "bool",
-				Doc:  "placeholders enables placeholders for function parameters or struct\nfields in completion responses.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "",
-				Hierarchy:  "ui.completion",
+				Name:      "usePlaceholders",
+				Type:      "bool",
+				Doc:       "placeholders enables placeholders for function parameters or struct\nfields in completion responses.\n",
+				Default:   "false",
+				Hierarchy: "ui.completion",
 			},
 			{
-				Name: "completionBudget",
-				Type: "time.Duration",
-				Doc:  "completionBudget is the soft latency goal for completion requests. Most\nrequests finish in a couple milliseconds, but in some cases deep\ncompletions can take much longer. As we use up our budget we\ndynamically reduce the search scope to ensure we return timely\nresults. Zero means unlimited.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "\"100ms\"",
-				Status:     "debug",
-				Hierarchy:  "ui.completion",
+				Name:      "completionBudget",
+				Type:      "time.Duration",
+				Doc:       "completionBudget is the soft latency goal for completion requests. Most\nrequests finish in a couple milliseconds, but in some cases deep\ncompletions can take much longer. As we use up our budget we\ndynamically reduce the search scope to ensure we return timely\nresults. Zero means unlimited.\n",
+				Default:   "\"100ms\"",
+				Status:    "debug",
+				Hierarchy: "ui.completion",
 			},
 			{
 				Name: "matcher",
 				Type: "enum",
 				Doc:  "matcher sets the algorithm that is used when calculating completion\ncandidates.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
 				EnumValues: []EnumValue{
-					{
-						Value: "\"CaseInsensitive\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"CaseSensitive\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"Fuzzy\"",
-						Doc:   "",
-					},
+					{Value: "\"CaseInsensitive\""},
+					{Value: "\"CaseSensitive\""},
+					{Value: "\"Fuzzy\""},
 				},
 				Default:   "\"Fuzzy\"",
 				Status:    "advanced",
 				Hierarchy: "ui.completion",
 			},
 			{
-				Name: "experimentalPostfixCompletions",
-				Type: "bool",
-				Doc:  "experimentalPostfixCompletions enables artifical method snippets\nsuch as \"someSlice.sort!\".\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "true",
-				Status:     "experimental",
-				Hierarchy:  "ui.completion",
+				Name:      "experimentalPostfixCompletions",
+				Type:      "bool",
+				Doc:       "experimentalPostfixCompletions enables artificial method snippets\nsuch as \"someSlice.sort!\".\n",
+				Default:   "true",
+				Status:    "experimental",
+				Hierarchy: "ui.completion",
 			},
 			{
 				Name: "importShortcut",
 				Type: "enum",
 				Doc:  "importShortcut specifies whether import statements should link to\ndocumentation or go to definitions.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
 				EnumValues: []EnumValue{
-					{
-						Value: "\"Both\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"Definition\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"Link\"",
-						Doc:   "",
-					},
+					{Value: "\"Both\""},
+					{Value: "\"Definition\""},
+					{Value: "\"Link\""},
 				},
 				Default:   "\"Both\"",
-				Status:    "",
 				Hierarchy: "ui.navigation",
 			},
 			{
 				Name: "symbolMatcher",
 				Type: "enum",
 				Doc:  "symbolMatcher sets the algorithm that is used when finding workspace symbols.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
 				EnumValues: []EnumValue{
-					{
-						Value: "\"CaseInsensitive\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"CaseSensitive\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"FastFuzzy\"",
-						Doc:   "",
-					},
-					{
-						Value: "\"Fuzzy\"",
-						Doc:   "",
-					},
+					{Value: "\"CaseInsensitive\""},
+					{Value: "\"CaseSensitive\""},
+					{Value: "\"FastFuzzy\""},
+					{Value: "\"Fuzzy\""},
 				},
-				Default:   "\"Fuzzy\"",
+				Default:   "\"FastFuzzy\"",
 				Status:    "advanced",
 				Hierarchy: "ui.navigation",
 			},
 			{
 				Name: "symbolStyle",
 				Type: "enum",
-				Doc:  "symbolStyle controls how symbols are qualified in symbol responses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n  \"symbolStyle\": \"dynamic\",\n...\n}\n```\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
+				Doc:  "symbolStyle controls how symbols are qualified in symbol responses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n  \"symbolStyle\": \"Dynamic\",\n...\n}\n```\n",
 				EnumValues: []EnumValue{
 					{
 						Value: "\"Dynamic\"",
@@ -442,6 +289,11 @@
 							Default: "true",
 						},
 						{
+							Name:    "\"infertypeargs\"",
+							Doc:     "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
+							Default: "true",
+						},
+						{
 							Name:    "\"loopclosure\"",
 							Doc:     "check references to loop variables from within nested functions\n\nThis analyzer checks for references to loop variables from within a\nfunction literal inside the loop body. It checks only instances where\nthe function literal is called in a defer or go statement that is the\nlast statement in the loop body, as otherwise we would need whole\nprogram analysis.\n\nFor example:\n\n\tfor i, v := range s {\n\t\tgo func() {\n\t\t\tprintln(i, v) // not what you might expect\n\t\t}()\n\t}\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines",
 							Default: "true",
@@ -552,8 +404,13 @@
 							Default: "false",
 						},
 						{
+							Name:    "\"useany\"",
+							Doc:     "check for constraints that could be simplified to \"any\"",
+							Default: "false",
+						},
+						{
 							Name:    "\"fillreturns\"",
-							Doc:     "suggested fixes for \"wrong number of return values (want %d, got %d)\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n",
+							Doc:     "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n",
 							Default: "true",
 						},
 						{
@@ -563,12 +420,12 @@
 						},
 						{
 							Name:    "\"noresultvalues\"",
-							Doc:     "suggested fixes for \"no result values expected\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\". For example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n",
+							Doc:     "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n",
 							Default: "true",
 						},
 						{
 							Name:    "\"undeclaredname\"",
-							Doc:     "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will insert a new statement:\n\"<> := \".",
+							Doc:     "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will either insert a new statement,\nsuch as:\n\n\"<> := \"\n\nor a new function declaration, such as:\n\nfunc <>(inferred parameters) {\n\tpanic(\"implement me!\")\n}\n",
 							Default: "true",
 						},
 						{
@@ -576,25 +433,23 @@
 							Doc:     "note incomplete struct initializations\n\nThis analyzer provides diagnostics for any struct literals that do not have\nany fields initialized. Because the suggested fix for this analysis is\nexpensive to compute, callers should compute it separately, using the\nSuggestedFix function below.\n",
 							Default: "true",
 						},
+						{
+							Name:    "\"stubmethods\"",
+							Doc:     "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface",
+							Default: "true",
+						},
 					},
 				},
-				EnumValues: nil,
-				Default:    "{}",
-				Status:     "",
-				Hierarchy:  "ui.diagnostic",
+				Default:   "{}",
+				Hierarchy: "ui.diagnostic",
 			},
 			{
-				Name: "staticcheck",
-				Type: "bool",
-				Doc:  "staticcheck enables additional analyses from staticcheck.io.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "experimental",
-				Hierarchy:  "ui.diagnostic",
+				Name:      "staticcheck",
+				Type:      "bool",
+				Doc:       "staticcheck enables additional analyses from staticcheck.io.\n",
+				Default:   "false",
+				Status:    "experimental",
+				Hierarchy: "ui.diagnostic",
 			},
 			{
 				Name: "annotations",
@@ -625,41 +480,30 @@
 						},
 					},
 				},
-				EnumValues: nil,
-				Default:    "{\"bounds\":true,\"escape\":true,\"inline\":true,\"nil\":true}",
-				Status:     "experimental",
-				Hierarchy:  "ui.diagnostic",
+				Default:   "{\"bounds\":true,\"escape\":true,\"inline\":true,\"nil\":true}",
+				Status:    "experimental",
+				Hierarchy: "ui.diagnostic",
 			},
 			{
-				Name: "diagnosticsDelay",
-				Type: "time.Duration",
-				Doc:  "diagnosticsDelay controls the amount of time that gopls waits\nafter the most recent file modification before computing deep diagnostics.\nSimple diagnostics (parsing and type-checking) are always run immediately\non recently modified packages.\n\nThis option must be set to a valid duration string, for example `\"250ms\"`.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "\"250ms\"",
-				Status:     "advanced",
-				Hierarchy:  "ui.diagnostic",
+				Name:      "diagnosticsDelay",
+				Type:      "time.Duration",
+				Doc:       "diagnosticsDelay controls the amount of time that gopls waits\nafter the most recent file modification before computing deep diagnostics.\nSimple diagnostics (parsing and type-checking) are always run immediately\non recently modified packages.\n\nThis option must be set to a valid duration string, for example `\"250ms\"`.\n",
+				Default:   "\"250ms\"",
+				Status:    "advanced",
+				Hierarchy: "ui.diagnostic",
 			},
 			{
-				Name: "experimentalWatchedFileDelay",
-				Type: "time.Duration",
-				Doc:  "experimentalWatchedFileDelay controls the amount of time that gopls waits\nfor additional workspace/didChangeWatchedFiles notifications to arrive,\nbefore processing all such notifications in a single batch. This is\nintended for use by LSP clients that don't support their own batching of\nfile system notifications.\n\nThis option must be set to a valid duration string, for example `\"100ms\"`.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "\"0s\"",
-				Status:     "experimental",
-				Hierarchy:  "ui.diagnostic",
+				Name:      "experimentalWatchedFileDelay",
+				Type:      "time.Duration",
+				Doc:       "experimentalWatchedFileDelay controls the amount of time that gopls waits\nfor additional workspace/didChangeWatchedFiles notifications to arrive,\nbefore processing all such notifications in a single batch. This is\nintended for use by LSP clients that don't support their own batching of\nfile system notifications.\n\nThis option must be set to a valid duration string, for example `\"100ms\"`.\n",
+				Default:   "\"0s\"",
+				Status:    "experimental",
+				Hierarchy: "ui.diagnostic",
 			},
 			{
 				Name: "codelenses",
 				Type: "map[string]bool",
-				Doc:  "codelenses overrides the enabled/disabled state of code lenses. See the\n\"Code Lenses\" section of the\n[Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md)\nfor the list of supported lenses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n  \"codelens\": {\n    \"generate\": false,  // Don't show the `go generate` lens.\n    \"gc_details\": true  // Show a code lens toggling the display of gc's choices.\n  }\n...\n}\n```\n",
+				Doc:  "codelenses overrides the enabled/disabled state of code lenses. See the\n\"Code Lenses\" section of the\n[Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses)\nfor the list of supported lenses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n  \"codelenses\": {\n    \"generate\": false,  // Don't show the `go generate` lens.\n    \"gc_details\": true  // Show a code lens toggling the display of gc's choices.\n  }\n...\n}\n```\n",
 				EnumKeys: EnumKeys{
 					ValueType: "bool",
 					Keys: []EnumKey{
@@ -700,121 +544,101 @@
 						},
 					},
 				},
-				EnumValues: nil,
-				Default:    "{\"gc_details\":false,\"generate\":true,\"regenerate_cgo\":true,\"tidy\":true,\"upgrade_dependency\":true,\"vendor\":true}",
-				Status:     "",
-				Hierarchy:  "ui",
+				Default:   "{\"gc_details\":false,\"generate\":true,\"regenerate_cgo\":true,\"tidy\":true,\"upgrade_dependency\":true,\"vendor\":true}",
+				Hierarchy: "ui",
 			},
 			{
-				Name: "semanticTokens",
-				Type: "bool",
-				Doc:  "semanticTokens controls whether the LSP server will send\nsemantic tokens to the client.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "experimental",
-				Hierarchy:  "ui",
+				Name:      "semanticTokens",
+				Type:      "bool",
+				Doc:       "semanticTokens controls whether the LSP server will send\nsemantic tokens to the client.\n",
+				Default:   "false",
+				Status:    "experimental",
+				Hierarchy: "ui",
 			},
 			{
-				Name: "local",
-				Type: "string",
-				Doc:  "local is the equivalent of the `goimports -local` flag, which puts\nimports beginning with this string after third-party packages. It should\nbe the prefix of the import path whose imports should be grouped\nseparately.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "\"\"",
-				Status:     "",
-				Hierarchy:  "formatting",
+				Name:      "local",
+				Type:      "string",
+				Doc:       "local is the equivalent of the `goimports -local` flag, which puts\nimports beginning with this string after third-party packages. It should\nbe the prefix of the import path whose imports should be grouped\nseparately.\n",
+				Default:   "\"\"",
+				Hierarchy: "formatting",
 			},
 			{
-				Name: "gofumpt",
-				Type: "bool",
-				Doc:  "gofumpt indicates if we should run gofumpt formatting.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "",
-				Hierarchy:  "formatting",
+				Name:      "gofumpt",
+				Type:      "bool",
+				Doc:       "gofumpt indicates if we should run gofumpt formatting.\n",
+				Default:   "false",
+				Hierarchy: "formatting",
 			},
 			{
-				Name: "verboseOutput",
-				Type: "bool",
-				Doc:  "verboseOutput enables additional debug logging.\n",
-				EnumKeys: EnumKeys{
-					ValueType: "",
-					Keys:      nil,
-				},
-				EnumValues: nil,
-				Default:    "false",
-				Status:     "debug",
-				Hierarchy:  "",
+				Name:    "verboseOutput",
+				Type:    "bool",
+				Doc:     "verboseOutput enables additional debug logging.\n",
+				Default: "false",
+				Status:  "debug",
 			},
 		},
 	},
 	Commands: []*CommandJSON{
 		{
-			Command:   "gopls.add_dependency",
-			Title:     "Add a dependency",
-			Doc:       "Adds a dependency to the go.mod file for a module.",
-			ArgDoc:    "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// Additional args to pass to the go command.\n\t\"GoCmdArgs\": []string,\n\t// Whether to add a require directive.\n\t\"AddRequire\": bool,\n}",
-			ResultDoc: "",
+			Command: "gopls.add_dependency",
+			Title:   "Add a dependency",
+			Doc:     "Adds a dependency to the go.mod file for a module.",
+			ArgDoc:  "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// Additional args to pass to the go command.\n\t\"GoCmdArgs\": []string,\n\t// Whether to add a require directive.\n\t\"AddRequire\": bool,\n}",
 		},
 		{
-			Command:   "gopls.add_import",
-			Title:     "Add an import",
-			Doc:       "Ask the server to add an import path to a given Go file.  The method will\ncall applyEdit on the client so that clients don't have to apply the edit\nthemselves.",
-			ArgDoc:    "{\n\t// ImportPath is the target import path that should\n\t// be added to the URI file\n\t\"ImportPath\": string,\n\t// URI is the file that the ImportPath should be\n\t// added to\n\t\"URI\": string,\n}",
-			ResultDoc: "",
+			Command: "gopls.add_import",
+			Title:   "Add an import",
+			Doc:     "Ask the server to add an import path to a given Go file.  The method will\ncall applyEdit on the client so that clients don't have to apply the edit\nthemselves.",
+			ArgDoc:  "{\n\t// ImportPath is the target import path that should\n\t// be added to the URI file\n\t\"ImportPath\": string,\n\t// URI is the file that the ImportPath should be\n\t// added to\n\t\"URI\": string,\n}",
 		},
 		{
-			Command:   "gopls.apply_fix",
-			Title:     "Apply a fix",
-			Doc:       "Applies a fix to a region of source code.",
-			ArgDoc:    "{\n\t// The fix to apply.\n\t\"Fix\": string,\n\t// The file URI for the document to fix.\n\t\"URI\": string,\n\t// The document range to scan for fixes.\n\t\"Range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n}",
-			ResultDoc: "",
+			Command: "gopls.apply_fix",
+			Title:   "Apply a fix",
+			Doc:     "Applies a fix to a region of source code.",
+			ArgDoc:  "{\n\t// The fix to apply.\n\t\"Fix\": string,\n\t// The file URI for the document to fix.\n\t\"URI\": string,\n\t// The document range to scan for fixes.\n\t\"Range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n}",
 		},
 		{
-			Command:   "gopls.check_upgrades",
-			Title:     "Check for upgrades",
-			Doc:       "Checks for module upgrades.",
-			ArgDoc:    "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The modules to check.\n\t\"Modules\": []string,\n}",
-			ResultDoc: "",
+			Command: "gopls.check_upgrades",
+			Title:   "Check for upgrades",
+			Doc:     "Checks for module upgrades.",
+			ArgDoc:  "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The modules to check.\n\t\"Modules\": []string,\n}",
 		},
 		{
-			Command:   "gopls.gc_details",
-			Title:     "Toggle gc_details",
-			Doc:       "Toggle the calculation of gc annotations.",
-			ArgDoc:    "string",
-			ResultDoc: "",
+			Command: "gopls.edit_go_directive",
+			Title:   "Run go mod edit -go=version",
+			Doc:     "Runs `go mod edit -go=version` for a module.",
+			ArgDoc:  "{\n\t// Any document URI within the relevant module.\n\t\"URI\": string,\n\t// The version to pass to `go mod edit -go`.\n\t\"Version\": string,\n}",
 		},
 		{
-			Command:   "gopls.generate",
-			Title:     "Run go generate",
-			Doc:       "Runs `go generate` for a given directory.",
-			ArgDoc:    "{\n\t// URI for the directory to generate.\n\t\"Dir\": string,\n\t// Whether to generate recursively (go generate ./...)\n\t\"Recursive\": bool,\n}",
-			ResultDoc: "",
+			Command: "gopls.gc_details",
+			Title:   "Toggle gc_details",
+			Doc:     "Toggle the calculation of gc annotations.",
+			ArgDoc:  "string",
 		},
 		{
-			Command:   "gopls.generate_gopls_mod",
-			Title:     "Generate gopls.mod",
-			Doc:       "(Re)generate the gopls.mod file for a workspace.",
+			Command: "gopls.generate",
+			Title:   "Run go generate",
+			Doc:     "Runs `go generate` for a given directory.",
+			ArgDoc:  "{\n\t// URI for the directory to generate.\n\t\"Dir\": string,\n\t// Whether to generate recursively (go generate ./...)\n\t\"Recursive\": bool,\n}",
+		},
+		{
+			Command: "gopls.generate_gopls_mod",
+			Title:   "Generate gopls.mod",
+			Doc:     "(Re)generate the gopls.mod file for a workspace.",
+			ArgDoc:  "{\n\t// The file URI.\n\t\"URI\": string,\n}",
+		},
+		{
+			Command: "gopls.go_get_package",
+			Title:   "go get a package",
+			Doc:     "Runs `go get` to fetch a package.",
+			ArgDoc:  "{\n\t// Any document URI within the relevant module.\n\t\"URI\": string,\n\t// The package to go get.\n\t\"Pkg\": string,\n\t\"AddRequire\": bool,\n}",
+		},
+		{
+			Command:   "gopls.list_imports",
+			Title:     "List imports of a file and its package",
+			Doc:       "Retrieve a list of imports in the given Go file, and the package it\nbelongs to.",
 			ArgDoc:    "{\n\t// The file URI.\n\t\"URI\": string,\n}",
-			ResultDoc: "",
-		},
-		{
-			Command:   "gopls.go_get_package",
-			Title:     "go get a package",
-			Doc:       "Runs `go get` to fetch a package.",
-			ArgDoc:    "{\n\t// Any document URI within the relevant module.\n\t\"URI\": string,\n\t// The package to go get.\n\t\"Pkg\": string,\n\t\"AddRequire\": bool,\n}",
-			ResultDoc: "",
+			ResultDoc: "{\n\t// Imports is a list of imports in the requested file.\n\t\"Imports\": []{\n\t\t\"Path\": string,\n\t\t\"Name\": string,\n\t},\n\t// PackageImports is a list of all imports in the requested file's package.\n\t\"PackageImports\": []{\n\t\t\"Path\": string,\n\t},\n}",
 		},
 		{
 			Command:   "gopls.list_known_packages",
@@ -824,25 +648,29 @@
 			ResultDoc: "{\n\t// Packages is a list of packages relative\n\t// to the URIArg passed by the command request.\n\t// In other words, it omits paths that are already\n\t// imported or cannot be imported due to compiler\n\t// restrictions.\n\t\"Packages\": []string,\n}",
 		},
 		{
-			Command:   "gopls.regenerate_cgo",
-			Title:     "Regenerate cgo",
-			Doc:       "Regenerates cgo definitions.",
-			ArgDoc:    "{\n\t// The file URI.\n\t\"URI\": string,\n}",
-			ResultDoc: "",
+			Command: "gopls.regenerate_cgo",
+			Title:   "Regenerate cgo",
+			Doc:     "Regenerates cgo definitions.",
+			ArgDoc:  "{\n\t// The file URI.\n\t\"URI\": string,\n}",
 		},
 		{
-			Command:   "gopls.remove_dependency",
-			Title:     "Remove a dependency",
-			Doc:       "Removes a dependency from the go.mod file of a module.",
-			ArgDoc:    "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The module path to remove.\n\t\"ModulePath\": string,\n\t\"OnlyDiagnostic\": bool,\n}",
-			ResultDoc: "",
+			Command: "gopls.remove_dependency",
+			Title:   "Remove a dependency",
+			Doc:     "Removes a dependency from the go.mod file of a module.",
+			ArgDoc:  "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The module path to remove.\n\t\"ModulePath\": string,\n\t\"OnlyDiagnostic\": bool,\n}",
 		},
 		{
-			Command:   "gopls.run_tests",
-			Title:     "Run test(s)",
-			Doc:       "Runs `go test` for a specific set of test or benchmark functions.",
-			ArgDoc:    "{\n\t// The test file containing the tests to run.\n\t\"URI\": string,\n\t// Specific test names to run, e.g. TestFoo.\n\t\"Tests\": []string,\n\t// Specific benchmarks to run, e.g. BenchmarkFoo.\n\t\"Benchmarks\": []string,\n}",
-			ResultDoc: "",
+			Command: "gopls.run_tests",
+			Title:   "Run test(s)",
+			Doc:     "Runs `go test` for a specific set of test or benchmark functions.",
+			ArgDoc:  "{\n\t// The test file containing the tests to run.\n\t\"URI\": string,\n\t// Specific test names to run, e.g. TestFoo.\n\t\"Tests\": []string,\n\t// Specific benchmarks to run, e.g. BenchmarkFoo.\n\t\"Benchmarks\": []string,\n}",
+		},
+		{
+			Command:   "gopls.run_vulncheck_exp",
+			Title:     "Run vulncheck (experimental)",
+			Doc:       "Run vulnerability check (`govulncheck`).",
+			ArgDoc:    "{\n\t// Dir is the directory from which vulncheck will run from.\n\t\"Dir\": string,\n\t// Package pattern. E.g. \"\", \".\", \"./...\".\n\t\"Pattern\": string,\n}",
+			ResultDoc: "{\n\t\"Vuln\": []{\n\t\t\"ID\": string,\n\t\t\"Details\": string,\n\t\t\"Aliases\": []string,\n\t\t\"Symbol\": string,\n\t\t\"PkgPath\": string,\n\t\t\"ModPath\": string,\n\t\t\"URL\": string,\n\t\t\"CurrentVersion\": string,\n\t\t\"FixedVersion\": string,\n\t\t\"CallStacks\": [][]golang.org/x/tools/internal/lsp/command.StackEntry,\n\t},\n}",
 		},
 		{
 			Command:   "gopls.start_debugging",
@@ -852,53 +680,40 @@
 			ResultDoc: "{\n\t// The URLs to use to access the debug servers, for all gopls instances in\n\t// the serving path. For the common case of a single gopls instance (i.e. no\n\t// daemon), this will be exactly one address.\n\t// \n\t// In the case of one or more gopls instances forwarding the LSP to a daemon,\n\t// URLs will contain debug addresses for each server in the serving path, in\n\t// serving order. The daemon debug address will be the last entry in the\n\t// slice. If any intermediate gopls instance fails to start debugging, no\n\t// error will be returned but the debug URL for that server in the URLs slice\n\t// will be empty.\n\t\"URLs\": []string,\n}",
 		},
 		{
-			Command:   "gopls.test",
-			Title:     "Run test(s) (legacy)",
-			Doc:       "Runs `go test` for a specific set of test or benchmark functions.",
-			ArgDoc:    "string,\n[]string,\n[]string",
-			ResultDoc: "",
+			Command: "gopls.test",
+			Title:   "Run test(s) (legacy)",
+			Doc:     "Runs `go test` for a specific set of test or benchmark functions.",
+			ArgDoc:  "string,\n[]string,\n[]string",
 		},
 		{
-			Command:   "gopls.tidy",
-			Title:     "Run go mod tidy",
-			Doc:       "Runs `go mod tidy` for a module.",
-			ArgDoc:    "{\n\t// The file URIs.\n\t\"URIs\": []string,\n}",
-			ResultDoc: "",
+			Command: "gopls.tidy",
+			Title:   "Run go mod tidy",
+			Doc:     "Runs `go mod tidy` for a module.",
+			ArgDoc:  "{\n\t// The file URIs.\n\t\"URIs\": []string,\n}",
 		},
 		{
-			Command:   "gopls.toggle_gc_details",
-			Title:     "Toggle gc_details",
-			Doc:       "Toggle the calculation of gc annotations.",
-			ArgDoc:    "{\n\t// The file URI.\n\t\"URI\": string,\n}",
-			ResultDoc: "",
+			Command: "gopls.toggle_gc_details",
+			Title:   "Toggle gc_details",
+			Doc:     "Toggle the calculation of gc annotations.",
+			ArgDoc:  "{\n\t// The file URI.\n\t\"URI\": string,\n}",
 		},
 		{
-			Command:   "gopls.update_go_sum",
-			Title:     "Update go.sum",
-			Doc:       "Updates the go.sum file for a module.",
-			ArgDoc:    "{\n\t// The file URIs.\n\t\"URIs\": []string,\n}",
-			ResultDoc: "",
+			Command: "gopls.update_go_sum",
+			Title:   "Update go.sum",
+			Doc:     "Updates the go.sum file for a module.",
+			ArgDoc:  "{\n\t// The file URIs.\n\t\"URIs\": []string,\n}",
 		},
 		{
-			Command:   "gopls.upgrade_dependency",
-			Title:     "Upgrade a dependency",
-			Doc:       "Upgrades a dependency in the go.mod file for a module.",
-			ArgDoc:    "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// Additional args to pass to the go command.\n\t\"GoCmdArgs\": []string,\n\t// Whether to add a require directive.\n\t\"AddRequire\": bool,\n}",
-			ResultDoc: "",
+			Command: "gopls.upgrade_dependency",
+			Title:   "Upgrade a dependency",
+			Doc:     "Upgrades a dependency in the go.mod file for a module.",
+			ArgDoc:  "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// Additional args to pass to the go command.\n\t\"GoCmdArgs\": []string,\n\t// Whether to add a require directive.\n\t\"AddRequire\": bool,\n}",
 		},
 		{
-			Command:   "gopls.vendor",
-			Title:     "Run go mod vendor",
-			Doc:       "Runs `go mod vendor` for a module.",
-			ArgDoc:    "{\n\t// The file URI.\n\t\"URI\": string,\n}",
-			ResultDoc: "",
-		},
-		{
-			Command:   "gopls.workspace_metadata",
-			Title:     "Query workspace metadata",
-			Doc:       "Query the server for information about active workspaces.",
-			ArgDoc:    "",
-			ResultDoc: "{\n\t// All workspaces for this session.\n\t\"Workspaces\": []{\n\t\t\"Name\": string,\n\t\t\"ModuleDir\": string,\n\t},\n}",
+			Command: "gopls.vendor",
+			Title:   "Run go mod vendor",
+			Doc:     "Runs `go mod vendor` for a module.",
+			ArgDoc:  "{\n\t// The file URI.\n\t\"URI\": string,\n}",
 		},
 	},
 	Lenses: []*LensJSON{
@@ -995,9 +810,8 @@
 			Default: true,
 		},
 		{
-			Name:    "fieldalignment",
-			Doc:     "find structs that would use less memory if their fields were sorted\n\nThis analyzer find structs that can be rearranged to use less memory, and provides\na suggested edit with the optimal order.\n\nNote that there are two different diagnostics reported. One checks struct size,\nand the other reports \"pointer bytes\" used. Pointer bytes is how many bytes of the\nobject that the garbage collector has to potentially scan for pointers, for example:\n\n\tstruct { uint32; string }\n\nhave 16 pointer bytes because the garbage collector has to scan up through the string's\ninner pointer.\n\n\tstruct { string; *uint32 }\n\nhas 24 pointer bytes because it has to scan further through the *uint32.\n\n\tstruct { string; uint32 }\n\nhas 8 because it can stop immediately after the string pointer.\n",
-			Default: false,
+			Name: "fieldalignment",
+			Doc:  "find structs that would use less memory if their fields were sorted\n\nThis analyzer find structs that can be rearranged to use less memory, and provides\na suggested edit with the optimal order.\n\nNote that there are two different diagnostics reported. One checks struct size,\nand the other reports \"pointer bytes\" used. Pointer bytes is how many bytes of the\nobject that the garbage collector has to potentially scan for pointers, for example:\n\n\tstruct { uint32; string }\n\nhave 16 pointer bytes because the garbage collector has to scan up through the string's\ninner pointer.\n\n\tstruct { string; *uint32 }\n\nhas 24 pointer bytes because it has to scan further through the *uint32.\n\n\tstruct { string; uint32 }\n\nhas 8 because it can stop immediately after the string pointer.\n",
 		},
 		{
 			Name:    "httpresponse",
@@ -1010,6 +824,11 @@
 			Default: true,
 		},
 		{
+			Name:    "infertypeargs",
+			Doc:     "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
+			Default: true,
+		},
+		{
 			Name:    "loopclosure",
 			Doc:     "check references to loop variables from within nested functions\n\nThis analyzer checks for references to loop variables from within a\nfunction literal inside the loop body. It checks only instances where\nthe function literal is called in a defer or go statement that is the\nlast statement in the loop body, as otherwise we would need whole\nprogram analysis.\n\nFor example:\n\n\tfor i, v := range s {\n\t\tgo func() {\n\t\t\tprintln(i, v) // not what you might expect\n\t\t}()\n\t}\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines",
 			Default: true,
@@ -1025,9 +844,8 @@
 			Default: true,
 		},
 		{
-			Name:    "nilness",
-			Doc:     "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n",
-			Default: false,
+			Name: "nilness",
+			Doc:  "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n",
 		},
 		{
 			Name:    "printf",
@@ -1035,9 +853,8 @@
 			Default: true,
 		},
 		{
-			Name:    "shadow",
-			Doc:     "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}\n",
-			Default: false,
+			Name: "shadow",
+			Doc:  "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}\n",
 		},
 		{
 			Name:    "shift",
@@ -1105,9 +922,8 @@
 			Default: true,
 		},
 		{
-			Name:    "unusedparams",
-			Doc:     "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or are underscored\n- functions in test files\n- functions with empty bodies or those with just a return stmt",
-			Default: false,
+			Name: "unusedparams",
+			Doc:  "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or are underscored\n- functions in test files\n- functions with empty bodies or those with just a return stmt",
 		},
 		{
 			Name:    "unusedresult",
@@ -1115,13 +931,16 @@
 			Default: true,
 		},
 		{
-			Name:    "unusedwrite",
-			Doc:     "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\tfunc f(input []T) {\n\t\tfor i, v := range input {  // v is a copy\n\t\t\tv.x = i  // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\tfunc (t T) f() {  // t is a copy\n\t\tt.x = i  // unused write to field x\n\t}\n",
-			Default: false,
+			Name: "unusedwrite",
+			Doc:  "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\tfunc f(input []T) {\n\t\tfor i, v := range input {  // v is a copy\n\t\t\tv.x = i  // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\tfunc (t T) f() {  // t is a copy\n\t\tt.x = i  // unused write to field x\n\t}\n",
+		},
+		{
+			Name: "useany",
+			Doc:  "check for constraints that could be simplified to \"any\"",
 		},
 		{
 			Name:    "fillreturns",
-			Doc:     "suggested fixes for \"wrong number of return values (want %d, got %d)\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n",
+			Doc:     "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n",
 			Default: true,
 		},
 		{
@@ -1131,12 +950,12 @@
 		},
 		{
 			Name:    "noresultvalues",
-			Doc:     "suggested fixes for \"no result values expected\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\". For example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n",
+			Doc:     "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n",
 			Default: true,
 		},
 		{
 			Name:    "undeclaredname",
-			Doc:     "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will insert a new statement:\n\"<> := \".",
+			Doc:     "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will either insert a new statement,\nsuch as:\n\n\"<> := \"\n\nor a new function declaration, such as:\n\nfunc <>(inferred parameters) {\n\tpanic(\"implement me!\")\n}\n",
 			Default: true,
 		},
 		{
@@ -1144,5 +963,10 @@
 			Doc:     "note incomplete struct initializations\n\nThis analyzer provides diagnostics for any struct literals that do not have\nany fields initialized. Because the suggested fix for this analysis is\nexpensive to compute, callers should compute it separately, using the\nSuggestedFix function below.\n",
 			Default: true,
 		},
+		{
+			Name:    "stubmethods",
+			Doc:     "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface",
+			Default: true,
+		},
 	},
 }
diff --git a/internal/lsp/source/call_hierarchy.go b/internal/lsp/source/call_hierarchy.go
index 26ef07e..991c30a 100644
--- a/internal/lsp/source/call_hierarchy.go
+++ b/internal/lsp/source/call_hierarchy.go
@@ -32,8 +32,9 @@
 		}
 		return nil, err
 	}
+
 	// The identifier can be nil if it is an import spec.
-	if identifier == nil {
+	if identifier == nil || identifier.Declaration.obj == nil {
 		return nil, nil
 	}
 
diff --git a/internal/lsp/source/completion/completion.go b/internal/lsp/source/completion/completion.go
index dbc380c..60c404d 100644
--- a/internal/lsp/source/completion/completion.go
+++ b/internal/lsp/source/completion/completion.go
@@ -29,6 +29,7 @@
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/snippet"
 	"golang.org/x/tools/internal/lsp/source"
+	"golang.org/x/tools/internal/typeparams"
 	errors "golang.org/x/xerrors"
 )
 
@@ -485,6 +486,13 @@
 					qual := types.RelativeTo(pkg.GetTypes())
 					objStr = types.ObjectString(obj, qual)
 				}
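+				// Before giving up with ErrIsDefinition, check whether we can offer
+				// test-function completions (TestXxx, BenchmarkXxx, FuzzXxx) for this
+				// definition in a _test.go file.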
+				ans, sel := definition(path, obj, snapshot.FileSet(), pgf.Mapper, fh)
+				if ans != nil {
+					sort.Slice(ans, func(i, j int) bool {
+						return ans[i].Score > ans[j].Score
+					})
+					return ans, sel, nil
+				}
 				return nil, nil, ErrIsDefinition{objStr: objStr}
 			}
 		}
@@ -1231,6 +1239,13 @@
 		c.methodSetCache[methodSetKey{typ, addressable}] = mset
 	}
 
+	if typ.String() == "*testing.F" && addressable {
+		// is that a sufficient test? (or is more care needed?)
+		if c.fuzz(typ, mset, imp, cb, c.snapshot.FileSet()) {
+			return
+		}
+	}
+
 	for i := 0; i < mset.Len(); i++ {
 		cb(candidate{
 			obj:         mset.At(i).Obj(),
@@ -1373,33 +1388,18 @@
 		}
 	}
 
-	if t := c.inference.objType; t != nil {
-		t = source.Deref(t)
-
-		// If we have an expected type and it is _not_ a named type,
-		// handle it specially. Non-named types like "[]int" will never be
-		// considered via a lexical search, so we need to directly inject
-		// them.
-		if _, named := t.(*types.Named); !named {
-			// If our expected type is "[]int", this will add a literal
-			// candidate of "[]int{}".
-			c.literal(ctx, t, nil)
-
-			if _, isBasic := t.(*types.Basic); !isBasic {
-				// If we expect a non-basic type name (e.g. "[]int"), hack up
-				// a named type whose name is literally "[]int". This allows
-				// us to reuse our object based completion machinery.
-				fakeNamedType := candidate{
-					obj:   types.NewTypeName(token.NoPos, nil, types.TypeString(t, c.qf), t),
-					score: stdScore,
-				}
-				// Make sure the type name matches before considering
-				// candidate. This cuts down on useless candidates.
-				if c.matchingTypeName(&fakeNamedType) {
-					c.deepState.enqueue(fakeNamedType)
-				}
+	if c.inference.typeName.isTypeParam {
+		// If we are completing a type param, offer each structural type.
+		// This ensures we suggest "[]int" and "[]float64" for a constraint
+		// with type union "[]int | []float64".
+		if t, _ := c.inference.objType.(*types.Interface); t != nil {
+			terms, _ := typeparams.InterfaceTermSet(t)
+			for _, term := range terms {
+				c.injectType(ctx, term.Type())
 			}
 		}
+	} else {
+		c.injectType(ctx, c.inference.objType)
 	}
 
 	// Add keyword completion items appropriate in the current context.
@@ -1408,6 +1408,43 @@
 	return nil
 }
 
+// injectType manufactures candidates based on the given type.
+// For example, if the type is "[]int", this method makes sure you get
+// candidates "[]int{}" and "[]int" (the latter applies when
+// completing a type name).
+func (c *completer) injectType(ctx context.Context, t types.Type) {
+	if t == nil {
+		return
+	}
+
+	t = source.Deref(t)
+
+	// If we have an expected type and it is _not_ a named type,
+	// handle it specially. Non-named types like "[]int" will never be
+	// considered via a lexical search, so we need to directly inject
+	// them.
+	if _, named := t.(*types.Named); !named {
+		// If our expected type is "[]int", this will add a literal
+		// candidate of "[]int{}".
+		c.literal(ctx, t, nil)
+
+		if _, isBasic := t.(*types.Basic); !isBasic {
+			// If we expect a non-basic type name (e.g. "[]int"), hack up
+			// a named type whose name is literally "[]int". This allows
+			// us to reuse our object based completion machinery.
+			fakeNamedType := candidate{
+				obj:   types.NewTypeName(token.NoPos, nil, types.TypeString(t, c.qf), t),
+				score: stdScore,
+			}
+			// Make sure the type name matches before considering
+			// candidate. This cuts down on useless candidates.
+			if c.matchingTypeName(&fakeNamedType) {
+				c.deepState.enqueue(fakeNamedType)
+			}
+		}
+	}
+}
+
 func (c *completer) unimportedPackages(ctx context.Context, seen map[string]struct{}) error {
 	var prefix string
 	if c.surrounding != nil {
@@ -1694,6 +1731,12 @@
 			}
 		case *ast.FuncLit:
 			if typ, ok := info.Types[t]; ok {
+				if sig, _ := typ.Type.(*types.Signature); sig == nil {
+					// golang/go#49397: it should not be possible, but we somehow arrived
+					// here with a non-signature type, most likely due to AST mangling
+					// such that node.Type is not a FuncType.
+					return nil
+				}
 				return &funcInfo{
 					sig:  typ.Type.(*types.Signature),
 					body: t.Body,
@@ -1885,6 +1928,9 @@
 	// compLitType is true if we are completing a composite literal type
 	// name, e.g "foo<>{}".
 	compLitType bool
+
+	// isTypeParam is true if we are completing a type instantiation parameter
+	isTypeParam bool
 }
 
 // expectedCandidate returns information about the expected candidate
@@ -2074,6 +2120,23 @@
 					case *types.Slice, *types.Array:
 						inf.objType = types.Typ[types.UntypedInt]
 					}
+
+					if ct := expectedConstraint(tv.Type, 0); ct != nil {
+						inf.objType = ct
+						inf.typeName.wantTypeName = true
+						inf.typeName.isTypeParam = true
+					}
+				}
+			}
+			return inf
+		case *typeparams.IndexListExpr:
+			if node.Lbrack < c.pos && c.pos <= node.Rbrack {
+				if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok {
+					if ct := expectedConstraint(tv.Type, exprAtPos(c.pos, node.Indices)); ct != nil {
+						inf.objType = ct
+						inf.typeName.wantTypeName = true
+						inf.typeName.isTypeParam = true
+					}
 				}
 			}
 			return inf
@@ -2117,6 +2180,19 @@
 	return inf
 }
 
+func expectedConstraint(t types.Type, idx int) types.Type {
+	var tp *typeparams.TypeParamList
+	if named, _ := t.(*types.Named); named != nil {
+		tp = typeparams.ForNamed(named)
+	} else if sig, _ := t.Underlying().(*types.Signature); sig != nil {
+		tp = typeparams.ForSignature(sig)
+	}
+	if tp == nil || idx >= tp.Len() {
+		return nil
+	}
+	return tp.At(idx).Constraint()
+}
+
 // objChain decomposes e into a chain of objects if possible. For
 // example, "foo.bar().baz" will yield []types.Object{foo, bar, baz}.
 // If any part can't be turned into an object, return nil.
@@ -2619,6 +2695,10 @@
 		return false
 	}
 
+	if _, ok := from.(*typeparams.TypeParam); ok {
+		return false
+	}
+
 	if !types.ConvertibleTo(from, to) {
 		return false
 	}
diff --git a/internal/lsp/source/completion/definition.go b/internal/lsp/source/completion/definition.go
new file mode 100644
index 0000000..17b251c
--- /dev/null
+++ b/internal/lsp/source/completion/definition.go
@@ -0,0 +1,127 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+	"go/ast"
+	"go/token"
+	"go/types"
+	"strings"
+	"unicode"
+	"unicode/utf8"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/snippet"
+	"golang.org/x/tools/internal/lsp/source"
+)
+
+// Some definitions can be completed.
+// So far: TestFoo(t *testing.T), TestMain(m *testing.M),
+// BenchmarkFoo(b *testing.B), and FuzzFoo(f *testing.F).
+
+// path[0] is known to be *ast.Ident
+func definition(path []ast.Node, obj types.Object, fset *token.FileSet, mapper *protocol.ColumnMapper, fh source.FileHandle) ([]CompletionItem, *Selection) {
+	if _, ok := obj.(*types.Func); !ok {
+		return nil, nil // not a function at all
+	}
+	if !strings.HasSuffix(fh.URI().Filename(), "_test.go") {
+		return nil, nil
+	}
+
+	name := path[0].(*ast.Ident).Name
+	if len(name) == 0 {
+		// can't happen
+		return nil, nil
+	}
+	pos := path[0].Pos()
+	sel := &Selection{
+		content:     "",
+		cursor:      pos,
+		MappedRange: source.NewMappedRange(fset, mapper, pos, pos),
+	}
+	var ans []CompletionItem
+
+	// Always suggest TestMain, if possible
+	if strings.HasPrefix("TestMain", name) {
+		ans = []CompletionItem{defItem("TestMain(m *testing.M)", obj)}
+	}
+
+	// If a snippet is possible, suggest it
+	if strings.HasPrefix("Test", name) {
+		ans = append(ans, defSnippet("Test", "Xxx", "(t *testing.T)", obj))
+		return ans, sel
+	} else if strings.HasPrefix("Benchmark", name) {
+		ans = append(ans, defSnippet("Benchmark", "Xxx", "(b *testing.B)", obj))
+		return ans, sel
+	} else if strings.HasPrefix("Fuzz", name) {
+		ans = append(ans, defSnippet("Fuzz", "Xxx", "(f *testing.F)", obj))
+		return ans, sel
+	}
+
+	// Fill in the argument for what the user has already typed
+	if got := defMatches(name, "Test", path, "(t *testing.T)"); got != "" {
+		ans = append(ans, defItem(got, obj))
+	} else if got := defMatches(name, "Benchmark", path, "(b *testing.B)"); got != "" {
+		ans = append(ans, defItem(got, obj))
+	} else if got := defMatches(name, "Fuzz", path, "(f *testing.F)"); got != "" {
+		ans = append(ans, defItem(got, obj))
+	}
+	return ans, sel
+}
+
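+// defMatches returns the completion text for name when it matches the
+// test-function pattern pat (e.g. "Test", "Benchmark", "Fuzz"): just the name
+// if the enclosing declaration already has parameters, or name plus the
+// suggested signature arg otherwise. It returns "" when there is no match.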
+func defMatches(name, pat string, path []ast.Node, arg string) string {
+	idx := strings.Index(name, pat)
+	if idx < 0 {
+		return ""
+	}
+	c, _ := utf8.DecodeRuneInString(name[len(pat):])
+	if unicode.IsLower(c) {
+		return ""
+	}
+	fd, ok := path[1].(*ast.FuncDecl)
+	if !ok {
+		// we don't know what's going on
+		return ""
+	}
+	fp := fd.Type.Params
+	if fp != nil && len(fp.List) > 0 {
+		// signature already there, minimal suggestion
+		return name
+	}
+	// suggesting signature too
+	return name + arg
+}
+
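+// defSnippet builds a snippet completion such as "Test${Xxx}(t *testing.T) { ... }",
+// with the placeholder in the middle and a final tab stop inside the braces.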
+func defSnippet(prefix, placeholder, suffix string, obj types.Object) CompletionItem {
+	var sn snippet.Builder
+	sn.WriteText(prefix)
+	if placeholder != "" {
+		sn.WritePlaceholder(func(b *snippet.Builder) { b.WriteText(placeholder) })
+	}
+	sn.WriteText(suffix + " {\n")
+	sn.WriteFinalTabstop()
+	sn.WriteText("\n}")
+	return CompletionItem{
+		Label:         prefix + placeholder + suffix,
+		Detail:        "tab, type the rest of the name, then tab",
+		Kind:          protocol.FunctionCompletion,
+		Depth:         0,
+		Score:         10,
+		snippet:       &sn,
+		Documentation: prefix + " test function",
+		obj:           obj,
+	}
+}
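+
+// defItem returns a plain (non-snippet) completion item that inserts val verbatim.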
+func defItem(val string, obj types.Object) CompletionItem {
+	return CompletionItem{
+		Label:         val,
+		InsertText:    val,
+		Kind:          protocol.FunctionCompletion,
+		Depth:         0,
+		Score:         9, // prefer the snippets when available
+		Documentation: "complete the parameter",
+		obj:           obj,
+	}
+}
diff --git a/internal/lsp/source/completion/format.go b/internal/lsp/source/completion/format.go
index c7a7e01..e674569 100644
--- a/internal/lsp/source/completion/format.go
+++ b/internal/lsp/source/completion/format.go
@@ -7,6 +7,8 @@
 import (
 	"context"
 	"fmt"
+	"go/ast"
+	"go/doc"
 	"go/types"
 	"strings"
 
@@ -17,6 +19,7 @@
 	"golang.org/x/tools/internal/lsp/snippet"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/span"
+	"golang.org/x/tools/internal/typeparams"
 	errors "golang.org/x/xerrors"
 )
 
@@ -57,6 +60,14 @@
 	if obj.Type() == nil {
 		detail = ""
 	}
+	if isTypeName(obj) && c.wantTypeParams() {
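+		// The candidate is a type name in a receiver position; if it is a
+		// generic named type, append its type parameter list to the label.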
+		x := cand.obj.(*types.TypeName)
+		if named, ok := x.Type().(*types.Named); ok {
+			tp := typeparams.ForNamed(named)
+			label += source.FormatTypeParams(tp)
+			insert = label // maintain invariant above (label == insert)
+		}
+	}
 
 	snip.WriteText(insert)
 
@@ -119,7 +130,7 @@
 		case invoke:
 			if sig, ok := funcType.Underlying().(*types.Signature); ok {
 				s := source.NewSignature(ctx, c.snapshot, c.pkg, sig, nil, c.qf)
-				c.functionCallSnippet("", s.Params(), &snip)
+				c.functionCallSnippet("", s.TypeParams(), s.Params(), &snip)
 				if sig.Results().Len() == 1 {
 					funcType = sig.Results().At(0).Type()
 				}
@@ -235,17 +246,18 @@
 	if err != nil {
 		return CompletionItem{}, err
 	}
-	hover, err := source.HoverInfo(ctx, c.snapshot, pkg, obj, decl, nil)
+	hover, err := source.FindHoverContext(ctx, c.snapshot, pkg, obj, decl, nil)
 	if err != nil {
 		event.Error(ctx, "failed to find Hover", err, tag.URI.Of(uri))
 		return item, nil
 	}
-	item.Documentation = hover.Synopsis
 	if c.opts.fullDocumentation {
-		item.Documentation = hover.FullDocumentation
+		item.Documentation = hover.Comment.Text()
+	} else {
+		item.Documentation = doc.Synopsis(hover.Comment.Text())
 	}
 	// The desired pattern is `^// Deprecated`, but the prefix has been removed
-	if strings.HasPrefix(hover.FullDocumentation, "Deprecated") {
+	if strings.HasPrefix(hover.Comment.Text(), "Deprecated") {
 		if c.snapshot.View().Options().CompletionTags {
 			item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated}
 		} else if c.snapshot.View().Options().CompletionDeprecated {
@@ -295,7 +307,7 @@
 		}
 		item.Detail = "func" + sig.Format()
 		item.snippet = &snippet.Builder{}
-		c.functionCallSnippet(obj.Name(), sig.Params(), item.snippet)
+		c.functionCallSnippet(obj.Name(), sig.TypeParams(), sig.Params(), item.snippet)
 	case *types.TypeName:
 		if types.IsInterface(obj.Type()) {
 			item.Kind = protocol.InterfaceCompletion
@@ -307,3 +319,22 @@
 	}
 	return item, nil
 }
+
+// wantTypeParams reports whether the type parameters (if any) should be part of
+// the completion, which is only possible for types.Named and types.Signature
+// (so far, only in receivers, e.g. func (s *GENERIC[K, V]) ..., which is a types.Named).
+func (c *completer) wantTypeParams() bool {
+	// Need to be lexically in a receiver, and a child of an IndexListExpr
+	// (but IndexListExpr only exists with go1.18)
+	start := c.path[0].Pos()
+	for i, nd := range c.path {
+		if fd, ok := nd.(*ast.FuncDecl); ok {
+			if i > 0 && fd.Recv != nil && start < fd.Recv.End() {
+				return true
+			} else {
+				return false
+			}
+		}
+	}
+	return false
+}
diff --git a/internal/lsp/source/completion/fuzz.go b/internal/lsp/source/completion/fuzz.go
new file mode 100644
index 0000000..92349ab
--- /dev/null
+++ b/internal/lsp/source/completion/fuzz.go
@@ -0,0 +1,142 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+	"strings"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+)
+
+// golang/go#51089
+// *testing.F deserves special treatment as member use is constrained:
+// The arguments to f.Fuzz are determined by the arguments to a previous f.Add
+// Inside f.Fuzz only f.Failed and f.Name are allowed.
+// PJW: are there other packages where we can deduce usage constraints?
+
+// if we find fuzz completions, then return true, as those are the only completions to offer
+func (c *completer) fuzz(typ types.Type, mset *types.MethodSet, imp *importInfo, cb func(candidate), fset *token.FileSet) bool {
+	// 1. inside f.Fuzz? (only f.Failed and f.Name)
+	// 2. possible completing f.Fuzz?
+	//    [Ident,SelectorExpr,CallExpr,ExprStmt,BlockStmt,FuncDecl(Fuzz...)]
+	// 3. before f.Fuzz, same (for 2., offer choice when looking at an F)
+
+	// does the path contain FuncLit as arg to f.Fuzz CallExpr?
+	inside := false
+Loop:
+	for i, n := range c.path {
+		switch v := n.(type) {
+		case *ast.CallExpr:
+			if len(v.Args) != 1 {
+				continue Loop
+			}
+			if _, ok := v.Args[0].(*ast.FuncLit); !ok {
+				continue
+			}
+			if s, ok := v.Fun.(*ast.SelectorExpr); !ok || s.Sel.Name != "Fuzz" {
+				continue
+			}
+			if i > 2 { // avoid t.Fuzz itself in tests
+				inside = true
+				break Loop
+			}
+		}
+	}
+	if inside {
+		for i := 0; i < mset.Len(); i++ {
+			o := mset.At(i).Obj()
+			if o.Name() == "Failed" || o.Name() == "Name" {
+				cb(candidate{
+					obj:         o,
+					score:       stdScore,
+					imp:         imp,
+					addressable: true,
+				})
+			}
+		}
+		return true
+	}
+	// if it could be f.Fuzz, look for the preceding f.Add
+	id, ok := c.path[0].(*ast.Ident)
+	if ok && strings.HasPrefix("Fuzz", id.Name) {
+		var add *ast.CallExpr
+		f := func(n ast.Node) bool {
+			if n == nil {
+				return true
+			}
+			call, ok := n.(*ast.CallExpr)
+			if !ok {
+				return true
+			}
+			s, ok := call.Fun.(*ast.SelectorExpr)
+			if !ok {
+				return true
+			}
+			if s.Sel.Name != "Add" {
+				return true
+			}
+			// Sel.X should be of type *testing.F
+			got := c.pkg.GetTypesInfo().Types[s.X]
+			if got.Type.String() == "*testing.F" {
+				add = call
+			}
+			return false // because we're done...
+		}
+		// look at the enclosing FuzzFoo functions
+		if len(c.path) < 2 {
+			return false
+		}
+		n := c.path[len(c.path)-2]
+		if _, ok := n.(*ast.FuncDecl); !ok {
+			// the path should start with ast.File, ast.FuncDecl, ...
+			// but it didn't, so give up
+			return false
+		}
+		ast.Inspect(n, f)
+		if add == nil {
+			// looks like f.Fuzz without a preceding f.Add.
+			// let the regular completion handle it.
+			return false
+		}
+
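+		// Build a completion whose fuzz callback parameters mirror the types
+		// passed to the preceding f.Add call, e.g. Fuzz(func(t *testing.T, a []byte, b int).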
+		lbl := "Fuzz(func(t *testing.T"
+		for i, a := range add.Args {
+			info := c.pkg.GetTypesInfo().TypeOf(a)
+			if info == nil {
+				return false // shouldn't happen, but better safe than a panic
+			}
+			lbl += fmt.Sprintf(", %c %s", 'a'+i, info)
+		}
+		lbl += ")"
+		xx := CompletionItem{
+			Label:         lbl,
+			InsertText:    lbl,
+			Kind:          protocol.FunctionCompletion,
+			Depth:         0,
+			Score:         10, // pretty confident the user should see this
+			Documentation: "argument types from f.Add",
+			obj:           nil,
+		}
+		c.items = append(c.items, xx)
+		for i := 0; i < mset.Len(); i++ {
+			o := mset.At(i).Obj()
+			if o.Name() != "Fuzz" {
+				cb(candidate{
+					obj:         o,
+					score:       stdScore,
+					imp:         imp,
+					addressable: true,
+				})
+			}
+		}
+		return true // done
+	}
+	// let the standard processing take care of it instead
+	return false
+}
diff --git a/internal/lsp/source/completion/literal.go b/internal/lsp/source/completion/literal.go
index 0fc7a81..5025f1f 100644
--- a/internal/lsp/source/completion/literal.go
+++ b/internal/lsp/source/completion/literal.go
@@ -294,6 +294,15 @@
 	})
 }
 
+// conventionalAcronyms contains conventional acronyms for type names
+// in lower case. For example, "ctx" for "context" and "err" for "error".
+var conventionalAcronyms = map[string]string{
+	"context":        "ctx",
+	"error":          "err",
+	"tx":             "tx",
+	"responsewriter": "w",
+}
+
 // abbreviateTypeName abbreviates type names into acronyms. For
 // example, "fooBar" is abbreviated "fb". Care is taken to ignore
 // non-identifier runes. For example, "[]int" becomes "i", and
@@ -320,6 +329,10 @@
 		return !unicode.IsLetter(r)
 	})
 
+	if acr, ok := conventionalAcronyms[strings.ToLower(s)]; ok {
+		return acr
+	}
+
 	for i, r := range s {
 		// Stop if we encounter a non-identifier rune.
 		if !unicode.IsLetter(r) && !unicode.IsNumber(r) {
diff --git a/internal/lsp/source/completion/package.go b/internal/lsp/source/completion/package.go
index 0ed66e6..c7e52d7 100644
--- a/internal/lsp/source/completion/package.go
+++ b/internal/lsp/source/completion/package.go
@@ -80,12 +80,15 @@
 		return nil, fmt.Errorf("unparseable file (%s)", pgf.URI)
 	}
 	tok := fset.File(expr.Pos())
-	offset := pgf.Tok.Offset(pos)
+	offset, err := source.Offset(pgf.Tok, pos)
+	if err != nil {
+		return nil, err
+	}
 	if offset > tok.Size() {
 		debug.Bug(ctx, "out of bounds cursor", "cursor offset (%d) out of bounds for %s (size: %d)", offset, pgf.URI, tok.Size())
 		return nil, fmt.Errorf("cursor out of bounds")
 	}
-	cursor := tok.Pos(pgf.Tok.Offset(pos))
+	cursor := tok.Pos(offset)
 	m := &protocol.ColumnMapper{
 		URI:       pgf.URI,
 		Content:   pgf.Src,
diff --git a/internal/lsp/source/completion/postfix_snippets.go b/internal/lsp/source/completion/postfix_snippets.go
index 4c5cb0e..7ea9621 100644
--- a/internal/lsp/source/completion/postfix_snippets.go
+++ b/internal/lsp/source/completion/postfix_snippets.go
@@ -174,6 +174,18 @@
 	body: `{{if and (eq .Kind "tuple") .StmtOK -}}
 {{.Import "fmt"}}.Println({{.X}})
 {{- end}}`,
+}, {
+	label:   "split",
+	details: "split string",
+	body: `{{if (eq (.TypeName .Type) "string") -}}
+{{.Import "strings"}}.Split({{.X}}, "{{.Cursor}}")
+{{- end}}`,
+}, {
+	label:   "join",
+	details: "join string slice",
+	body: `{{if and (eq .Kind "slice") (eq (.TypeName .ElemType) "string") -}}
+{{.Import "strings"}}.Join({{.X}}, "{{.Cursor}}")
+{{- end}}`,
 }}
 
 // Cursor indicates where the client's cursor should end up after the
diff --git a/internal/lsp/source/completion/snippet.go b/internal/lsp/source/completion/snippet.go
index 3649314..72c351f 100644
--- a/internal/lsp/source/completion/snippet.go
+++ b/internal/lsp/source/completion/snippet.go
@@ -49,7 +49,7 @@
 }
 
 // functionCallSnippets calculates the snippet for function calls.
-func (c *completer) functionCallSnippet(name string, params []string, snip *snippet.Builder) {
+func (c *completer) functionCallSnippet(name string, tparams, params []string, snip *snippet.Builder) {
 	// If there is no suffix then we need to reuse existing call parens
 	// "()" if present. If there is an identifier suffix then we always
 	// need to include "()" since we don't overwrite the suffix.
@@ -73,7 +73,26 @@
 		}
 	}
 
-	snip.WriteText(name + "(")
+	snip.WriteText(name)
+
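+	// For generic functions, also emit the type parameter list: with placeholders
+	// enabled each type parameter becomes its own tab stop (e.g. "foo[K, V](...)"),
+	// otherwise a single empty tab stop is left between the brackets.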
+	if len(tparams) > 0 {
+		snip.WriteText("[")
+		if c.opts.placeholders {
+			for i, tp := range tparams {
+				if i > 0 {
+					snip.WriteText(", ")
+				}
+				snip.WritePlaceholder(func(b *snippet.Builder) {
+					b.WriteText(tp)
+				})
+			}
+		} else {
+			snip.WritePlaceholder(nil)
+		}
+		snip.WriteText("]")
+	}
+
+	snip.WriteText("(")
 
 	if c.opts.placeholders {
 		// A placeholder snippet turns "someFun<>" into "someFunc(<*i int*>, *s string*)".
diff --git a/internal/lsp/source/diagnostics.go b/internal/lsp/source/diagnostics.go
index 58154ca..e393c2f 100644
--- a/internal/lsp/source/diagnostics.go
+++ b/internal/lsp/source/diagnostics.go
@@ -82,12 +82,3 @@
 	}
 	return fh.VersionedFileIdentity(), fileDiags, nil
 }
-
-func isConvenienceAnalyzer(category string) bool {
-	for _, a := range DefaultOptions().ConvenienceAnalyzers {
-		if category == a.Analyzer.Name {
-			return true
-		}
-	}
-	return false
-}
diff --git a/internal/lsp/source/extract.go b/internal/lsp/source/extract.go
index 8f7010a..43b414a 100644
--- a/internal/lsp/source/extract.go
+++ b/internal/lsp/source/extract.go
@@ -63,7 +63,11 @@
 	if tok == nil {
 		return nil, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
 	}
-	newLineIndent := "\n" + calculateIndentation(src, tok, insertBeforeStmt)
+	indent, err := calculateIndentation(src, tok, insertBeforeStmt)
+	if err != nil {
+		return nil, err
+	}
+	newLineIndent := "\n" + indent
 
 	lhs := strings.Join(lhsNames, ", ")
 	assignStmt := &ast.AssignStmt{
@@ -128,11 +132,17 @@
 // When inserting lines of code, we must ensure that the lines have consistent
 // formatting (i.e. the proper indentation). To do so, we observe the indentation on the
 // line of code on which the insertion occurs.
-func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) string {
+func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) (string, error) {
 	line := tok.Line(insertBeforeStmt.Pos())
-	lineOffset := tok.Offset(tok.LineStart(line))
-	stmtOffset := tok.Offset(insertBeforeStmt.Pos())
-	return string(content[lineOffset:stmtOffset])
+	lineOffset, err := Offset(tok, tok.LineStart(line))
+	if err != nil {
+		return "", err
+	}
+	stmtOffset, err := Offset(tok, insertBeforeStmt.Pos())
+	if err != nil {
+		return "", err
+	}
+	return string(content[lineOffset:stmtOffset]), nil
 }
 
 // generateAvailableIdentifier adjusts the new function name until there are no collisons in scope.
@@ -390,8 +400,14 @@
 
 	// We put the selection in a constructed file. We can then traverse and edit
 	// the extracted selection without modifying the original AST.
-	startOffset := tok.Offset(rng.Start)
-	endOffset := tok.Offset(rng.End)
+	startOffset, err := Offset(tok, rng.Start)
+	if err != nil {
+		return nil, err
+	}
+	endOffset, err := Offset(tok, rng.End)
+	if err != nil {
+		return nil, err
+	}
 	selection := src[startOffset:endOffset]
 	extractedBlock, err := parseBlockStmt(fset, selection)
 	if err != nil {
@@ -584,11 +600,21 @@
 
 	// We're going to replace the whole enclosing function,
 	// so preserve the text before and after the selected block.
-	outerStart := tok.Offset(outer.Pos())
-	outerEnd := tok.Offset(outer.End())
+	outerStart, err := Offset(tok, outer.Pos())
+	if err != nil {
+		return nil, err
+	}
+	outerEnd, err := Offset(tok, outer.End())
+	if err != nil {
+		return nil, err
+	}
 	before := src[outerStart:startOffset]
 	after := src[endOffset:outerEnd]
-	newLineIndent := "\n" + calculateIndentation(src, tok, start)
+	indent, err := calculateIndentation(src, tok, start)
+	if err != nil {
+		return nil, err
+	}
+	newLineIndent := "\n" + indent
 
 	var fullReplacement strings.Builder
 	fullReplacement.Write(before)
@@ -634,8 +660,11 @@
 // their cursors for whitespace. To support this use case, we must manually adjust the
 // ranges to match the correct AST node. In this particular example, we would adjust
 // rng.Start forward by one byte, and rng.End backwards by two bytes.
-func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) span.Range {
-	offset := tok.Offset(rng.Start)
+func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) (span.Range, error) {
+	offset, err := Offset(tok, rng.Start)
+	if err != nil {
+		return span.Range{}, err
+	}
 	for offset < len(content) {
 		if !unicode.IsSpace(rune(content[offset])) {
 			break
@@ -646,7 +675,10 @@
 	rng.Start = tok.Pos(offset)
 
 	// Move backwards to find a non-whitespace character.
-	offset = tok.Offset(rng.End)
+	offset, err = Offset(tok, rng.End)
+	if err != nil {
+		return span.Range{}, err
+	}
 	for o := offset - 1; 0 <= o && o < len(content); o-- {
 		if !unicode.IsSpace(rune(content[o])) {
 			break
@@ -654,7 +686,7 @@
 		offset = o
 	}
 	rng.End = tok.Pos(offset)
-	return rng
+	return rng, nil
 }
 
 // findParent finds the parent AST node of the given target node, if the target is a
@@ -916,7 +948,11 @@
 	if tok == nil {
 		return nil, false, false, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
 	}
-	rng = adjustRangeForWhitespace(rng, tok, src)
+	var err error
+	rng, err = adjustRangeForWhitespace(rng, tok, src)
+	if err != nil {
+		return nil, false, false, err
+	}
 	path, _ := astutil.PathEnclosingInterval(file, rng.Start, rng.End)
 	if len(path) == 0 {
 		return nil, false, false, fmt.Errorf("no path enclosing interval")
@@ -959,6 +995,16 @@
 	if start == nil || end == nil {
 		return nil, false, false, fmt.Errorf("range does not map to AST nodes")
 	}
+	// If the region is a blockStmt, use the first and last nodes in the block
+	// statement.
+	// <rng.start>{ ... }<rng.end> => { <rng.start>...<rng.end> }
+	if blockStmt, ok := start.(*ast.BlockStmt); ok {
+		if len(blockStmt.List) == 0 {
+			return nil, false, false, fmt.Errorf("range maps to empty block statement")
+		}
+		start, end = blockStmt.List[0], blockStmt.List[len(blockStmt.List)-1]
+		rng.Start, rng.End = start.Pos(), end.End()
+	}
 	return &fnExtractParams{
 		tok:   tok,
 		path:  path,
diff --git a/internal/lsp/source/fix.go b/internal/lsp/source/fix.go
index 4cf270f..2f921ad 100644
--- a/internal/lsp/source/fix.go
+++ b/internal/lsp/source/fix.go
@@ -19,16 +19,20 @@
 	errors "golang.org/x/xerrors"
 )
 
-// SuggestedFixFunc is a function used to get the suggested fixes for a given
-// gopls command, some of which are provided by go/analysis.Analyzers. Some of
-// the analyzers in internal/lsp/analysis are not efficient enough to include
-// suggested fixes with their diagnostics, so we have to compute them
-// separately. Such analyzers should provide a function with a signature of
-// SuggestedFixFunc.
-type SuggestedFixFunc func(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error)
+type (
+	// SuggestedFixFunc is a function used to get the suggested fixes for a given
+	// gopls command, some of which are provided by go/analysis.Analyzers. Some of
+	// the analyzers in internal/lsp/analysis are not efficient enough to include
+	// suggested fixes with their diagnostics, so we have to compute them
+	// separately. Such analyzers should provide a function with a signature of
+	// SuggestedFixFunc.
+	SuggestedFixFunc  func(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, pRng protocol.Range) (*analysis.SuggestedFix, error)
+	singleFileFixFunc func(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error)
+)
 
 const (
 	FillStruct      = "fill_struct"
+	StubMethods     = "stub_methods"
 	UndeclaredName  = "undeclared_name"
 	ExtractVariable = "extract_variable"
 	ExtractFunction = "extract_function"
@@ -37,11 +41,23 @@
 
 // suggestedFixes maps a suggested fix command id to its handler.
 var suggestedFixes = map[string]SuggestedFixFunc{
-	FillStruct:      fillstruct.SuggestedFix,
-	UndeclaredName:  undeclaredname.SuggestedFix,
-	ExtractVariable: extractVariable,
-	ExtractFunction: extractFunction,
-	ExtractMethod:   extractMethod,
+	FillStruct:      singleFile(fillstruct.SuggestedFix),
+	UndeclaredName:  singleFile(undeclaredname.SuggestedFix),
+	ExtractVariable: singleFile(extractVariable),
+	ExtractFunction: singleFile(extractFunction),
+	ExtractMethod:   singleFile(extractMethod),
+	StubMethods:     stubSuggestedFixFunc,
+}
+
+// singleFile adapts a fix function that expects single-file inputs to the SuggestedFixFunc signature.
+func singleFile(sf singleFileFixFunc) SuggestedFixFunc {
+	return func(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, pRng protocol.Range) (*analysis.SuggestedFix, error) {
+		fset, rng, src, file, pkg, info, err := getAllSuggestedFixInputs(ctx, snapshot, fh, pRng)
+		if err != nil {
+			return nil, err
+		}
+		return sf(fset, rng, src, file, pkg, info)
+	}
 }
 
 func SuggestedFixFromCommand(cmd protocol.Command, kind protocol.CodeActionKind) SuggestedFix {
@@ -59,55 +75,66 @@
 	if !ok {
 		return nil, fmt.Errorf("no suggested fix function for %s", fix)
 	}
-	fset, rng, src, file, m, pkg, info, err := getAllSuggestedFixInputs(ctx, snapshot, fh, pRng)
-	if err != nil {
-		return nil, err
-	}
-	suggestion, err := handler(fset, rng, src, file, pkg, info)
+	suggestion, err := handler(ctx, snapshot, fh, pRng)
 	if err != nil {
 		return nil, err
 	}
 	if suggestion == nil {
 		return nil, nil
 	}
-
-	var edits []protocol.TextEdit
+	fset := snapshot.FileSet()
+	editsPerFile := map[span.URI]*protocol.TextDocumentEdit{}
 	for _, edit := range suggestion.TextEdits {
-		rng := span.NewRange(fset, edit.Pos, edit.End)
-		spn, err := rng.Span()
+		spn, err := span.NewRange(fset, edit.Pos, edit.End).Span()
 		if err != nil {
 			return nil, err
 		}
-		clRng, err := m.Range(spn)
+		fh, err := snapshot.GetVersionedFile(ctx, spn.URI())
 		if err != nil {
 			return nil, err
 		}
-		edits = append(edits, protocol.TextEdit{
-			Range:   clRng,
+		te, ok := editsPerFile[spn.URI()]
+		if !ok {
+			te = &protocol.TextDocumentEdit{
+				TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{
+					Version: fh.Version(),
+					TextDocumentIdentifier: protocol.TextDocumentIdentifier{
+						URI: protocol.URIFromSpanURI(fh.URI()),
+					},
+				},
+			}
+			editsPerFile[spn.URI()] = te
+		}
+		_, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
+		if err != nil {
+			return nil, err
+		}
+		rng, err := pgf.Mapper.Range(spn)
+		if err != nil {
+			return nil, err
+		}
+		te.Edits = append(te.Edits, protocol.TextEdit{
+			Range:   rng,
 			NewText: string(edit.NewText),
 		})
 	}
-	return []protocol.TextDocumentEdit{{
-		TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{
-			Version: fh.Version(),
-			TextDocumentIdentifier: protocol.TextDocumentIdentifier{
-				URI: protocol.URIFromSpanURI(fh.URI()),
-			},
-		},
-		Edits: edits,
-	}}, nil
+	var edits []protocol.TextDocumentEdit
+	for _, edit := range editsPerFile {
+		edits = append(edits, *edit)
+	}
+	return edits, nil
 }
 
 // getAllSuggestedFixInputs is a helper function to collect all possible needed
 // inputs for an AppliesFunc or SuggestedFixFunc.
-func getAllSuggestedFixInputs(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, span.Range, []byte, *ast.File, *protocol.ColumnMapper, *types.Package, *types.Info, error) {
+func getAllSuggestedFixInputs(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, span.Range, []byte, *ast.File, *types.Package, *types.Info, error) {
 	pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
 	if err != nil {
-		return nil, span.Range{}, nil, nil, nil, nil, nil, errors.Errorf("getting file for Identifier: %w", err)
+		return nil, span.Range{}, nil, nil, nil, nil, errors.Errorf("getting file for Identifier: %w", err)
 	}
 	rng, err := pgf.Mapper.RangeToSpanRange(pRng)
 	if err != nil {
-		return nil, span.Range{}, nil, nil, nil, nil, nil, err
+		return nil, span.Range{}, nil, nil, nil, nil, err
 	}
-	return snapshot.FileSet(), rng, pgf.Src, pgf.File, pgf.Mapper, pkg.GetTypes(), pkg.GetTypesInfo(), nil
+	return snapshot.FileSet(), rng, pgf.Src, pgf.File, pkg.GetTypes(), pkg.GetTypesInfo(), nil
 }
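ApplyFix above now buckets a fix's text edits by the file they touch, so a single suggested fix (for example stub_methods) can edit several files. A minimal sketch of that grouping step, with simplified stand-in types rather than the real protocol/span types:

	package main

	import "fmt"

	// textEdit and documentEdit are simplified stand-ins for the LSP types;
	// only the per-file grouping is illustrated here.
	type textEdit struct {
		URI, NewText string
	}

	type documentEdit struct {
		URI   string
		Edits []textEdit
	}

	// groupByFile builds one documentEdit per URI, preserving first-seen order.
	func groupByFile(edits []textEdit) []documentEdit {
		index := map[string]int{}
		var out []documentEdit
		for _, e := range edits {
			i, ok := index[e.URI]
			if !ok {
				i = len(out)
				index[e.URI] = i
				out = append(out, documentEdit{URI: e.URI})
			}
			out[i].Edits = append(out[i].Edits, e)
		}
		return out
	}

	func main() {
		edits := []textEdit{
			{"a.go", "func (T) Foo() {}\n"},
			{"b.go", "var _ I = T{}\n"},
			{"a.go", "func (T) Bar() {}\n"},
		}
		for _, de := range groupByFile(edits) {
			fmt.Println(de.URI, "gets", len(de.Edits), "edits")
		}
	}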
diff --git a/internal/lsp/source/folding_range.go b/internal/lsp/source/folding_range.go
index be3f4b0..576308f 100644
--- a/internal/lsp/source/folding_range.go
+++ b/internal/lsp/source/folding_range.go
@@ -106,6 +106,11 @@
 			startSpecs, endSpecs = n.Specs[0].Pos(), n.Specs[num-1].End()
 		}
 		start, end = validLineFoldingRange(fset, n.Lparen, n.Rparen, startSpecs, endSpecs, lineFoldingOnly)
+	case *ast.BasicLit:
+		// Fold raw string literals from position of "`" to position of "`".
+		if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' {
+			start, end = n.Pos(), n.End()
+		}
 	case *ast.CompositeLit:
 		// Fold between positions of or lines between "{" and "}".
 		var startElts, endElts token.Pos
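The new *ast.BasicLit case above adds folding ranges for raw (backquoted) string literals. A small standalone sketch of the same detection using only go/ast and go/parser:

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
	)

	const src = "package p\n\nvar doc = `line one\nline two\nline three`\n"

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		ast.Inspect(f, func(n ast.Node) bool {
			lit, ok := n.(*ast.BasicLit)
			if !ok || lit.Kind != token.STRING || len(lit.Value) < 2 || lit.Value[0] != '`' {
				return true
			}
			// lit.Value still contains the surrounding backquotes, so this is
			// the same raw-string check used by the folding code above.
			start, end := fset.Position(lit.Pos()), fset.Position(lit.End())
			fmt.Printf("raw string literal spans lines %d-%d\n", start.Line, end.Line)
			return true
		})
	}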
diff --git a/internal/lsp/source/format.go b/internal/lsp/source/format.go
index 0d61172..79da0b3 100644
--- a/internal/lsp/source/format.go
+++ b/internal/lsp/source/format.go
@@ -29,6 +29,11 @@
 	ctx, done := event.Start(ctx, "source.Format")
 	defer done()
 
+	// Generated files shouldn't be edited, so don't format them.
+	if IsGenerated(ctx, snapshot, fh.URI()) {
+		return nil, fmt.Errorf("can't format %q: file is generated", fh.URI().Filename())
+	}
+
 	pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
 	if err != nil {
 		return nil, err
@@ -58,8 +63,24 @@
 
 	// Apply additional formatting, if any is supported. Currently, the only
 	// supported additional formatter is gofumpt.
-	if format := snapshot.View().Options().Hooks.GofumptFormat; snapshot.View().Options().Gofumpt && format != nil {
-		b, err := format(ctx, buf.Bytes())
+	if format := snapshot.View().Options().GofumptFormat; snapshot.View().Options().Gofumpt && format != nil {
+		// gofumpt can customize formatting based on language version and module
+		// path, if available.
+		//
+		// Try to derive this information, but fall back on the default behavior.
+		//
+		// TODO: under which circumstances can we fail to find module information?
+		// Can this, for example, result in inconsistent formatting across saves,
+		// due to pending calls to packages.Load?
+		var langVersion, modulePath string
+		mds, err := snapshot.MetadataForFile(ctx, fh.URI())
+		if err == nil && len(mds) > 0 {
+			if mi := mds[0].ModuleInfo(); mi != nil {
+				langVersion = mi.GoVersion
+				modulePath = mi.Path
+			}
+		}
+		b, err := format(ctx, langVersion, modulePath, buf.Bytes())
 		if err != nil {
 			return nil, err
 		}
@@ -153,7 +174,10 @@
 
 func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) {
 	// trim the original data to match fixedData
-	left := importPrefix(pgf.Src)
+	left, err := importPrefix(pgf.Src)
+	if err != nil {
+		return nil, err
+	}
 	extra := !strings.Contains(left, "\n") // one line may have more than imports
 	if extra {
 		left = string(pgf.Src)
@@ -185,25 +209,30 @@
 // importPrefix returns the prefix of the given file content through the final
 // import statement. If there are no imports, the prefix is the package
 // statement and any comment groups below it.
-func importPrefix(src []byte) string {
+func importPrefix(src []byte) (string, error) {
 	fset := token.NewFileSet()
 	// do as little parsing as possible
 	f, err := parser.ParseFile(fset, "", src, parser.ImportsOnly|parser.ParseComments)
 	if err != nil { // This can happen if 'package' is misspelled
-		return ""
+		return "", fmt.Errorf("importPrefix: failed to parse: %s", err)
 	}
 	tok := fset.File(f.Pos())
 	var importEnd int
 	for _, d := range f.Decls {
 		if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT {
-			if e := tok.Offset(d.End()); e > importEnd {
+			if e, err := Offset(tok, d.End()); err != nil {
+				return "", fmt.Errorf("importPrefix: %s", err)
+			} else if e > importEnd {
 				importEnd = e
 			}
 		}
 	}
 
 	maybeAdjustToLineEnd := func(pos token.Pos, isCommentNode bool) int {
-		offset := tok.Offset(pos)
+		offset, err := Offset(tok, pos)
+		if err != nil {
+			return -1
+		}
 
 		// Don't go past the end of the file.
 		if offset > len(src) {
@@ -215,7 +244,10 @@
 		// return a position on the next line whenever possible.
 		switch line := tok.Line(tok.Pos(offset)); {
 		case line < tok.LineCount():
-			nextLineOffset := tok.Offset(tok.LineStart(line + 1))
+			nextLineOffset, err := Offset(tok, tok.LineStart(line+1))
+			if err != nil {
+				return -1
+			}
 			// If we found a position that is at the end of a line, move the
 			// offset to the start of the next line.
 			if offset+1 == nextLineOffset {
@@ -234,14 +266,19 @@
 	}
 	for _, cgroup := range f.Comments {
 		for _, c := range cgroup.List {
-			if end := tok.Offset(c.End()); end > importEnd {
+			if end, err := Offset(tok, c.End()); err != nil {
+				return "", err
+			} else if end > importEnd {
 				startLine := tok.Position(c.Pos()).Line
 				endLine := tok.Position(c.End()).Line
 
 				// Work around golang/go#41197 by checking if the comment might
 				// contain "\r", and if so, find the actual end position of the
 				// comment by scanning the content of the file.
-				startOffset := tok.Offset(c.Pos())
+				startOffset, err := Offset(tok, c.Pos())
+				if err != nil {
+					return "", err
+				}
 				if startLine != endLine && bytes.Contains(src[startOffset:], []byte("\r")) {
 					if commentEnd := scanForCommentEnd(src[startOffset:]); commentEnd > 0 {
 						end = startOffset + commentEnd
@@ -254,7 +291,7 @@
 	if importEnd > len(src) {
 		importEnd = len(src)
 	}
-	return string(src[:importEnd])
+	return string(src[:importEnd]), nil
 }
 
 // scanForCommentEnd returns the offset of the end of the multi-line comment
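importPrefix above relies on parser.ImportsOnly to avoid parsing the whole file, and now reports an error instead of silently returning "". A standalone sketch of the underlying parsing trick, using Position offsets rather than the panicking File.Offset:

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
	)

	const src = "package p\n\nimport (\n\t\"fmt\"\n\t\"os\"\n)\n\nfunc main() { fmt.Println(os.Args) }\n"

	func main() {
		fset := token.NewFileSet()
		// ImportsOnly stops parsing after the import declarations, which is
		// all importPrefix needs in order to find where the prefix ends.
		f, err := parser.ParseFile(fset, "p.go", src, parser.ImportsOnly|parser.ParseComments)
		if err != nil {
			panic(err)
		}
		end := 0
		for _, d := range f.Decls {
			if g, ok := d.(*ast.GenDecl); ok && g.Tok == token.IMPORT {
				if e := fset.Position(d.End()).Offset; e > end {
					end = e
				}
			}
		}
		fmt.Printf("import prefix is src[:%d]:\n%s\n", end, src[:end])
	}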
diff --git a/internal/lsp/source/format_test.go b/internal/lsp/source/format_test.go
index 5d93a4e..eac78d9 100644
--- a/internal/lsp/source/format_test.go
+++ b/internal/lsp/source/format_test.go
@@ -35,7 +35,10 @@
 		{"package x; import \"os\"; func f() {}\n\n", "package x; import \"os\""},
 		{"package x; func f() {fmt.Println()}\n\n", "package x"},
 	} {
-		got := importPrefix([]byte(tt.input))
+		got, err := importPrefix([]byte(tt.input))
+		if err != nil {
+			t.Fatal(err)
+		}
 		if got != tt.want {
 			t.Errorf("%d: failed for %q:\n%s", i, tt.input, diffStr(t, tt.want, got))
 		}
@@ -62,7 +65,10 @@
 */`,
 		},
 	} {
-		got := importPrefix([]byte(strings.ReplaceAll(tt.input, "\n", "\r\n")))
+		got, err := importPrefix([]byte(strings.ReplaceAll(tt.input, "\n", "\r\n")))
+		if err != nil {
+			t.Fatal(err)
+		}
 		want := strings.ReplaceAll(tt.want, "\n", "\r\n")
 		if got != want {
 			t.Errorf("%d: failed for %q:\n%s", i, tt.input, diffStr(t, want, got))
diff --git a/internal/lsp/source/hover.go b/internal/lsp/source/hover.go
index 10fb541..b6fd9ac 100644
--- a/internal/lsp/source/hover.go
+++ b/internal/lsp/source/hover.go
@@ -14,16 +14,41 @@
 	"go/format"
 	"go/token"
 	"go/types"
+	"strconv"
 	"strings"
 	"time"
+	"unicode/utf8"
 
+	"golang.org/x/text/unicode/runenames"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/typeparams"
 	errors "golang.org/x/xerrors"
 )
 
-type HoverInformation struct {
+// HoverContext contains context extracted from the syntax and type information
+// of a given node, for use in various summaries (hover, autocomplete,
+// signature help).
+type HoverContext struct {
+	// signatureSource is the object or node used to derive the hover signature.
+	//
+	// It may also hold a precomputed string.
+	// TODO(rfindley): pre-compute all signatures to avoid this indirection.
+	signatureSource interface{}
+
+	// Comment is the most relevant comment group associated with the hovered object.
+	Comment *ast.CommentGroup
+}
+
+// HoverJSON contains information used by hover. It is also the JSON returned
+// for the "structured" hover format.
+type HoverJSON struct {
+	// Synopsis is a single sentence synopsis of the symbol's documentation.
+	Synopsis string `json:"synopsis"`
+
+	// FullDocumentation is the symbol's full documentation.
+	FullDocumentation string `json:"fullDocumentation"`
+
 	// Signature is the symbol's signature.
 	Signature string `json:"signature"`
 
@@ -31,11 +56,8 @@
 	// This is recommended only for use in clients that show a single line for hover.
 	SingleLine string `json:"singleLine"`
 
-	// Synopsis is a single sentence synopsis of the symbol's documentation.
-	Synopsis string `json:"synopsis"`
-
-	// FullDocumentation is the symbol's full documentation.
-	FullDocumentation string `json:"fullDocumentation"`
+	// SymbolName is the types.Object.Name for the given symbol.
+	SymbolName string `json:"symbolName"`
 
 	// LinkPath is the pkg.go.dev link for the given symbol.
 	// For example, the "go/ast" part of "pkg.go.dev/go/ast#Node".
@@ -44,28 +66,14 @@
 	// LinkAnchor is the pkg.go.dev link anchor for the given symbol.
 	// For example, the "Node" part of "pkg.go.dev/go/ast#Node".
 	LinkAnchor string `json:"linkAnchor"`
-
-	// importPath is the import path for the package containing the given
-	// symbol.
-	importPath string
-
-	// symbolName is the types.Object.Name for the given symbol.
-	symbolName string
-
-	source  interface{}
-	comment *ast.CommentGroup
-
-	// typeName contains the identifier name when the identifier is a type declaration.
-	// If it is not empty, the hover will have the prefix "type <typeName> ".
-	typeName string
-	// isTypeAlias indicates whether the identifier is a type alias declaration.
-	// If it is true, the hover will have the prefix "type <typeName> = ".
-	isTypeAlias bool
 }
 
 func Hover(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.Hover, error) {
 	ident, err := Identifier(ctx, snapshot, fh, position)
 	if err != nil {
+		if hover, innerErr := hoverRune(ctx, snapshot, fh, position); innerErr == nil {
+			return hover, nil
+		}
 		return nil, nil
 	}
 	h, err := HoverIdentifier(ctx, ident)
@@ -76,10 +84,6 @@
 	if err != nil {
 		return nil, err
 	}
-	// See golang/go#36998: don't link to modules matching GOPRIVATE.
-	if snapshot.View().IsGoPrivatePath(h.importPath) {
-		h.LinkPath = ""
-	}
 	hover, err := FormatHover(h, snapshot.View().Options())
 	if err != nil {
 		return nil, err
@@ -93,30 +97,193 @@
 	}, nil
 }
 
-func HoverIdentifier(ctx context.Context, i *IdentifierInfo) (*HoverInformation, error) {
-	ctx, done := event.Start(ctx, "source.Hover")
+func hoverRune(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.Hover, error) {
+	ctx, done := event.Start(ctx, "source.hoverRune")
 	defer done()
 
-	fset := i.Snapshot.FileSet()
-	h, err := HoverInfo(ctx, i.Snapshot, i.pkg, i.Declaration.obj, i.Declaration.node, i.Declaration.fullDecl)
+	r, mrng, err := findRune(ctx, snapshot, fh, position)
 	if err != nil {
 		return nil, err
 	}
+	rng, err := mrng.Range()
+	if err != nil {
+		return nil, err
+	}
+
+	var desc string
+	runeName := runenames.Name(r)
+	if len(runeName) > 0 && runeName[0] == '<' {
+		// Check if the rune looks like an HTML tag. If so, trim the surrounding <>
+		// characters to work around https://github.com/microsoft/vscode/issues/124042.
+		runeName = strings.TrimRight(runeName[1:], ">")
+	}
+	if strconv.IsPrint(r) {
+		desc = fmt.Sprintf("'%s', U+%04X, %s", string(r), uint32(r), runeName)
+	} else {
+		desc = fmt.Sprintf("U+%04X, %s", uint32(r), runeName)
+	}
+	return &protocol.Hover{
+		Contents: protocol.MarkupContent{
+			Kind:  snapshot.View().Options().PreferredContentFormat,
+			Value: desc,
+		},
+		Range: rng,
+	}, nil
+}
+
+// ErrNoRuneFound is the error returned when no rune is found at a particular position.
+var ErrNoRuneFound = errors.New("no rune found")
+
+// findRune returns rune information for a position in a file.
+func findRune(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (rune, MappedRange, error) {
+	pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
+	if err != nil {
+		return 0, MappedRange{}, err
+	}
+	spn, err := pgf.Mapper.PointSpan(position)
+	if err != nil {
+		return 0, MappedRange{}, err
+	}
+	rng, err := spn.Range(pgf.Mapper.Converter)
+	if err != nil {
+		return 0, MappedRange{}, err
+	}
+	pos := rng.Start
+
+	// Find the basic literal enclosing the given position, if there is one.
+	var lit *ast.BasicLit
+	var found bool
+	ast.Inspect(pgf.File, func(n ast.Node) bool {
+		if found {
+			return false
+		}
+		if n, ok := n.(*ast.BasicLit); ok && pos >= n.Pos() && pos <= n.End() {
+			lit = n
+			found = true
+		}
+		return !found
+	})
+	if !found {
+		return 0, MappedRange{}, ErrNoRuneFound
+	}
+
+	var r rune
+	var start, end token.Pos
+	switch lit.Kind {
+	case token.CHAR:
+		s, err := strconv.Unquote(lit.Value)
+		if err != nil {
+			// If the conversion fails, it's because of an invalid syntax, therefore
+			// there is no rune to be found.
+			return 0, MappedRange{}, ErrNoRuneFound
+		}
+		r, _ = utf8.DecodeRuneInString(s)
+		if r == utf8.RuneError {
+			return 0, MappedRange{}, fmt.Errorf("rune error")
+		}
+		start, end = lit.Pos(), lit.End()
+	case token.INT:
+		// It's an integer; scan only if it is a hex literal whose bit size
+		// ranges from 8 to 32.
+		if !(strings.HasPrefix(lit.Value, "0x") && len(lit.Value[2:]) >= 2 && len(lit.Value[2:]) <= 8) {
+			return 0, MappedRange{}, ErrNoRuneFound
+		}
+		v, err := strconv.ParseUint(lit.Value[2:], 16, 32)
+		if err != nil {
+			return 0, MappedRange{}, err
+		}
+		r = rune(v)
+		if r == utf8.RuneError {
+			return 0, MappedRange{}, fmt.Errorf("rune error")
+		}
+		start, end = lit.Pos(), lit.End()
+	case token.STRING:
+		// It's a string; scan only if it contains a unicode escape sequence at or before the
+		// current cursor position.
+		var found bool
+		litOffset, err := Offset(pgf.Tok, lit.Pos())
+		if err != nil {
+			return 0, MappedRange{}, err
+		}
+		offset, err := Offset(pgf.Tok, pos)
+		if err != nil {
+			return 0, MappedRange{}, err
+		}
+		for i := offset - litOffset; i > 0; i-- {
+			// Start at the cursor position and search backward for the beginning of a rune escape sequence.
+			rr, _ := utf8.DecodeRuneInString(lit.Value[i:])
+			if rr == utf8.RuneError {
+				return 0, MappedRange{}, fmt.Errorf("rune error")
+			}
+			if rr == '\\' {
+				// Got the beginning, decode it.
+				var tail string
+				r, _, tail, err = strconv.UnquoteChar(lit.Value[i:], '"')
+				if err != nil {
+					// If the conversion fails, it's because of invalid syntax, so there is no rune to be found.
+					return 0, MappedRange{}, ErrNoRuneFound
+				}
+				// Only the rune escape sequence part of the string has to be highlighted; recompute the range.
+				runeLen := len(lit.Value) - (int(i) + len(tail))
+				start = token.Pos(int(lit.Pos()) + int(i))
+				end = token.Pos(int(start) + runeLen)
+				found = true
+				break
+			}
+		}
+		if !found {
+			// No escape sequence found
+			return 0, MappedRange{}, ErrNoRuneFound
+		}
+	default:
+		return 0, MappedRange{}, ErrNoRuneFound
+	}
+
+	mappedRange, err := posToMappedRange(snapshot, pkg, start, end)
+	if err != nil {
+		return 0, MappedRange{}, err
+	}
+	return r, mappedRange, nil
+}
+
+func HoverIdentifier(ctx context.Context, i *IdentifierInfo) (*HoverJSON, error) {
+	ctx, done := event.Start(ctx, "source.Hover")
+	defer done()
+
+	hoverCtx, err := FindHoverContext(ctx, i.Snapshot, i.pkg, i.Declaration.obj, i.Declaration.node, i.Declaration.fullDecl)
+	if err != nil {
+		return nil, err
+	}
+
+	h := &HoverJSON{
+		FullDocumentation: hoverCtx.Comment.Text(),
+		Synopsis:          doc.Synopsis(hoverCtx.Comment.Text()),
+	}
+
+	fset := i.Snapshot.FileSet()
 	// Determine the symbol's signature.
-	switch x := h.source.(type) {
+	switch x := hoverCtx.signatureSource.(type) {
+	case string:
+		h.Signature = x // a pre-computed signature
+
+	case *ast.TypeSpec:
+		x2 := *x
+		// Don't duplicate comments when formatting type specs.
+		x2.Doc = nil
+		x2.Comment = nil
+		var b strings.Builder
+		b.WriteString("type ")
+		if err := format.Node(&b, fset, &x2); err != nil {
+			return nil, err
+		}
+		h.Signature = b.String()
+
 	case ast.Node:
 		var b strings.Builder
 		if err := format.Node(&b, fset, x); err != nil {
 			return nil, err
 		}
 		h.Signature = b.String()
-		if h.typeName != "" {
-			prefix := "type " + h.typeName + " "
-			if h.isTypeAlias {
-				prefix += "= "
-			}
-			h.Signature = prefix + h.Signature
-		}
 
 		// Check if the variable is an integer whose value we can present in a more
 		// user-friendly way, i.e. `var hex = 0xe34e` becomes `var hex = 58190`
@@ -145,22 +312,7 @@
 	if obj == nil {
 		return h, nil
 	}
-	switch obj := obj.(type) {
-	case *types.PkgName:
-		h.importPath = obj.Imported().Path()
-		h.LinkPath = h.importPath
-		h.symbolName = obj.Name()
-		if mod, version, ok := moduleAtVersion(h.LinkPath, i); ok {
-			h.LinkPath = strings.Replace(h.LinkPath, mod, mod+"@"+version, 1)
-		}
-		return h, nil
-	case *types.Builtin:
-		h.importPath = "builtin"
-		h.LinkPath = h.importPath
-		h.LinkAnchor = obj.Name()
-		h.symbolName = h.LinkAnchor
-		return h, nil
-	}
+
 	// Check if the identifier is test-only (and is therefore not part of a
 	// package's API). This is true if the request originated in a test package,
 	// and if the declaration is also found in the same test package.
@@ -169,66 +321,105 @@
 			return h, nil
 		}
 	}
-	// Don't return links for other unexported types.
-	if !obj.Exported() {
-		return h, nil
+
+	h.SymbolName, h.LinkPath, h.LinkAnchor = linkData(obj, i.enclosing)
+
+	// See golang/go#36998: don't link to modules matching GOPRIVATE.
+	//
+	// The path returned by linkData is an import path.
+	if i.Snapshot.View().IsGoPrivatePath(h.LinkPath) {
+		h.LinkPath = ""
+	} else if mod, version, ok := moduleAtVersion(h.LinkPath, i); ok {
+		h.LinkPath = strings.Replace(h.LinkPath, mod, mod+"@"+version, 1)
 	}
-	var rTypeName string
+
+	return h, nil
+}
+
+// linkData returns the name, import path, and anchor to use in building links
+// to obj.
+//
+// If obj is not visible in documentation, the returned name will be empty.
+func linkData(obj types.Object, enclosing *types.TypeName) (name, importPath, anchor string) {
+	// Package names simply link to the package.
+	if obj, ok := obj.(*types.PkgName); ok {
+		return obj.Name(), obj.Imported().Path(), ""
+	}
+
+	// Builtins link to the special builtin package.
+	if obj.Parent() == types.Universe {
+		return obj.Name(), "builtin", obj.Name()
+	}
+
+	// In all other cases, the object must be exported.
+	if !obj.Exported() {
+		return "", "", ""
+	}
+
+	var recv types.Object // If non-nil, the field or method receiver base.
+
 	switch obj := obj.(type) {
 	case *types.Var:
 		// If the object is a field, and we have an associated selector
 		// composite literal, or struct, we can determine the link.
-		if obj.IsField() {
-			if named, ok := i.enclosing.(*types.Named); ok {
-				rTypeName = named.Obj().Name()
-			}
+		if obj.IsField() && enclosing != nil {
+			recv = enclosing
 		}
 	case *types.Func:
 		typ, ok := obj.Type().(*types.Signature)
 		if !ok {
-			return h, nil
+			// Note: this should never happen. go/types guarantees that the type of
+			// a *Func is a Signature.
+			//
+			// TODO(rfindley): given a 'debug' mode, we should panic here.
+			return "", "", ""
 		}
 		if r := typ.Recv(); r != nil {
-			switch rtyp := Deref(r.Type()).(type) {
-			case *types.Struct:
-				rTypeName = r.Name()
-			case *types.Named:
+			if rtyp, _ := Deref(r.Type()).(*types.Named); rtyp != nil {
 				// If we have an unexported type, see if the enclosing type is
 				// exported (we may have an interface or struct we can link
 				// to). If not, don't show any link.
 				if !rtyp.Obj().Exported() {
-					if named, ok := i.enclosing.(*types.Named); ok && named.Obj().Exported() {
-						rTypeName = named.Obj().Name()
+					if enclosing != nil {
+						recv = enclosing
 					} else {
-						return h, nil
+						return "", "", ""
 					}
 				} else {
-					rTypeName = rtyp.Obj().Name()
+					recv = rtyp.Obj()
 				}
 			}
 		}
 	}
-	if obj.Pkg() == nil {
-		event.Log(ctx, fmt.Sprintf("nil package for %s", obj))
-		return h, nil
+
+	if recv != nil && !recv.Exported() {
+		return "", "", ""
 	}
-	h.importPath = obj.Pkg().Path()
-	h.LinkPath = h.importPath
-	if mod, version, ok := moduleAtVersion(h.LinkPath, i); ok {
-		h.LinkPath = strings.Replace(h.LinkPath, mod, mod+"@"+version, 1)
+
+	// Either the object or its receiver must be in the package scope.
+	scopeObj := obj
+	if recv != nil {
+		scopeObj = recv
 	}
-	if rTypeName != "" {
-		h.LinkAnchor = fmt.Sprintf("%s.%s", rTypeName, obj.Name())
-		h.symbolName = fmt.Sprintf("(%s.%s).%s", obj.Pkg().Name(), rTypeName, obj.Name())
-		return h, nil
+	if scopeObj.Pkg() == nil || scopeObj.Pkg().Scope().Lookup(scopeObj.Name()) != scopeObj {
+		return "", "", ""
 	}
-	// For most cases, the link is "package/path#symbol".
-	h.LinkAnchor = obj.Name()
-	h.symbolName = fmt.Sprintf("%s.%s", obj.Pkg().Name(), obj.Name())
-	return h, nil
+
+	importPath = obj.Pkg().Path()
+	if recv != nil {
+		anchor = fmt.Sprintf("%s.%s", recv.Name(), obj.Name())
+		name = fmt.Sprintf("(%s.%s).%s", obj.Pkg().Name(), recv.Name(), obj.Name())
+	} else {
+		// For most cases, the link is "package/path#symbol".
+		anchor = obj.Name()
+		name = fmt.Sprintf("%s.%s", obj.Pkg().Name(), obj.Name())
+	}
+	return name, importPath, anchor
 }
 
 func moduleAtVersion(path string, i *IdentifierInfo) (string, string, bool) {
+	// TODO(rfindley): moduleAtVersion should not be responsible for deciding
+	// whether or not the link target supports module version links.
 	if strings.ToLower(i.Snapshot.View().Options().LinkTarget) != "pkg.go.dev" {
 		return "", "", false
 	}
@@ -251,7 +442,7 @@
 func objectString(obj types.Object, qf types.Qualifier, inferred *types.Signature) string {
 	// If the signature type was inferred, prefer the preferred signature with a
 	// comment showing the generic signature.
-	if sig, _ := obj.Type().(*types.Signature); sig != nil && len(typeparams.ForSignature(sig)) > 0 && inferred != nil {
+	if sig, _ := obj.Type().(*types.Signature); sig != nil && typeparams.ForSignature(sig).Len() > 0 && inferred != nil {
 		obj2 := types.NewFunc(obj.Pos(), obj.Pkg(), obj.Name(), inferred)
 		str := types.ObjectString(obj2, qf)
 		// Try to avoid overly long lines.
@@ -283,11 +474,19 @@
 	return str
 }
 
-// HoverInfo returns a HoverInformation struct for an ast node and its type
-// object. node should be the actual node used in type checking, while fullNode
-// could be a separate node with more complete syntactic information.
-func HoverInfo(ctx context.Context, s Snapshot, pkg Package, obj types.Object, pkgNode ast.Node, fullDecl ast.Decl) (*HoverInformation, error) {
-	var info *HoverInformation
+// FindHoverContext returns a HoverContext struct for an AST node and its
+// declaration object. pkgNode should be the actual node used in type checking,
+// while fullDecl may be a separate declaration with more complete syntactic
+// information.
+func FindHoverContext(ctx context.Context, s Snapshot, pkg Package, obj types.Object, pkgNode ast.Node, fullDecl ast.Decl) (*HoverContext, error) {
+	var info *HoverContext
+
+	// Type parameters get their signature from their declaration object.
+	if _, isTypeName := obj.(*types.TypeName); isTypeName {
+		if _, isTypeParam := obj.Type().(*typeparams.TypeParam); isTypeParam {
+			return &HoverContext{signatureSource: obj}, nil
+		}
+	}
 
 	// This is problematic for a number of reasons. We really need to have a more
 	// general mechanism to validate the coherency of AST with type information,
@@ -306,7 +505,7 @@
 		// The package declaration.
 		for _, f := range pkg.GetSyntax() {
 			if f.Name == pkgNode {
-				info = &HoverInformation{comment: f.Doc}
+				info = &HoverContext{Comment: f.Doc}
 			}
 		}
 	case *ast.ImportSpec:
@@ -320,12 +519,12 @@
 			// so pick the first file that has a doc comment.
 			for _, file := range imp.GetSyntax() {
 				if file.Doc != nil {
-					info = &HoverInformation{source: obj, comment: file.Doc}
+					info = &HoverContext{signatureSource: obj, Comment: file.Doc}
 					break
 				}
 			}
 		}
-		info = &HoverInformation{source: node}
+		info = &HoverContext{signatureSource: node}
 	case *ast.GenDecl:
 		switch obj := obj.(type) {
 		case *types.TypeName, *types.Var, *types.Const, *types.Func:
@@ -337,35 +536,64 @@
 			// obj may not have been produced by type checking the AST containing
 			// node, so we need to be careful about using token.Pos.
 			tok := s.FileSet().File(obj.Pos())
-			offset := tok.Offset(obj.Pos())
-			tok2 := s.FileSet().File(node.Pos())
+			offset, err := Offset(tok, obj.Pos())
+			if err != nil {
+				return nil, err
+			}
+
+			// fullTok and fullPos are the *token.File and object position in the
+			// full AST.
+			fullTok := s.FileSet().File(node.Pos())
+			fullPos, err := Pos(fullTok, offset)
+			if err != nil {
+				return nil, err
+			}
+
 			var spec ast.Spec
 			for _, s := range node.Specs {
-				if tok2.Offset(s.Pos()) <= offset && offset <= tok2.Offset(s.End()) {
+				// Avoid panics by guarding the calls to token.Offset (golang/go#48249).
+				start, err := Offset(fullTok, s.Pos())
+				if err != nil {
+					return nil, err
+				}
+				end, err := Offset(fullTok, s.End())
+				if err != nil {
+					return nil, err
+				}
+				if start <= offset && offset <= end {
 					spec = s
 					break
 				}
 			}
-			var err error
-			info, err = formatGenDecl(node, spec, obj, obj.Type())
+
+			info, err = hoverGenDecl(node, spec, fullPos, obj)
 			if err != nil {
 				return nil, err
 			}
 		}
 	case *ast.TypeSpec:
 		if obj.Parent() == types.Universe {
-			if obj.Name() == "error" {
-				info = &HoverInformation{source: node}
-			} else {
-				info = &HoverInformation{source: node.Name} // comments not needed for builtins
+			if genDecl, ok := fullDecl.(*ast.GenDecl); ok {
+				info = hoverTypeSpec(node, genDecl)
 			}
 		}
 	case *ast.FuncDecl:
 		switch obj.(type) {
 		case *types.Func:
-			info = &HoverInformation{source: obj, comment: node.Doc}
+			info = &HoverContext{signatureSource: obj, Comment: node.Doc}
 		case *types.Builtin:
-			info = &HoverInformation{source: node.Type, comment: node.Doc}
+			info = &HoverContext{Comment: node.Doc}
+			if sig, err := NewBuiltinSignature(ctx, s, obj.Name()); err == nil {
+				info.signatureSource = "func " + sig.name + sig.Format()
+			} else {
+				// Fall back on the object as a signature source.
+
+				// TODO(rfindley): refactor so that we can report bugs from the source
+				// package.
+
+				// debug.Bug(ctx, "invalid builtin hover", "did not find builtin signature: %v", err)
+				info.signatureSource = obj
+			}
 		case *types.Var:
 			// Object is a function param or the field of an anonymous struct
 			// declared with ':='. Skip the first one because only fields
@@ -384,18 +612,13 @@
 				if comment.Text() == "" {
 					comment = field.Comment
 				}
-				info = &HoverInformation{source: obj, comment: comment}
+				info = &HoverContext{signatureSource: obj, Comment: comment}
 			}
 		}
 	}
 
 	if info == nil {
-		info = &HoverInformation{source: obj}
-	}
-
-	if info.comment != nil {
-		info.FullDocumentation = info.comment.Text()
-		info.Synopsis = doc.Synopsis(info.FullDocumentation)
+		info = &HoverContext{signatureSource: obj}
 	}
 
 	return info, nil
@@ -419,35 +642,34 @@
 	return false
 }
 
-func formatGenDecl(node *ast.GenDecl, spec ast.Spec, obj types.Object, typ types.Type) (*HoverInformation, error) {
-	if _, ok := typ.(*types.Named); ok {
-		switch typ.Underlying().(type) {
-		case *types.Interface, *types.Struct:
-			return formatGenDecl(node, spec, obj, typ.Underlying())
-		}
-	}
+// hoverGenDecl returns hover information for an object declared via spec inside
+// of the GenDecl node. obj is the type-checked object corresponding to the
+// declaration, but may have been type-checked using a different AST than the
+// given nodes; fullPos is the position of obj in node's AST.
+func hoverGenDecl(node *ast.GenDecl, spec ast.Spec, fullPos token.Pos, obj types.Object) (*HoverContext, error) {
 	if spec == nil {
-		return nil, errors.Errorf("no spec for node %v at position %v", node, obj.Pos())
+		return nil, errors.Errorf("no spec for node %v at position %v", node, fullPos)
 	}
 
 	// If we have a field or method.
 	switch obj.(type) {
 	case *types.Var, *types.Const, *types.Func:
-		return formatVar(spec, obj, node), nil
+		return hoverVar(spec, fullPos, obj, node), nil
 	}
 	// Handle types.
 	switch spec := spec.(type) {
 	case *ast.TypeSpec:
-		return formatTypeSpec(spec, node), nil
+		return hoverTypeSpec(spec, node), nil
 	case *ast.ValueSpec:
-		return &HoverInformation{source: spec, comment: spec.Doc}, nil
+		return &HoverContext{signatureSource: spec, Comment: spec.Doc}, nil
 	case *ast.ImportSpec:
-		return &HoverInformation{source: spec, comment: spec.Doc}, nil
+		return &HoverContext{signatureSource: spec, Comment: spec.Doc}, nil
 	}
 	return nil, errors.Errorf("unable to format spec %v (%T)", spec, spec)
 }
 
-func formatTypeSpec(spec *ast.TypeSpec, decl *ast.GenDecl) *HoverInformation {
+// TODO(rfindley): rename this function.
+func hoverTypeSpec(spec *ast.TypeSpec, decl *ast.GenDecl) *HoverContext {
 	comment := spec.Doc
 	if comment == nil && decl != nil {
 		comment = decl.Doc
@@ -455,15 +677,13 @@
 	if comment == nil {
 		comment = spec.Comment
 	}
-	return &HoverInformation{
-		source:      spec.Type,
-		comment:     comment,
-		typeName:    spec.Name.Name,
-		isTypeAlias: spec.Assign.IsValid(),
+	return &HoverContext{
+		signatureSource: spec,
+		Comment:         comment,
 	}
 }
 
-func formatVar(node ast.Spec, obj types.Object, decl *ast.GenDecl) *HoverInformation {
+func hoverVar(node ast.Spec, fullPos token.Pos, obj types.Object, decl *ast.GenDecl) *HoverContext {
 	var fieldList *ast.FieldList
 	switch spec := node.(type) {
 	case *ast.TypeSpec:
@@ -491,18 +711,18 @@
 		// associated values so that we can augment their hover with more information.
 		if _, ok := obj.(*types.Var); ok && spec.Type == nil && len(spec.Values) > 0 {
 			if _, ok := spec.Values[0].(*ast.BasicLit); ok {
-				return &HoverInformation{source: spec, comment: comment}
+				return &HoverContext{signatureSource: spec, Comment: comment}
 			}
 		}
 
-		return &HoverInformation{source: obj, comment: comment}
+		return &HoverContext{signatureSource: obj, Comment: comment}
 	}
 
 	if fieldList != nil {
-		comment := findFieldComment(obj.Pos(), fieldList)
-		return &HoverInformation{source: obj, comment: comment}
+		comment := findFieldComment(fullPos, fieldList)
+		return &HoverContext{signatureSource: obj, Comment: comment}
 	}
-	return &HoverInformation{source: obj, comment: decl.Doc}
+	return &HoverContext{signatureSource: obj, Comment: decl.Doc}
 }
 
 // extractFieldList recursively tries to extract a field list.
@@ -548,11 +768,8 @@
 	return nil
 }
 
-func FormatHover(h *HoverInformation, options *Options) (string, error) {
-	signature := h.Signature
-	if signature != "" && options.PreferredContentFormat == protocol.Markdown {
-		signature = fmt.Sprintf("```go\n%s\n```", signature)
-	}
+func FormatHover(h *HoverJSON, options *Options) (string, error) {
+	signature := formatSignature(h, options)
 
 	switch options.HoverKind {
 	case SingleLine:
@@ -566,26 +783,50 @@
 		}
 		return string(b), nil
 	}
+
 	link := formatLink(h, options)
-	switch options.HoverKind {
-	case SynopsisDocumentation:
-		doc := formatDoc(h.Synopsis, options)
-		return formatHover(options, signature, link, doc), nil
-	case FullDocumentation:
-		doc := formatDoc(h.FullDocumentation, options)
-		return formatHover(options, signature, link, doc), nil
+	doc := formatDoc(h, options)
+
+	var b strings.Builder
+	parts := []string{signature, doc, link}
+	for i, el := range parts {
+		if el != "" {
+			b.WriteString(el)
+
+			// Don't write out final newline.
+			if i == len(parts) {
+				continue
+			}
+			// If any elements of the remainder of the list are non-empty,
+			// write a newline.
+			if anyNonEmpty(parts[i+1:]) {
+				if options.PreferredContentFormat == protocol.Markdown {
+					b.WriteString("\n\n")
+				} else {
+					b.WriteRune('\n')
+				}
+			}
+		}
 	}
-	return "", errors.Errorf("no hover for %v", h.source)
+	return b.String(), nil
 }
 
-func formatLink(h *HoverInformation, options *Options) string {
+func formatSignature(h *HoverJSON, options *Options) string {
+	signature := h.Signature
+	if signature != "" && options.PreferredContentFormat == protocol.Markdown {
+		signature = fmt.Sprintf("```go\n%s\n```", signature)
+	}
+	return signature
+}
+
+func formatLink(h *HoverJSON, options *Options) string {
 	if !options.LinksInHover || options.LinkTarget == "" || h.LinkPath == "" {
 		return ""
 	}
 	plainLink := BuildLink(options.LinkTarget, h.LinkPath, h.LinkAnchor)
 	switch options.PreferredContentFormat {
 	case protocol.Markdown:
-		return fmt.Sprintf("[`%s` on %s](%s)", h.symbolName, options.LinkTarget, plainLink)
+		return fmt.Sprintf("[`%s` on %s](%s)", h.SymbolName, options.LinkTarget, plainLink)
 	case protocol.PlainText:
 		return ""
 	default:
@@ -605,37 +846,20 @@
 	return link + "#" + anchor
 }
 
-func formatDoc(doc string, options *Options) string {
+func formatDoc(h *HoverJSON, options *Options) string {
+	var doc string
+	switch options.HoverKind {
+	case SynopsisDocumentation:
+		doc = h.Synopsis
+	case FullDocumentation:
+		doc = h.FullDocumentation
+	}
 	if options.PreferredContentFormat == protocol.Markdown {
 		return CommentToMarkdown(doc)
 	}
 	return doc
 }
 
-func formatHover(options *Options, x ...string) string {
-	var b strings.Builder
-	for i, el := range x {
-		if el != "" {
-			b.WriteString(el)
-
-			// Don't write out final newline.
-			if i == len(x) {
-				continue
-			}
-			// If any elements of the remainder of the list are non-empty,
-			// write a newline.
-			if anyNonEmpty(x[i+1:]) {
-				if options.PreferredContentFormat == protocol.Markdown {
-					b.WriteString("\n\n")
-				} else {
-					b.WriteRune('\n')
-				}
-			}
-		}
-	}
-	return b.String()
-}
-
 func anyNonEmpty(x []string) bool {
 	for _, el := range x {
 		if el != "" {
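hoverRune above builds its hover text from strconv.IsPrint and golang.org/x/text/unicode/runenames. A small sketch of that formatting step in isolation (describe is an illustrative name, not a gopls function):

	package main

	import (
		"fmt"
		"strconv"
		"strings"

		"golang.org/x/text/unicode/runenames"
	)

	// describe formats a rune the way the hover above does: printable runes
	// include the character itself, everything else only the code point and
	// Unicode name.
	func describe(r rune) string {
		name := runenames.Name(r)
		if strings.HasPrefix(name, "<") {
			// Trim the <> used for label-style names (e.g. control characters),
			// mirroring the workaround for the VS Code rendering issue above.
			name = strings.TrimRight(name[1:], ">")
		}
		if strconv.IsPrint(r) {
			return fmt.Sprintf("'%s', U+%04X, %s", string(r), uint32(r), name)
		}
		return fmt.Sprintf("U+%04X, %s", uint32(r), name)
	}

	func main() {
		fmt.Println(describe('⌘'))  // '⌘', U+2318, PLACE OF INTEREST SIGN
		fmt.Println(describe('\n')) // non-printable: code point and name only
	}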
diff --git a/internal/lsp/source/identifier.go b/internal/lsp/source/identifier.go
index 2ab6cfd..bf4941f 100644
--- a/internal/lsp/source/identifier.go
+++ b/internal/lsp/source/identifier.go
@@ -39,9 +39,10 @@
 
 	ident *ast.Ident
 
-	// enclosing is an expression used to determine the link anchor for an
-	// identifier. If it's a named type, it should be exported.
-	enclosing types.Type
+	// For struct fields or embedded interfaces, enclosing is the object
+	// corresponding to the outer type declaration, if it is exported, for use in
+	// documentation links.
+	enclosing *types.TypeName
 
 	pkg Package
 	qf  types.Qualifier
@@ -78,7 +79,7 @@
 	ctx, done := event.Start(ctx, "source.Identifier")
 	defer done()
 
-	pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), TypecheckAll)
+	pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), TypecheckAll, false)
 	if err != nil {
 		return nil, err
 	}
@@ -217,6 +218,10 @@
 			return nil, errors.Errorf("no declaration for %s", result.Name)
 		}
 		result.Declaration.node = decl
+		if typeSpec, ok := decl.(*ast.TypeSpec); ok {
+			// Find the GenDecl (which has the doc comments) for the TypeSpec.
+			result.Declaration.fullDecl = findGenDecl(builtin.File, typeSpec)
+		}
 
 		// The builtin package isn't in the dependency graph, so the usual
 		// utilities won't work here.
@@ -299,7 +304,7 @@
 		return result, nil
 	}
 
-	result.Inferred = inferredSignature(pkg.GetTypesInfo(), path)
+	result.Inferred = inferredSignature(pkg.GetTypesInfo(), ident)
 
 	result.Type.Object = typeToObject(typ)
 	if result.Type.Object != nil {
@@ -314,6 +319,18 @@
 	return result, nil
 }
 
+// findGenDecl determines the parent ast.GenDecl for a given ast.Spec.
+func findGenDecl(f *ast.File, spec ast.Spec) *ast.GenDecl {
+	for _, decl := range f.Decls {
+		if genDecl, ok := decl.(*ast.GenDecl); ok {
+			if genDecl.Pos() <= spec.Pos() && genDecl.End() >= spec.End() {
+				return genDecl
+			}
+		}
+	}
+	return nil
+}
+
 // fullNode tries to extract the full spec corresponding to obj's declaration.
 // If the package was not parsed in full, the declaration file will be
 // re-parsed to ensure it has complete syntax.
@@ -331,7 +348,10 @@
 		fset := snapshot.FileSet()
 		file2, _ := parser.ParseFile(fset, tok.Name(), pgf.Src, parser.AllErrors|parser.ParseComments)
 		if file2 != nil {
-			offset := tok.Offset(obj.Pos())
+			offset, err := Offset(tok, obj.Pos())
+			if err != nil {
+				return nil, err
+			}
 			file = file2
 			tok2 := fset.File(file2.Pos())
 			pos = tok2.Pos(offset)
@@ -347,55 +367,16 @@
 }
 
 // inferredSignature determines the resolved non-generic signature for an
-// identifier with a generic signature that is the operand of an IndexExpr or
-// CallExpr.
+// identifier in an instantiation expression.
 //
 // If no such signature exists, it returns nil.
-func inferredSignature(info *types.Info, path []ast.Node) *types.Signature {
-	if len(path) < 2 {
-		return nil
-	}
-	// There are four ways in which a signature may be resolved:
-	//  1. It has no explicit type arguments, but the CallExpr can be fully
-	//     inferred from function arguments.
-	//  2. It has full type arguments, and the IndexExpr has a non-generic type.
-	//  3. For a partially instantiated IndexExpr representing a function-valued
-	//     expression (i.e. not part of a CallExpr), type arguments may be
-	//     inferred using constraint type inference.
-	//  4. For a partially instantiated IndexExpr that is part of a CallExpr,
-	//     type arguments may be inferred using both constraint type inference
-	//     and function argument inference.
-	//
-	// These branches are handled below.
-	switch n := path[1].(type) {
-	case *ast.CallExpr:
-		_, sig := typeparams.GetInferred(info, n)
-		return sig
-	default:
-		if ix := typeparams.GetIndexExprData(n); ix != nil {
-			e := n.(ast.Expr)
-			// If the IndexExpr is fully instantiated, we consider that 'inference' for
-			// gopls' purposes.
-			sig, _ := info.TypeOf(e).(*types.Signature)
-			if sig != nil && len(typeparams.ForSignature(sig)) == 0 {
-				return sig
-			}
-			_, sig = typeparams.GetInferred(info, e)
-			if sig != nil {
-				return sig
-			}
-			if len(path) >= 2 {
-				if call, _ := path[2].(*ast.CallExpr); call != nil {
-					_, sig := typeparams.GetInferred(info, call)
-					return sig
-				}
-			}
-		}
-	}
-	return nil
+func inferredSignature(info *types.Info, id *ast.Ident) *types.Signature {
+	inst := typeparams.GetInstances(info)[id]
+	sig, _ := inst.Type.(*types.Signature)
+	return sig
 }
 
-func searchForEnclosing(info *types.Info, path []ast.Node) types.Type {
+func searchForEnclosing(info *types.Info, path []ast.Node) *types.TypeName {
 	for _, n := range path {
 		switch n := n.(type) {
 		case *ast.SelectorExpr:
@@ -403,9 +384,9 @@
 				recv := Deref(sel.Recv())
 
 				// Keep track of the last exported type seen.
-				var exported types.Type
+				var exported *types.TypeName
 				if named, ok := recv.(*types.Named); ok && named.Obj().Exported() {
-					exported = named
+					exported = named.Obj()
 				}
 				// We don't want the last element, as that's the field or
 				// method itself.
@@ -413,7 +394,7 @@
 					if r, ok := recv.Underlying().(*types.Struct); ok {
 						recv = Deref(r.Field(index).Type())
 						if named, ok := recv.(*types.Named); ok && named.Obj().Exported() {
-							exported = named
+							exported = named.Obj()
 						}
 					}
 				}
@@ -421,12 +402,16 @@
 			}
 		case *ast.CompositeLit:
 			if t, ok := info.Types[n]; ok {
-				return t.Type
+				if named, _ := t.Type.(*types.Named); named != nil {
+					return named.Obj()
+				}
 			}
 		case *ast.TypeSpec:
 			if _, ok := n.Type.(*ast.StructType); ok {
 				if t, ok := info.Defs[n.Name]; ok {
-					return t.Type()
+					if tname, _ := t.(*types.TypeName); tname != nil {
+						return tname
+					}
 				}
 			}
 		}
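inferredSignature above now reads the instantiated signature straight from the type checker's Instances map, via the internal typeparams shim. A standalone sketch of the same lookup against go/types directly, assuming Go 1.18 or newer:

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
		"go/types"
	)

	const src = "package p\n\nfunc Min[T int | float64](a, b T) T {\n\tif a < b {\n\t\treturn a\n\t}\n\treturn b\n}\n\nvar _ = Min[int]\n"

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		// Instances records every instantiation of a generic function or type
		// (Go 1.18+); gopls reaches it through its typeparams compatibility shim.
		info := &types.Info{Instances: make(map[*ast.Ident]types.Instance)}
		if _, err := (&types.Config{}).Check("p", fset, []*ast.File{f}, info); err != nil {
			panic(err)
		}
		for id, inst := range info.Instances {
			if sig, ok := inst.Type.(*types.Signature); ok {
				fmt.Printf("%s instantiated as %s\n", id.Name, sig)
			}
		}
	}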
diff --git a/internal/lsp/source/identifier_test.go b/internal/lsp/source/identifier_test.go
index 5e191e4..9bbdf58 100644
--- a/internal/lsp/source/identifier_test.go
+++ b/internal/lsp/source/identifier_test.go
@@ -83,14 +83,14 @@
 			if _, err = (*types.Config)(nil).Check("p", fset, []*ast.File{file}, info); err != nil {
 				t.Fatal(err)
 			}
-			typ := searchForEnclosing(info, path)
-			if typ == nil {
+			obj := searchForEnclosing(info, path)
+			if obj == nil {
 				if test.wantTypeName != "" {
 					t.Errorf("searchForEnclosing(...) = <nil>, want %q", test.wantTypeName)
 				}
 				return
 			}
-			if got := typ.(*types.Named).Obj().Name(); got != test.wantTypeName {
+			if got := obj.Name(); got != test.wantTypeName {
 				t.Errorf("searchForEnclosing(...) = %q, want %q", got, test.wantTypeName)
 			}
 		})
diff --git a/internal/lsp/source/implementation.go b/internal/lsp/source/implementation.go
index 3e35fa7..b53d7c9 100644
--- a/internal/lsp/source/implementation.go
+++ b/internal/lsp/source/implementation.go
@@ -215,7 +215,7 @@
 // every package that the file belongs to, in every typechecking mode
 // applicable.
 func qualifiedObjsAtProtocolPos(ctx context.Context, s Snapshot, uri span.URI, pp protocol.Position) ([]qualifiedObject, error) {
-	pkgs, err := s.PackagesForFile(ctx, uri, TypecheckAll)
+	pkgs, err := s.PackagesForFile(ctx, uri, TypecheckAll, false)
 	if err != nil {
 		return nil, err
 	}
@@ -223,7 +223,6 @@
 		return nil, errNoObjectFound
 	}
 	pkg := pkgs[0]
-	var offset int
 	pgf, err := pkg.File(uri)
 	if err != nil {
 		return nil, err
@@ -236,7 +235,10 @@
 	if err != nil {
 		return nil, err
 	}
-	offset = pgf.Tok.Offset(rng.Start)
+	offset, err := Offset(pgf.Tok, rng.Start)
+	if err != nil {
+		return nil, err
+	}
 	return qualifiedObjsAtLocation(ctx, s, objSearchKey{uri, offset}, map[objSearchKey]bool{})
 }
 
@@ -262,7 +264,7 @@
 	// try to be comprehensive in case we ever support variations on build
 	// constraints.
 
-	pkgs, err := s.PackagesForFile(ctx, key.uri, TypecheckAll)
+	pkgs, err := s.PackagesForFile(ctx, key.uri, TypecheckAll, false)
 	if err != nil {
 		return nil, err
 	}
@@ -350,7 +352,11 @@
 			offset := -1
 			for _, pgf := range pkg.CompiledGoFiles() {
 				if pgf.Tok.Base() <= int(pos) && int(pos) <= pgf.Tok.Base()+pgf.Tok.Size() {
-					offset = pgf.Tok.Offset(pos)
+					var err error
+					offset, err = Offset(pgf.Tok, pos)
+					if err != nil {
+						return nil, err
+					}
 					uri = pgf.URI
 				}
 			}
diff --git a/internal/lsp/source/offset_test.go b/internal/lsp/source/offset_test.go
new file mode 100644
index 0000000..1007677
--- /dev/null
+++ b/internal/lsp/source/offset_test.go
@@ -0,0 +1,71 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source_test
+
+import (
+	"go/token"
+	"go/types"
+	"testing"
+
+	"golang.org/x/tools/go/packages"
+)
+
+// This test reports any unexpected uses of (*go/token.File).Offset within
+// the gopls codebase to ensure that we don't check in more code that is prone
+// to panicking. All calls to (*go/token.File).Offset should be replaced with
+// calls to source.Offset.
+func TestTokenOffset(t *testing.T) {
+	fset := token.NewFileSet()
+	pkgs, err := packages.Load(&packages.Config{
+		Fset: fset,
+		Mode: packages.NeedName | packages.NeedModule | packages.NeedCompiledGoFiles | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps,
+	}, "go/token", "golang.org/x/tools/internal/lsp/...", "golang.org/x/tools/gopls/...")
+	if err != nil {
+		t.Fatal(err)
+	}
+	var tokPkg *packages.Package
+	for _, pkg := range pkgs {
+		if pkg.PkgPath == "go/token" {
+			tokPkg = pkg
+			break
+		}
+	}
+	typname, ok := tokPkg.Types.Scope().Lookup("File").(*types.TypeName)
+	if !ok {
+		t.Fatal("expected go/token.File typename, got none")
+	}
+	named, ok := typname.Type().(*types.Named)
+	if !ok {
+		t.Fatalf("expected named type, got %T", typname.Type())
+	}
+	var offset *types.Func
+	for i := 0; i < named.NumMethods(); i++ {
+		meth := named.Method(i)
+		if meth.Name() == "Offset" {
+			offset = meth
+			break
+		}
+	}
+	for _, pkg := range pkgs {
+		for ident, obj := range pkg.TypesInfo.Uses {
+			if ident.Name != "Offset" {
+				continue
+			}
+			if pkg.PkgPath == "go/token" {
+				continue
+			}
+			if !types.Identical(offset.Type(), obj.Type()) {
+				continue
+			}
+			// The only permitted use is in golang.org/x/tools/internal/lsp/source.Offset,
+			// so check the enclosing function.
+			sourceOffset := pkg.Types.Scope().Lookup("Offset").(*types.Func)
+			if sourceOffset.Pos() <= ident.Pos() && ident.Pos() <= sourceOffset.Scope().End() {
+				continue // accepted usage
+			}
+			t.Errorf(`%s: Unexpected use of (*go/token.File).Offset. Please use golang.org/x/tools/internal/lsp/source.Offset instead.`, fset.Position(ident.Pos()))
+		}
+	}
+}
diff --git a/internal/lsp/source/options.go b/internal/lsp/source/options.go
index 5175507..8e262c6 100644
--- a/internal/lsp/source/options.go
+++ b/internal/lsp/source/options.go
@@ -7,6 +7,7 @@
 import (
 	"context"
 	"fmt"
+	"io"
 	"path/filepath"
 	"regexp"
 	"strings"
@@ -46,15 +47,19 @@
 	"golang.org/x/tools/go/analysis/passes/unsafeptr"
 	"golang.org/x/tools/go/analysis/passes/unusedresult"
 	"golang.org/x/tools/go/analysis/passes/unusedwrite"
+	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/lsp/analysis/fillreturns"
 	"golang.org/x/tools/internal/lsp/analysis/fillstruct"
+	"golang.org/x/tools/internal/lsp/analysis/infertypeargs"
 	"golang.org/x/tools/internal/lsp/analysis/nonewvars"
 	"golang.org/x/tools/internal/lsp/analysis/noresultvalues"
 	"golang.org/x/tools/internal/lsp/analysis/simplifycompositelit"
 	"golang.org/x/tools/internal/lsp/analysis/simplifyrange"
 	"golang.org/x/tools/internal/lsp/analysis/simplifyslice"
+	"golang.org/x/tools/internal/lsp/analysis/stubmethods"
 	"golang.org/x/tools/internal/lsp/analysis/undeclaredname"
 	"golang.org/x/tools/internal/lsp/analysis/unusedparams"
+	"golang.org/x/tools/internal/lsp/analysis/useany"
 	"golang.org/x/tools/internal/lsp/command"
 	"golang.org/x/tools/internal/lsp/diff"
 	"golang.org/x/tools/internal/lsp/diff/myers"
@@ -69,7 +74,7 @@
 
 // DefaultOptions is the options that are used for Gopls execution independent
 // of any externally provided configuration (LSP initialization, command
-// invokation, etc.).
+// invocation, etc.).
 func DefaultOptions() *Options {
 	optionsOnce.Do(func() {
 		var commands []string
@@ -78,13 +83,14 @@
 		}
 		defaultOptions = &Options{
 			ClientOptions: ClientOptions{
-				InsertTextFormat:                  protocol.PlainTextTextFormat,
-				PreferredContentFormat:            protocol.Markdown,
-				ConfigurationSupported:            true,
-				DynamicConfigurationSupported:     true,
-				DynamicWatchedFilesSupported:      true,
-				LineFoldingOnly:                   false,
-				HierarchicalDocumentSymbolSupport: true,
+				InsertTextFormat:                           protocol.PlainTextTextFormat,
+				PreferredContentFormat:                     protocol.Markdown,
+				ConfigurationSupported:                     true,
+				DynamicConfigurationSupported:              true,
+				DynamicRegistrationSemanticTokensSupported: true,
+				DynamicWatchedFilesSupported:               true,
+				LineFoldingOnly:                            false,
+				HierarchicalDocumentSymbolSupport:          true,
 			},
 			ServerOptions: ServerOptions{
 				SupportedCodeActions: map[FileKind]map[protocol.CodeActionKind]bool{
@@ -99,6 +105,7 @@
 						protocol.SourceOrganizeImports: true,
 						protocol.QuickFix:              true,
 					},
+					Work: {},
 					Sum:  {},
 					Tmpl: {},
 				},
@@ -109,6 +116,8 @@
 					ExpandWorkspaceToModule:     true,
 					ExperimentalPackageCacheKey: true,
 					MemoryMode:                  ModeNormal,
+					DirectoryFilters:            []string{"-node_modules"},
+					TemplateExtensions:          []string{},
 				},
 				UIOptions: UIOptions{
 					DiagnosticOptions: DiagnosticOptions{
@@ -127,7 +136,7 @@
 					},
 					NavigationOptions: NavigationOptions{
 						ImportShortcut: Both,
-						SymbolMatcher:  SymbolFuzzy,
+						SymbolMatcher:  SymbolFastFuzzy,
 						SymbolStyle:    DynamicSymbols,
 					},
 					CompletionOptions: CompletionOptions{
@@ -179,18 +188,19 @@
 // ClientOptions holds LSP-specific configuration that is provided by the
 // client.
 type ClientOptions struct {
-	InsertTextFormat                  protocol.InsertTextFormat
-	ConfigurationSupported            bool
-	DynamicConfigurationSupported     bool
-	DynamicWatchedFilesSupported      bool
-	PreferredContentFormat            protocol.MarkupKind
-	LineFoldingOnly                   bool
-	HierarchicalDocumentSymbolSupport bool
-	SemanticTypes                     []string
-	SemanticMods                      []string
-	RelatedInformationSupported       bool
-	CompletionTags                    bool
-	CompletionDeprecated              bool
+	InsertTextFormat                           protocol.InsertTextFormat
+	ConfigurationSupported                     bool
+	DynamicConfigurationSupported              bool
+	DynamicRegistrationSemanticTokensSupported bool
+	DynamicWatchedFilesSupported               bool
+	PreferredContentFormat                     protocol.MarkupKind
+	LineFoldingOnly                            bool
+	HierarchicalDocumentSymbolSupport          bool
+	SemanticTypes                              []string
+	SemanticMods                               []string
+	RelatedInformationSupported                bool
+	CompletionTags                             bool
+	CompletionDeprecated                       bool
 }
 
 // ServerOptions holds LSP-specific configuration that is provided by the
@@ -225,6 +235,11 @@
 	// Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`
 	DirectoryFilters []string
 
+	// TemplateExtensions gives the extensions of file names that are treated
+	// as template files. (The extension is the part of the file name after
+	// the final dot.)
+	TemplateExtensions []string
+
 	// MemoryMode controls the tradeoff `gopls` makes between memory usage and
 	// correctness.
 	//
@@ -243,10 +258,6 @@
 	// for multi-module workspaces.
 	ExperimentalWorkspaceModule bool `status:"experimental"`
 
-	// ExperimentalTemplateSupport opts into the experimental support
-	// for template files.
-	ExperimentalTemplateSupport bool `status:"experimental"`
-
 	// ExperimentalPackageCacheKey controls whether to use a coarser cache key
 	// for package type information to increase cache hits. This setting removes
 	// the user's environment, build flags, and working directory from the cache
@@ -280,7 +291,7 @@
 
 	// Codelenses overrides the enabled/disabled state of code lenses. See the
 	// "Code Lenses" section of the
-	// [Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md)
+	// [Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses)
 	// for the list of supported lenses.
 	//
 	// Example Usage:
@@ -288,7 +299,7 @@
 	// ```json5
 	// "gopls": {
 	// ...
-	//   "codelens": {
+	//   "codelenses": {
 	//     "generate": false,  // Don't show the `go generate` lens.
 	//     "gc_details": true  // Show a code lens toggling the display of gc's choices.
 	//   }
@@ -318,7 +329,7 @@
 	// candidates.
 	Matcher Matcher `status:"advanced"`
 
-	// ExperimentalPostfixCompletions enables artifical method snippets
+	// ExperimentalPostfixCompletions enables artificial method snippets
 	// such as "someSlice.sort!".
 	ExperimentalPostfixCompletions bool `status:"experimental"`
 }
@@ -410,7 +421,7 @@
 	// ```json5
 	// "gopls": {
 	// ...
-	//   "symbolStyle": "dynamic",
+	//   "symbolStyle": "Dynamic",
 	// ...
 	// }
 	// ```
@@ -452,15 +463,23 @@
 // Hooks contains configuration that is provided to the Gopls command by the
 // main package.
 type Hooks struct {
-	LicensesText         string
-	GoDiff               bool
-	ComputeEdits         diff.ComputeEdits
-	URLRegexp            *regexp.Regexp
-	GofumptFormat        func(ctx context.Context, src []byte) ([]byte, error)
+	LicensesText string
+	GoDiff       bool
+	ComputeEdits diff.ComputeEdits
+	URLRegexp    *regexp.Regexp
+
+	// GofumptFormat allows the gopls module to wire-in a call to
+	// gofumpt/format.Source. langVersion and modulePath are used for some
+	// Gofumpt formatting rules -- see the Gofumpt documentation for details.
+	GofumptFormat func(ctx context.Context, langVersion, modulePath string, src []byte) ([]byte, error)
+
 	DefaultAnalyzers     map[string]*Analyzer
 	TypeErrorAnalyzers   map[string]*Analyzer
 	ConvenienceAnalyzers map[string]*Analyzer
 	StaticcheckAnalyzers map[string]*Analyzer
+
+	// Govulncheck is the implementation of the Govulncheck gopls command.
+	Govulncheck func(context.Context, *packages.Config, command.VulncheckArgs) (command.VulncheckResult, error)
 }
 
 // InternalOptions contains settings that are not intended for use by the
@@ -651,6 +670,7 @@
 	// Check if the client supports configuration messages.
 	o.ConfigurationSupported = caps.Workspace.Configuration
 	o.DynamicConfigurationSupported = caps.Workspace.DidChangeConfiguration.DynamicRegistration
+	o.DynamicRegistrationSemanticTokensSupported = caps.TextDocument.SemanticTokens.DynamicRegistration
 	o.DynamicWatchedFilesSupported = caps.Workspace.DidChangeWatchedFiles.DynamicRegistration
 
 	// Check which types of content format are supported by this client.
@@ -683,10 +703,11 @@
 		ClientOptions:   o.ClientOptions,
 		InternalOptions: o.InternalOptions,
 		Hooks: Hooks{
-			GoDiff:        o.Hooks.GoDiff,
-			ComputeEdits:  o.Hooks.ComputeEdits,
+			GoDiff:        o.GoDiff,
+			ComputeEdits:  o.ComputeEdits,
 			GofumptFormat: o.GofumptFormat,
 			URLRegexp:     o.URLRegexp,
+			Govulncheck:   o.Govulncheck,
 		},
 		ServerOptions: o.ServerOptions,
 		UserOptions:   o.UserOptions,
@@ -740,9 +761,9 @@
 func (o *Options) EnableAllExperiments() {
 	o.SemanticTokens = true
 	o.ExperimentalPostfixCompletions = true
-	o.ExperimentalTemplateSupport = true
 	o.ExperimentalUseInvalidMetadata = true
 	o.ExperimentalWatchedFileDelay = 50 * time.Millisecond
+	o.SymbolMatcher = SymbolFastFuzzy
 }
 
 func (o *Options) enableAllExperimentMaps() {
@@ -799,7 +820,7 @@
 		var filters []string
 		for _, ifilter := range ifilters {
 			filter := fmt.Sprint(ifilter)
-			if filter[0] != '+' && filter[0] != '-' {
+			if filter == "" || (filter[0] != '+' && filter[0] != '-') {
 				result.errorf("invalid filter %q, must start with + or -", filter)
 				return result
 			}
@@ -928,9 +949,23 @@
 	case "experimentalWorkspaceModule":
 		result.setBool(&o.ExperimentalWorkspaceModule)
 
-	case "experimentalTemplateSupport":
-		result.setBool(&o.ExperimentalTemplateSupport)
+	case "experimentalTemplateSupport": // remove after June 2022
+		result.State = OptionDeprecated
 
+	case "templateExtensions":
+		if iexts, ok := value.([]interface{}); ok {
+			ans := []string{}
+			for _, x := range iexts {
+				ans = append(ans, fmt.Sprint(x))
+			}
+			o.TemplateExtensions = ans
+			break
+		}
+		if value == nil {
+			o.TemplateExtensions = nil
+			break
+		}
+		result.errorf(fmt.Sprintf("unexpected type %T not []string", value))
 	case "experimentalDiagnosticsDelay", "diagnosticsDelay":
 		if name == "experimentalDiagnosticsDelay" {
 			result.State = OptionDeprecated
@@ -1193,6 +1228,12 @@
 			Enabled:    true,
 			ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite},
 		},
+		stubmethods.Analyzer.Name: {
+			Analyzer:   stubmethods.Analyzer,
+			ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite},
+			Fix:        StubMethods,
+			Enabled:    true,
+		},
 	}
 }
 
@@ -1234,6 +1275,8 @@
 		testinggoroutine.Analyzer.Name: {Analyzer: testinggoroutine.Analyzer, Enabled: true},
 		unusedparams.Analyzer.Name:     {Analyzer: unusedparams.Analyzer, Enabled: false},
 		unusedwrite.Analyzer.Name:      {Analyzer: unusedwrite.Analyzer, Enabled: false},
+		useany.Analyzer.Name:           {Analyzer: useany.Analyzer, Enabled: false},
+		infertypeargs.Analyzer.Name:    {Analyzer: infertypeargs.Analyzer, Enabled: true},
 
 		// gofmt -s suite:
 		simplifycompositelit.Analyzer.Name: {
@@ -1279,6 +1322,66 @@
 	Hierarchy  string
 }
 
+func (o *OptionJSON) String() string {
+	return o.Name
+}
+
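+// Write renders the option as a Markdown section, for example (illustrative):
+//
+//	**someOption** *bool*
+//
+//	Documentation for someOption.
+//	Default: `false`.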
+func (o *OptionJSON) Write(w io.Writer) {
+	fmt.Fprintf(w, "**%v** *%v*\n\n", o.Name, o.Type)
+	writeStatus(w, o.Status)
+	enumValues := collectEnums(o)
+	fmt.Fprintf(w, "%v%v\nDefault: `%v`.\n\n", o.Doc, enumValues, o.Default)
+}
+
+func writeStatus(section io.Writer, status string) {
+	switch status {
+	case "":
+	case "advanced":
+		fmt.Fprint(section, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n")
+	case "debug":
+		fmt.Fprint(section, "**This setting is for debugging purposes only.**\n\n")
+	case "experimental":
+		fmt.Fprint(section, "**This setting is experimental and may be deleted.**\n\n")
+	default:
+		fmt.Fprintf(section, "**Status: %s.**\n\n", status)
+	}
+}
+
+var parBreakRE = regexp.MustCompile("\n{2,}")
+
+func collectEnums(opt *OptionJSON) string {
+	var b strings.Builder
+	write := func(name, doc string, index, len int) {
+		if doc != "" {
+			unbroken := parBreakRE.ReplaceAllString(doc, "\\\n")
+			fmt.Fprintf(&b, "* %s\n", strings.TrimSpace(unbroken))
+		} else {
+			fmt.Fprintf(&b, "* `%s`\n", name)
+		}
+	}
+	if len(opt.EnumValues) > 0 && opt.Type == "enum" {
+		b.WriteString("\nMust be one of:\n\n")
+		for i, val := range opt.EnumValues {
+			write(val.Value, val.Doc, i, len(opt.EnumValues))
+		}
+	} else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) {
+		b.WriteString("\nCan contain any of:\n\n")
+		for i, val := range opt.EnumKeys.Keys {
+			write(val.Name, val.Doc, i, len(opt.EnumKeys.Keys))
+		}
+	}
+	return b.String()
+}
+
+func shouldShowEnumKeysInSettings(name string) bool {
+	// Both of these fields have too many possible options to print.
+	return !hardcodedEnumKeys(name)
+}
+
+func hardcodedEnumKeys(name string) bool {
+	return name == "analyses" || name == "codelenses"
+}
+
 type EnumKeys struct {
 	ValueType string
 	Keys      []EnumKey
@@ -1303,14 +1406,44 @@
 	ResultDoc string
 }
 
+func (c *CommandJSON) String() string {
+	return c.Command
+}
+
+func (c *CommandJSON) Write(w io.Writer) {
+	fmt.Fprintf(w, "### **%v**\nIdentifier: `%v`\n\n%v\n\n", c.Title, c.Command, c.Doc)
+	if c.ArgDoc != "" {
+		fmt.Fprintf(w, "Args:\n\n```\n%s\n```\n\n", c.ArgDoc)
+	}
+	if c.ResultDoc != "" {
+		fmt.Fprintf(w, "Result:\n\n```\n%s\n```\n\n", c.ResultDoc)
+	}
+}
+
 type LensJSON struct {
 	Lens  string
 	Title string
 	Doc   string
 }
 
+func (l *LensJSON) String() string {
+	return l.Title
+}
+
+func (l *LensJSON) Write(w io.Writer) {
+	fmt.Fprintf(w, "%s (%s): %s", l.Title, l.Lens, l.Doc)
+}
+
 type AnalyzerJSON struct {
 	Name    string
 	Doc     string
 	Default bool
 }
+
+func (a *AnalyzerJSON) String() string {
+	return a.Name
+}
+
+func (a *AnalyzerJSON) Write(w io.Writer) {
+	fmt.Fprintf(w, "%s (%s): %v", a.Name, a.Doc, a.Default)
+}
diff --git a/internal/lsp/source/options_test.go b/internal/lsp/source/options_test.go
index 83cb795..f8260c1 100644
--- a/internal/lsp/source/options_test.go
+++ b/internal/lsp/source/options_test.go
@@ -18,7 +18,7 @@
 	}{
 		{
 			name:  "symbolStyle",
-			value: "dynamic",
+			value: "Dynamic",
 			check: func(o Options) bool { return o.SymbolStyle == DynamicSymbols },
 		},
 		{
diff --git a/internal/lsp/source/references.go b/internal/lsp/source/references.go
index 1cd9a40..5d3eac3 100644
--- a/internal/lsp/source/references.go
+++ b/internal/lsp/source/references.go
@@ -6,6 +6,7 @@
 
 import (
 	"context"
+	"fmt"
 	"go/ast"
 	"go/token"
 	"go/types"
@@ -68,7 +69,11 @@
 		seen       = make(map[token.Pos]bool)
 	)
 
-	filename := snapshot.FileSet().Position(qos[0].obj.Pos()).Filename
+	pos := qos[0].obj.Pos()
+	if pos == token.NoPos {
+		return nil, fmt.Errorf("no position for %s", qos[0].obj)
+	}
+	filename := snapshot.FileSet().Position(pos).Filename
 	pgf, err := qos[0].pkg.File(span.URIFromPath(filename))
 	if err != nil {
 		return nil, err
@@ -104,10 +109,13 @@
 		searchPkgs = append(searchPkgs, qo.pkg)
 		for _, pkg := range searchPkgs {
 			for ident, obj := range pkg.GetTypesInfo().Uses {
-				if obj != qo.obj {
-					// If ident is not a use of qo.obj, skip it, with one exception: uses
-					// of an embedded field can be considered references of the embedded
-					// type name.
+				// For instantiated objects (as in methods or fields on instantiated
+				// types), we may not have pointer-identical objects but still want to
+				// consider them references.
+				if !equalOrigin(obj, qo.obj) {
+					// If ident is not a use of qo.obj, skip it, with one exception:
+					// uses of an embedded field can be considered references of the
+					// embedded type name
 					if !includeEmbeddedRefs {
 						continue
 					}
@@ -162,6 +170,13 @@
 	return references, nil
 }
 
+// equalOrigin reports whether obj1 and obj2 have equivalent origin objects.
+// This may be the case even if obj1 != obj2, if one or both of them is
+// instantiated.
+func equalOrigin(obj1, obj2 types.Object) bool {
+	return obj1.Pkg() == obj2.Pkg() && obj1.Pos() == obj2.Pos() && obj1.Name() == obj2.Name()
+}
+
 // interfaceReferences returns the references to the interfaces implemented by
 // the type or method at the given position.
 func interfaceReferences(ctx context.Context, s Snapshot, f FileHandle, pp protocol.Position) ([]*ReferenceInfo, error) {
diff --git a/internal/lsp/source/signature_help.go b/internal/lsp/source/signature_help.go
index 9c52f99..e7ed9cc 100644
--- a/internal/lsp/source/signature_help.go
+++ b/internal/lsp/source/signature_help.go
@@ -115,12 +115,12 @@
 			node: node,
 		}
 		decl.MappedRange = append(decl.MappedRange, rng)
-		d, err := HoverInfo(ctx, snapshot, pkg, decl.obj, decl.node, nil)
+		d, err := FindHoverContext(ctx, snapshot, pkg, decl.obj, decl.node, nil)
 		if err != nil {
 			return nil, 0, err
 		}
 		name = obj.Name()
-		comment = d.comment
+		comment = d.Comment
 	} else {
 		name = "func"
 	}
diff --git a/internal/lsp/source/source_test.go b/internal/lsp/source/source_test.go
index f1ab3ff..dc5fe53 100644
--- a/internal/lsp/source/source_test.go
+++ b/internal/lsp/source/source_test.go
@@ -52,7 +52,7 @@
 	options := source.DefaultOptions().Clone()
 	tests.DefaultOptions(options)
 	options.SetEnvSlice(datum.Config.Env)
-	view, _, release, err := session.NewView(ctx, "source_test", span.URIFromPath(datum.Config.Dir), "", options)
+	view, _, release, err := session.NewView(ctx, "source_test", span.URIFromPath(datum.Config.Dir), options)
 	release()
 	if err != nil {
 		t.Fatal(err)
@@ -66,8 +66,7 @@
 
 	var modifications []source.FileModification
 	for filename, content := range datum.Config.Overlay {
-		kind := source.DetectLanguage("", filename)
-		if kind != source.Go {
+		if filepath.Ext(filename) != ".go" {
 			continue
 		}
 		modifications = append(modifications, source.FileModification{
@@ -576,12 +575,14 @@
 	didSomething := false
 	if hover != "" {
 		didSomething = true
-		tag := fmt.Sprintf("%s-hover", d.Name)
+		tag := fmt.Sprintf("%s-hoverdef", d.Name)
 		expectHover := string(r.data.Golden(tag, d.Src.URI().Filename(), func() ([]byte, error) {
 			return []byte(hover), nil
 		}))
+		hover = tests.StripSubscripts(hover)
+		expectHover = tests.StripSubscripts(expectHover)
 		if hover != expectHover {
-			t.Errorf("hover for %s failed:\n%s", d.Src, tests.Diff(t, expectHover, hover))
+			t.Errorf("hoverdef for %s failed:\n%s", d.Src, tests.Diff(t, expectHover, hover))
 		}
 	}
 	if !d.OnlyHover {
@@ -682,6 +683,37 @@
 	}
 }
 
+func (r *runner) Hover(t *testing.T, src span.Span, text string) {
+	ctx := r.ctx
+	_, srcRng, err := spanToRange(r.data, src)
+	if err != nil {
+		t.Fatal(err)
+	}
+	fh, err := r.snapshot.GetFile(r.ctx, src.URI())
+	if err != nil {
+		t.Fatal(err)
+	}
+	hover, err := source.Hover(ctx, r.snapshot, fh, srcRng.Start)
+	if err != nil {
+		t.Errorf("hover failed for %s: %v", src.URI(), err)
+	}
+	if text == "" {
+		if hover != nil {
+			t.Errorf("want nil, got %v\n", hover)
+		}
+	} else {
+		if hover == nil {
+			t.Fatalf("want hover result to not be nil")
+		}
+		if got := hover.Contents.Value; got != text {
+			t.Errorf("want %v, got %v\n", got, text)
+		}
+		if want, got := srcRng, hover.Range; want != got {
+			t.Errorf("want range %v, got %v instead", want, got)
+		}
+	}
+}
+
 func (r *runner) References(t *testing.T, src span.Span, itemList []span.Span) {
 	ctx := r.ctx
 	_, srcRng, err := spanToRange(r.data, src)
diff --git a/internal/lsp/source/stub.go b/internal/lsp/source/stub.go
new file mode 100644
index 0000000..6810f1d
--- /dev/null
+++ b/internal/lsp/source/stub.go
@@ -0,0 +1,330 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+	"bytes"
+	"context"
+	"fmt"
+	"go/ast"
+	"go/format"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/internal/lsp/analysis/stubmethods"
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/span"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func stubSuggestedFixFunc(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, rng protocol.Range) (*analysis.SuggestedFix, error) {
+	pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
+	if err != nil {
+		return nil, fmt.Errorf("GetParsedFile: %w", err)
+	}
+	nodes, pos, err := getStubNodes(pgf, rng)
+	if err != nil {
+		return nil, fmt.Errorf("getNodes: %w", err)
+	}
+	si := stubmethods.GetStubInfo(pkg.GetTypesInfo(), nodes, pos)
+	if si == nil {
+		return nil, fmt.Errorf("nil interface request")
+	}
+	parsedConcreteFile, concreteFH, err := getStubFile(ctx, si.Concrete.Obj(), snapshot)
+	if err != nil {
+		return nil, fmt.Errorf("getFile(concrete): %w", err)
+	}
+	var (
+		methodsSrc  []byte
+		stubImports []*stubImport // additional imports needed for method stubs
+	)
+	if si.Interface.Pkg() == nil && si.Interface.Name() == "error" && si.Interface.Parent() == types.Universe {
+		methodsSrc = stubErr(ctx, parsedConcreteFile.File, si, snapshot)
+	} else {
+		methodsSrc, stubImports, err = stubMethods(ctx, parsedConcreteFile.File, si, snapshot)
+	}
+	if err != nil {
+		return nil, fmt.Errorf("stubMethods: %w", err)
+	}
+	nodes, _ = astutil.PathEnclosingInterval(parsedConcreteFile.File, si.Concrete.Obj().Pos(), si.Concrete.Obj().Pos())
+	concreteSrc, err := concreteFH.Read()
+	if err != nil {
+		return nil, fmt.Errorf("error reading concrete file source: %w", err)
+	}
+	insertPos := snapshot.FileSet().Position(nodes[1].End()).Offset
+	if insertPos >= len(concreteSrc) {
+		return nil, fmt.Errorf("insertion position is past the end of the file")
+	}
+	var buf bytes.Buffer
+	buf.Write(concreteSrc[:insertPos])
+	buf.WriteByte('\n')
+	buf.Write(methodsSrc)
+	buf.Write(concreteSrc[insertPos:])
+	fset := token.NewFileSet()
+	newF, err := parser.ParseFile(fset, parsedConcreteFile.File.Name.Name, buf.Bytes(), parser.ParseComments)
+	if err != nil {
+		return nil, fmt.Errorf("could not reparse file: %w", err)
+	}
+	for _, imp := range stubImports {
+		astutil.AddNamedImport(fset, newF, imp.Name, imp.Path)
+	}
+	var source bytes.Buffer
+	err = format.Node(&source, fset, newF)
+	if err != nil {
+		return nil, fmt.Errorf("format.Node: %w", err)
+	}
+	diffEdits, err := snapshot.View().Options().ComputeEdits(parsedConcreteFile.URI, string(parsedConcreteFile.Src), source.String())
+	if err != nil {
+		return nil, err
+	}
+	var edits []analysis.TextEdit
+	for _, edit := range diffEdits {
+		rng, err := edit.Span.Range(parsedConcreteFile.Mapper.Converter)
+		if err != nil {
+			return nil, err
+		}
+		edits = append(edits, analysis.TextEdit{
+			Pos:     rng.Start,
+			End:     rng.End,
+			NewText: []byte(edit.NewText),
+		})
+	}
+	return &analysis.SuggestedFix{
+		TextEdits: edits,
+	}, nil
+}
+
+// stubMethods returns the Go code for the concrete type's
+// missing methods needed to implement the given interface.
+func stubMethods(ctx context.Context, concreteFile *ast.File, si *stubmethods.StubInfo, snapshot Snapshot) ([]byte, []*stubImport, error) {
+	ifacePkg, err := deducePkgFromTypes(ctx, snapshot, si.Interface)
+	if err != nil {
+		return nil, nil, err
+	}
+	si.Concrete.Obj().Type()
+	concMS := types.NewMethodSet(types.NewPointer(si.Concrete.Obj().Type()))
+	missing, err := missingMethods(ctx, snapshot, concMS, si.Concrete.Obj().Pkg(), si.Interface, ifacePkg, map[string]struct{}{})
+	if err != nil {
+		return nil, nil, fmt.Errorf("missingMethods: %w", err)
+	}
+	if len(missing) == 0 {
+		return nil, nil, fmt.Errorf("no missing methods found")
+	}
+	var (
+		stubImports   []*stubImport
+		methodsBuffer bytes.Buffer
+	)
+	for _, mi := range missing {
+		for _, m := range mi.missing {
+			// TODO(marwan-at-work): this should share the same logic with source.FormatVarType
+			// as it also accounts for type aliases.
+			sig := types.TypeString(m.Type(), stubmethods.RelativeToFiles(si.Concrete.Obj().Pkg(), concreteFile, mi.file, func(name, path string) {
+				for _, imp := range stubImports {
+					if imp.Name == name && imp.Path == path {
+						return
+					}
+				}
+				stubImports = append(stubImports, &stubImport{name, path})
+			}))
+			_, err = methodsBuffer.Write(printStubMethod(methodData{
+				Method:    m.Name(),
+				Concrete:  getStubReceiver(si),
+				Interface: deduceIfaceName(si.Concrete.Obj().Pkg(), si.Interface.Pkg(), si.Interface),
+				Signature: strings.TrimPrefix(sig, "func"),
+			}))
+			if err != nil {
+				return nil, nil, fmt.Errorf("error printing method: %w", err)
+			}
+			methodsBuffer.WriteRune('\n')
+		}
+	}
+	return methodsBuffer.Bytes(), stubImports, nil
+}
+
+// stubErr returns the Go code that implements the
+// error interface for the concrete type.
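+//
+// For example (illustrative), for a concrete receiver T the generated stub is:
+//
+//	// Error implements error
+//	func (T) Error() string {
+//		panic("unimplemented")
+//	}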
+func stubErr(ctx context.Context, concreteFile *ast.File, si *stubmethods.StubInfo, snapshot Snapshot) []byte {
+	return printStubMethod(methodData{
+		Method:    "Error",
+		Interface: "error",
+		Concrete:  getStubReceiver(si),
+		Signature: "() string",
+	})
+}
+
+// getStubReceiver returns the concrete type's name as a method receiver.
+// It accounts for type parameters if they exist.
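+// For example (illustrative), a pointer receiver on a generic type
+// Foo[T any] is rendered as "*Foo[T]".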
+func getStubReceiver(si *stubmethods.StubInfo) string {
+	var concrete string
+	if si.Pointer {
+		concrete += "*"
+	}
+	concrete += si.Concrete.Obj().Name()
+	concrete += FormatTypeParams(typeparams.ForNamed(si.Concrete))
+	return concrete
+}
+
+type methodData struct {
+	Method    string
+	Interface string
+	Concrete  string
+	Signature string
+}
+
+// printStubMethod takes methodData and returns Go code that represents the given method such as:
+// 	// {{ .Method }} implements {{ .Interface }}
+// 	func ({{ .Concrete }}) {{ .Method }}{{ .Signature }} {
+// 		panic("unimplemented")
+// 	}
+func printStubMethod(md methodData) []byte {
+	var b bytes.Buffer
+	fmt.Fprintf(&b, "// %s implements %s\n", md.Method, md.Interface)
+	fmt.Fprintf(&b, "func (%s) %s%s {\n\t", md.Concrete, md.Method, md.Signature)
+	fmt.Fprintln(&b, `panic("unimplemented")`)
+	fmt.Fprintln(&b, "}")
+	return b.Bytes()
+}
+
+func deducePkgFromTypes(ctx context.Context, snapshot Snapshot, ifaceObj types.Object) (Package, error) {
+	pkgs, err := snapshot.KnownPackages(ctx)
+	if err != nil {
+		return nil, err
+	}
+	for _, p := range pkgs {
+		if p.PkgPath() == ifaceObj.Pkg().Path() {
+			return p, nil
+		}
+	}
+	return nil, fmt.Errorf("pkg %q not found", ifaceObj.Pkg().Path())
+}
+
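+// deduceIfaceName returns the interface name, package-qualified when the
+// concrete type and the interface live in different packages. For example
+// (illustrative), an interface Writer declared in package io is rendered
+// as "io.Writer".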
+func deduceIfaceName(concretePkg, ifacePkg *types.Package, ifaceObj types.Object) string {
+	if concretePkg.Path() == ifacePkg.Path() {
+		return ifaceObj.Name()
+	}
+	return fmt.Sprintf("%s.%s", ifacePkg.Name(), ifaceObj.Name())
+}
+
+func getStubNodes(pgf *ParsedGoFile, pRng protocol.Range) ([]ast.Node, token.Pos, error) {
+	spn, err := pgf.Mapper.RangeSpan(pRng)
+	if err != nil {
+		return nil, 0, err
+	}
+	rng, err := spn.Range(pgf.Mapper.Converter)
+	if err != nil {
+		return nil, 0, err
+	}
+	nodes, _ := astutil.PathEnclosingInterval(pgf.File, rng.Start, rng.End)
+	return nodes, rng.Start, nil
+}
+
+/*
+missingMethods takes a concrete type and returns, for the given interface and any interfaces embedded
+in it, the methods that the concrete type is missing. For example:
+
+type I interface {
+	io.Writer
+	Hello()
+}
+returns []*missingInterface{
+	{
+		iface: *types.Interface (io.Writer),
+		file: *ast.File: io.go,
+		missing []*types.Func{Write},
+	},
+	{
+		iface: *types.Interface (I),
+		file: *ast.File: myfile.go,
+		missing: []*types.Func{Hello}
+	},
+}
+*/
+func missingMethods(ctx context.Context, snapshot Snapshot, concMS *types.MethodSet, concPkg *types.Package, ifaceObj types.Object, ifacePkg Package, visited map[string]struct{}) ([]*missingInterface, error) {
+	iface, ok := ifaceObj.Type().Underlying().(*types.Interface)
+	if !ok {
+		return nil, fmt.Errorf("expected %v to be an interface but got %T", iface, ifaceObj.Type().Underlying())
+	}
+	missing := []*missingInterface{}
+	for i := 0; i < iface.NumEmbeddeds(); i++ {
+		eiface := iface.Embedded(i).Obj()
+		depPkg := ifacePkg
+		if eiface.Pkg().Path() != ifacePkg.PkgPath() {
+			var err error
+			depPkg, err = ifacePkg.GetImport(eiface.Pkg().Path())
+			if err != nil {
+				return nil, err
+			}
+		}
+		em, err := missingMethods(ctx, snapshot, concMS, concPkg, eiface, depPkg, visited)
+		if err != nil {
+			return nil, err
+		}
+		missing = append(missing, em...)
+	}
+	parsedFile, _, err := getStubFile(ctx, ifaceObj, snapshot)
+	if err != nil {
+		return nil, fmt.Errorf("error getting iface file: %w", err)
+	}
+	mi := &missingInterface{
+		pkg:   ifacePkg,
+		iface: iface,
+		file:  parsedFile.File,
+	}
+	if mi.file == nil {
+		return nil, fmt.Errorf("could not find ast.File for %v", ifaceObj.Name())
+	}
+	for i := 0; i < iface.NumExplicitMethods(); i++ {
+		method := iface.ExplicitMethod(i)
+		// if the concrete type does not have the interface method
+		if concMS.Lookup(concPkg, method.Name()) == nil {
+			if _, ok := visited[method.Name()]; !ok {
+				mi.missing = append(mi.missing, method)
+				visited[method.Name()] = struct{}{}
+			}
+		}
+		if sel := concMS.Lookup(concPkg, method.Name()); sel != nil {
+			implSig := sel.Type().(*types.Signature)
+			ifaceSig := method.Type().(*types.Signature)
+			if !types.Identical(ifaceSig, implSig) {
+				return nil, fmt.Errorf("mismatched %q function signatures:\nhave: %s\nwant: %s", method.Name(), implSig, ifaceSig)
+			}
+		}
+	}
+	if len(mi.missing) > 0 {
+		missing = append(missing, mi)
+	}
+	return missing, nil
+}
+
+func getStubFile(ctx context.Context, obj types.Object, snapshot Snapshot) (*ParsedGoFile, VersionedFileHandle, error) {
+	objPos := snapshot.FileSet().Position(obj.Pos())
+	objFile := span.URIFromPath(objPos.Filename)
+	objectFH := snapshot.FindFile(objFile)
+	_, goFile, err := GetParsedFile(ctx, snapshot, objectFH, WidestPackage)
+	if err != nil {
+		return nil, nil, fmt.Errorf("GetParsedFile: %w", err)
+	}
+	return goFile, objectFH, nil
+}
+
+// missingInterface represents an interface
+// that has all or some of its methods missing
+// from the destination concrete type
+type missingInterface struct {
+	iface   *types.Interface
+	file    *ast.File
+	pkg     Package
+	missing []*types.Func
+}
+
+// stubImport represents a newly added import
+// statement to the concrete type. If name is not
+// empty, then that import is required to have that name.
+type stubImport struct{ Name, Path string }
diff --git a/internal/lsp/source/types_format.go b/internal/lsp/source/types_format.go
index c3f17b0..fcbf228 100644
--- a/internal/lsp/source/types_format.go
+++ b/internal/lsp/source/types_format.go
@@ -18,6 +18,7 @@
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/debug/tag"
 	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // FormatType returns the detail and kind for a types.Type.
@@ -38,10 +39,10 @@
 }
 
 type signature struct {
-	name, doc        string
-	params, results  []string
-	variadic         bool
-	needResultParens bool
+	name, doc                   string
+	typeParams, params, results []string
+	variadic                    bool
+	needResultParens            bool
 }
 
 func (s *signature) Format() string {
@@ -74,6 +75,10 @@
 	return b.String()
 }
 
+func (s *signature) TypeParams() []string {
+	return s.typeParams
+}
+
 func (s *signature) Params() []string {
 	return s.params
 }
@@ -167,8 +172,36 @@
 	return result, writeResultParens
 }
 
+// FormatTypeParams turns TypeParamList into its Go representation, such as:
+// [T, Y]. Note that it does not print constraints as this is mainly used for
+// formatting type params in method receivers.
+func FormatTypeParams(tparams *typeparams.TypeParamList) string {
+	if tparams == nil || tparams.Len() == 0 {
+		return ""
+	}
+	var buf bytes.Buffer
+	buf.WriteByte('[')
+	for i := 0; i < tparams.Len(); i++ {
+		if i > 0 {
+			buf.WriteString(", ")
+		}
+		buf.WriteString(tparams.At(i).Obj().Name())
+	}
+	buf.WriteByte(']')
+	return buf.String()
+}
+
 // NewSignature returns formatted signature for a types.Signature struct.
 func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier) *signature {
+	var tparams []string
+	tpList := typeparams.ForSignature(sig)
+	for i := 0; i < tpList.Len(); i++ {
+		tparam := tpList.At(i)
+		// TODO: is it possible to reuse the logic from FormatVarType here?
+		s := tparam.Obj().Name() + " " + tparam.Constraint().String()
+		tparams = append(tparams, s)
+	}
+
 	params := make([]string, 0, sig.Params().Len())
 	for i := 0; i < sig.Params().Len(); i++ {
 		el := sig.Params().At(i)
@@ -179,6 +212,7 @@
 		}
 		params = append(params, p)
 	}
+
 	var needResultParens bool
 	results := make([]string, 0, sig.Results().Len())
 	for i := 0; i < sig.Results().Len(); i++ {
@@ -208,6 +242,7 @@
 	}
 	return &signature{
 		doc:              d,
+		typeParams:       tparams,
 		params:           params,
 		results:          results,
 		variadic:         sig.Variadic(),
@@ -217,7 +252,7 @@
 
 // FormatVarType formats a *types.Var, accounting for type aliases.
 // To do this, it looks in the AST of the file in which the object is declared.
-// On any errors, it always fallbacks back to types.TypeString.
+// On any errors, it always falls back to types.TypeString.
 func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *types.Var, qf types.Qualifier) string {
 	pkg, err := FindPackageFromPos(ctx, snapshot, obj.Pos())
 	if err != nil {
@@ -229,6 +264,14 @@
 		return types.TypeString(obj.Type(), qf)
 	}
 
+	// If the given expr refers to a type parameter, then use the
+	// object's Type instead of the type parameter declaration. This helps
+	// format the instantiated type as opposed to the original undeclared
+	// generic type.
+	if typeparams.IsTypeParam(pkg.GetTypesInfo().Types[expr].Type) {
+		return types.TypeString(obj.Type(), qf)
+	}
+
 	// The type names in the AST may not be correctly qualified.
 	// Determine the package name to use based on the package that originated
 	// the query and the package in which the type is declared.
@@ -261,10 +304,13 @@
 		switch n := n.(type) {
 		case *ast.ArrayType, *ast.ChanType, *ast.Ellipsis,
 			*ast.FuncType, *ast.MapType, *ast.ParenExpr,
-			*ast.StarExpr, *ast.StructType:
+			*ast.StarExpr, *ast.StructType, *ast.FieldList, *ast.Field:
 			// These are the only types that are cloned by cloneExpr below,
 			// so these are the only types that we can traverse and potentially
 			// modify. This is not an ideal approach, but it works for now.
+
+			// TODO(rFindley): can we eliminate this filtering entirely? This caused
+			// bugs in the past (golang/go#50539)
 			return true
 		case *ast.SelectorExpr:
 			// We may need to change any selectors in which the X is a package
diff --git a/internal/lsp/source/util.go b/internal/lsp/source/util.go
index 4ff5d57..71892ea 100644
--- a/internal/lsp/source/util.go
+++ b/internal/lsp/source/util.go
@@ -6,6 +6,7 @@
 
 import (
 	"context"
+	"fmt"
 	"go/ast"
 	"go/printer"
 	"go/token"
@@ -16,6 +17,7 @@
 	"strconv"
 	"strings"
 
+	"golang.org/x/mod/modfile"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/span"
 	errors "golang.org/x/xerrors"
@@ -160,7 +162,9 @@
 //	https://golang.org/s/generatedcode
 var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`)
 
-func DetectLanguage(langID, filename string) FileKind {
+// FileKindForLang returns the file kind associated with the given language ID,
+// or UnknownKind if the language ID is not recognized.
+func FileKindForLang(langID string) FileKind {
 	switch langID {
 	case "go":
 		return Go
@@ -168,35 +172,29 @@
 		return Mod
 	case "go.sum":
 		return Sum
-	case "tmpl":
+	case "tmpl", "gotmpl":
 		return Tmpl
-	}
-	// Fallback to detecting the language based on the file extension.
-	switch ext := filepath.Ext(filename); ext {
-	case ".mod":
-		return Mod
-	case ".sum":
-		return Sum
+	case "go.work":
+		return Work
 	default:
-		if strings.HasSuffix(ext, "tmpl") {
-			// .tmpl, .gotmpl, etc
-			return Tmpl
-		}
-		// It's a Go file, or we shouldn't be seeing it
-		return Go
+		return UnknownKind
 	}
 }
 
 func (k FileKind) String() string {
 	switch k {
+	case Go:
+		return "go"
 	case Mod:
 		return "go.mod"
 	case Sum:
 		return "go.sum"
 	case Tmpl:
 		return "tmpl"
+	case Work:
+		return "go.work"
 	default:
-		return "go"
+		return fmt.Sprintf("unk%d", k)
 	}
 }
 
@@ -282,9 +280,7 @@
 		return nil, errors.Errorf("no file for pos %v", pos)
 	}
 	uri := span.URIFromPath(tok.Name())
-	// Search all packages: some callers may be working with packages not
-	// type-checked in workspace mode.
-	pkgs, err := snapshot.PackagesForFile(ctx, uri, TypecheckAll)
+	pkgs, err := snapshot.PackagesForFile(ctx, uri, TypecheckAll, true)
 	if err != nil {
 		return nil, err
 	}
@@ -545,8 +541,46 @@
 	return strings.Contains(s, "command-line-arguments")
 }
 
+// Offset returns tok.Offset(pos), but first checks that the pos is in range
+// for the given file.
+func Offset(tok *token.File, pos token.Pos) (int, error) {
+	if !InRange(tok, pos) {
+		return -1, fmt.Errorf("pos %v is not in range for file [%v:%v)", pos, tok.Base(), tok.Base()+tok.Size())
+	}
+	return tok.Offset(pos), nil
+}
+
+// Pos returns tok.Pos(offset), but first checks that the offset is valid for
+// the given file.
+func Pos(tok *token.File, offset int) (token.Pos, error) {
+	if offset < 0 || offset > tok.Size() {
+		return token.NoPos, fmt.Errorf("offset %v is not in range for file of size %v", offset, tok.Size())
+	}
+	return tok.Pos(offset), nil
+}
+
 // InRange reports whether the given position is in the given token.File.
 func InRange(tok *token.File, pos token.Pos) bool {
 	size := tok.Pos(tok.Size())
 	return int(pos) >= tok.Base() && pos <= size
 }
+
+// LineToRange creates a Range spanning start and end.
+func LineToRange(m *protocol.ColumnMapper, uri span.URI, start, end modfile.Position) (protocol.Range, error) {
+	return ByteOffsetsToRange(m, uri, start.Byte, end.Byte)
+}
+
+// ByteOffsetsToRange creates a range spanning start and end.
+func ByteOffsetsToRange(m *protocol.ColumnMapper, uri span.URI, start, end int) (protocol.Range, error) {
+	line, col, err := m.Converter.ToPosition(start)
+	if err != nil {
+		return protocol.Range{}, err
+	}
+	s := span.NewPoint(line, col, start)
+	line, col, err = m.Converter.ToPosition(end)
+	if err != nil {
+		return protocol.Range{}, err
+	}
+	e := span.NewPoint(line, col, end)
+	return m.Range(span.New(uri, s, e))
+}
diff --git a/internal/lsp/source/view.go b/internal/lsp/source/view.go
index 19fca6e..4d7d411 100644
--- a/internal/lsp/source/view.go
+++ b/internal/lsp/source/view.go
@@ -18,6 +18,7 @@
 	"golang.org/x/mod/modfile"
 	"golang.org/x/mod/module"
 	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/imports"
 	"golang.org/x/tools/internal/lsp/progress"
@@ -129,6 +130,12 @@
 	// GoModForFile returns the URI of the go.mod file for the given URI.
 	GoModForFile(uri span.URI) span.URI
 
+	// WorkFile, if non-empty, is the go.work file for the workspace.
+	WorkFile() span.URI
+
+	// ParseWork is used to parse go.work files.
+	ParseWork(ctx context.Context, fh FileHandle) (*ParsedWorkFile, error)
+
 	// BuiltinFile returns information about the special builtin package.
 	BuiltinFile(ctx context.Context) (*ParsedGoFile, error)
 
@@ -137,7 +144,7 @@
 
 	// PackagesForFile returns the packages that this file belongs to, checked
 	// in mode.
-	PackagesForFile(ctx context.Context, uri span.URI, mode TypecheckMode) ([]Package, error)
+	PackagesForFile(ctx context.Context, uri span.URI, mode TypecheckMode, includeTestVariants bool) ([]Package, error)
 
 	// PackageForFile returns a single package that this file belongs to,
 	// checked in mode and filtered by the package policy.
@@ -201,9 +208,6 @@
 	// Normal is appropriate for commands that might be run by a user and don't
 	// deliberately modify go.mod files, e.g. `go test`.
 	Normal InvocationFlags = iota
-	// UpdateUserModFile is for commands that intend to update the user's real
-	// go.mod file, e.g. `go mod tidy` in response to a user's request to tidy.
-	UpdateUserModFile
 	// WriteTemporaryModFile is for commands that need information from a
 	// modified version of the user's go.mod file, e.g. `go mod tidy` used to
 	// generate diagnostics.
@@ -235,10 +239,6 @@
 	// Folder returns the folder with which this view was created.
 	Folder() span.URI
 
-	// TempWorkspace returns the folder this view uses for its temporary
-	// workspace module.
-	TempWorkspace() span.URI
-
 	// Shutdown closes this view, and detaches it from its session.
 	Shutdown(ctx context.Context)
 
@@ -266,6 +266,9 @@
 
 	// RegisterModuleUpgrades registers that upgrades exist for the given modules.
 	RegisterModuleUpgrades(upgrades map[string]string)
+
+	// FileKind returns the type of a file
+	FileKind(FileHandle) FileKind
 }
 
 // A FileSource maps uris to FileHandles. This abstraction exists both for
@@ -297,6 +300,14 @@
 	ParseErrors []*Diagnostic
 }
 
+// A ParsedWorkFile contains the results of parsing a go.work file.
+type ParsedWorkFile struct {
+	URI         span.URI
+	File        *modfile.WorkFile
+	Mapper      *protocol.ColumnMapper
+	ParseErrors []*Diagnostic
+}
+
 // A TidiedModule contains the results of running `go mod tidy` on a module.
 type TidiedModule struct {
 	// Diagnostics representing changes made by `go mod tidy`.
@@ -312,6 +323,9 @@
 
 	// PackagePath is the package path.
 	PackagePath() string
+
+	// ModuleInfo returns the go/packages module information for the given package.
+	ModuleInfo() *packages.Module
 }
 
 // Session represents a single connection from a client.
@@ -325,7 +339,7 @@
 	// non-empty tempWorkspace directory is provided, the View will record a copy
 	// of its gopls workspace module in that directory, so that client tooling
 	// can execute in the same main module.
-	NewView(ctx context.Context, name string, folder, tempWorkspace span.URI, options *Options) (View, Snapshot, func(), error)
+	NewView(ctx context.Context, name string, folder span.URI, options *Options) (View, Snapshot, func(), error)
 
 	// Cache returns the cache that created this session, for debugging only.
 	Cache() interface{}
@@ -373,8 +387,11 @@
 	SetProgressTracker(tracker *progress.Tracker)
 }
 
+var ErrViewExists = errors.New("view already exists for session")
+
 // Overlay is the type for a file held in memory on a session.
 type Overlay interface {
+	Kind() FileKind
 	VersionedFileHandle
 }
 
@@ -496,7 +513,6 @@
 // FileHandle represents a handle to a specific version of a single file.
 type FileHandle interface {
 	URI() span.URI
-	Kind() FileKind
 
 	// FileIdentity returns a FileIdentity for the file, even if there was an
 	// error reading it.
@@ -514,17 +530,14 @@
 
 	// Identifier represents a unique identifier for the file's content.
 	Hash string
-
-	// Kind is the file's kind.
-	Kind FileKind
 }
 
 func (id FileIdentity) String() string {
-	return fmt.Sprintf("%s%s%s", id.URI, id.Hash, id.Kind)
+	return fmt.Sprintf("%s%s", id.URI, id.Hash)
 }
 
 // FileKind describes the kind of the file in question.
-// It can be one of Go, mod, or sum.
+// It can be one of Go, Mod, Sum, Tmpl, or Work.
 type FileKind int
 
 const (
@@ -539,6 +552,8 @@
 	Sum
 	// Tmpl is a template file.
 	Tmpl
+	// Work is a go.work file.
+	Work
 )
 
 // Analyzer represents a go/analysis analyzer with some boolean properties
@@ -643,6 +658,8 @@
 	ModTidyError             DiagnosticSource = "go mod tidy"
 	OptimizationDetailsError DiagnosticSource = "optimizer details"
 	UpgradeNotification      DiagnosticSource = "upgrade available"
+	TemplateError            DiagnosticSource = "template"
+	WorkFileError            DiagnosticSource = "go.work file"
 )
 
 func AnalyzerErrorKind(name string) DiagnosticSource {
diff --git a/internal/lsp/source/workspace_symbol.go b/internal/lsp/source/workspace_symbol.go
index 1f6fd20..d9257c9 100644
--- a/internal/lsp/source/workspace_symbol.go
+++ b/internal/lsp/source/workspace_symbol.go
@@ -8,6 +8,7 @@
 	"context"
 	"fmt"
 	"go/types"
+	"path/filepath"
 	"runtime"
 	"sort"
 	"strings"
@@ -149,7 +150,6 @@
 	matchers   []matcherFunc
 	symbolizer symbolizer
 
-	seen map[span.URI]bool
 	symbolStore
 }
 
@@ -176,9 +176,11 @@
 func buildMatcher(matcher SymbolMatcher, query string) matcherFunc {
 	switch matcher {
 	case SymbolFuzzy:
-		return parseQuery(query)
+		return parseQuery(query, newFuzzyMatcher)
 	case SymbolFastFuzzy:
-		return fuzzy.NewSymbolMatcher(query).Match
+		return parseQuery(query, func(query string) matcherFunc {
+			return fuzzy.NewSymbolMatcher(query).Match
+		})
 	case SymbolCaseSensitive:
 		return matchExact(query)
 	case SymbolCaseInsensitive:
@@ -194,6 +196,18 @@
 	panic(fmt.Errorf("unknown symbol matcher: %v", matcher))
 }
 
+func newFuzzyMatcher(query string) matcherFunc {
+	fm := fuzzy.NewMatcher(query)
+	return func(chunks []string) (int, float64) {
+		score := float64(fm.ScoreChunks(chunks))
+		ranges := fm.MatchedRanges()
+		if len(ranges) > 0 {
+			return ranges[0], score
+		}
+		return -1, score
+	}
+}
+
 // parseQuery parses a field-separated symbol query, extracting the special
 // characters listed below, and returns a matcherFunc corresponding to the AND
 // of all field queries.
@@ -206,7 +220,7 @@
 // In all three of these special queries, matches are 'smart-cased', meaning
 // they are case sensitive if the symbol query contains any upper-case
 // characters, and case insensitive otherwise.
-func parseQuery(q string) matcherFunc {
+func parseQuery(q string, newMatcher func(string) matcherFunc) matcherFunc {
 	fields := strings.Fields(q)
 	if len(fields) == 0 {
 		return func([]string) (int, float64) { return -1, 0 }
@@ -237,15 +251,7 @@
 				return -1, 0
 			})
 		default:
-			fm := fuzzy.NewMatcher(field)
-			f = func(chunks []string) (int, float64) {
-				score := float64(fm.ScoreChunks(chunks))
-				ranges := fm.MatchedRanges()
-				if len(ranges) > 0 {
-					return ranges[0], score
-				}
-				return -1, score
-			}
+			f = newMatcher(field)
 		}
 		funcs = append(funcs, f)
 	}
@@ -296,7 +302,6 @@
 }
 
 func (sc *symbolCollector) walk(ctx context.Context, views []View) ([]protocol.SymbolInformation, error) {
-
 	// Use the root view URIs for determining (lexically) whether a uri is in any
 	// open workspace.
 	var roots []string
@@ -316,14 +321,22 @@
 			return nil, err
 		}
 
+		filters := v.Options().DirectoryFilters
+		folder := filepath.ToSlash(v.Folder().Filename())
 		for uri, syms := range psyms {
+			norm := filepath.ToSlash(uri.Filename())
+			nm := strings.TrimPrefix(norm, folder)
+			if FiltersDisallow(nm, filters) {
+				continue
+			}
 			// Only scan each file once.
 			if _, ok := files[uri]; ok {
 				continue
 			}
 			mds, err := snapshot.MetadataForFile(ctx, uri)
 			if err != nil {
-				return nil, err
+				event.Error(ctx, fmt.Sprintf("missing metadata for %q", uri), err)
+				continue
 			}
 			if len(mds) == 0 {
 				// TODO: should use the bug reporting API
@@ -364,6 +377,28 @@
 	return sc.results(), nil
 }
 
+// FiltersDisallow reports whether path is excluded by the given filters.
+// Its body is copied from cache.pathExcludedByFilter in cache/view.go:
+// exporting and using that function here would cause an import cycle, and
+// moving it here entirely would leave view_test.go behind, so that function
+// now calls this exported copy instead.
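+//
+// For example (illustrative), with filters ["-node_modules"] the path
+// "node_modules/foo" is excluded, while "src/app" is not.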
+func FiltersDisallow(path string, filters []string) bool {
+	path = strings.TrimPrefix(path, "/")
+	var excluded bool
+	for _, filter := range filters {
+		op, prefix := filter[0], filter[1:]
+		// Non-empty prefixes have to be precise directory matches.
+		if prefix != "" {
+			prefix = prefix + "/"
+			path = path + "/"
+		}
+		if !strings.HasPrefix(path, prefix) {
+			continue
+		}
+		excluded = op == '-'
+	}
+	return excluded
+}
+
 // symbolFile holds symbol information for a single file.
 type symbolFile struct {
 	uri  span.URI
@@ -474,7 +509,14 @@
 		return
 	}
 	insertAt := sort.Search(len(sc.res), func(i int) bool {
-		return sc.res[i].score < si.score
+		// Sort by score, then symbol length, and finally lexically.
+		if sc.res[i].score != si.score {
+			return sc.res[i].score < si.score
+		}
+		if len(sc.res[i].symbol) != len(si.symbol) {
+			return len(sc.res[i].symbol) > len(si.symbol)
+		}
+		return sc.res[i].symbol > si.symbol
 	})
 	if insertAt < len(sc.res)-1 {
 		copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1])
diff --git a/internal/lsp/source/workspace_symbol_test.go b/internal/lsp/source/workspace_symbol_test.go
index 89c754d..314ef78 100644
--- a/internal/lsp/source/workspace_symbol_test.go
+++ b/internal/lsp/source/workspace_symbol_test.go
@@ -38,7 +38,7 @@
 	}
 
 	for _, test := range tests {
-		matcher := parseQuery(test.query)
+		matcher := parseQuery(test.query, newFuzzyMatcher)
 		if _, score := matcher([]string{test.s}); score > 0 != test.wantMatch {
 			t.Errorf("parseQuery(%q) match for %q: %.2g, want match: %t", test.query, test.s, score, test.wantMatch)
 		}
diff --git a/internal/lsp/symbols.go b/internal/lsp/symbols.go
index 5bde1bd..f04e457 100644
--- a/internal/lsp/symbols.go
+++ b/internal/lsp/symbols.go
@@ -24,7 +24,7 @@
 		return []interface{}{}, err
 	}
 	var docSymbols []protocol.DocumentSymbol
-	if fh.Kind() == source.Tmpl {
+	if snapshot.View().FileKind(fh) == source.Tmpl {
 		docSymbols, err = template.DocumentSymbols(snapshot, fh)
 	} else {
 		docSymbols, err = source.DocumentSymbols(ctx, snapshot, fh)
diff --git a/internal/lsp/template/completion.go b/internal/lsp/template/completion.go
index a593bf5..13dbdf1 100644
--- a/internal/lsp/template/completion.go
+++ b/internal/lsp/template/completion.go
@@ -5,17 +5,297 @@
 package template
 
 import (
+	"bytes"
 	"context"
 	"fmt"
+	"go/scanner"
+	"go/token"
+	"strings"
 
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
-	"golang.org/x/tools/internal/lsp/source/completion"
 )
 
-func Completion(ctx context.Context, snapshot source.Snapshot, fh source.VersionedFileHandle, pos protocol.Position, context protocol.CompletionContext) ([]completion.CompletionItem, *completion.Selection, error) {
-	if skipTemplates(snapshot) {
-		return nil, nil, nil
+// information needed for completion
+type completer struct {
+	p      *Parsed
+	pos    protocol.Position
+	offset int // offset of the start of the Token
+	ctx    protocol.CompletionContext
+	syms   map[string]symbol
+}
+
+func Completion(ctx context.Context, snapshot source.Snapshot, fh source.VersionedFileHandle, pos protocol.Position, context protocol.CompletionContext) (*protocol.CompletionList, error) {
+	all := New(snapshot.Templates())
+	var start int // the beginning of the Token (completed or not)
+	syms := make(map[string]symbol)
+	var p *Parsed
+	for fn, fc := range all.files {
+		// collect symbols from all template files
+		filterSyms(syms, fc.symbols)
+		if fn.Filename() != fh.URI().Filename() {
+			continue
+		}
+		if start = inTemplate(fc, pos); start == -1 {
+			return nil, nil
+		}
+		p = fc
 	}
-	return nil, nil, fmt.Errorf("implement template completion")
+	if p == nil {
+		// this cannot happen unless the search missed a template file
+		return nil, fmt.Errorf("%s not found", fh.FileIdentity().URI.Filename())
+	}
+	c := completer{
+		p:      p,
+		pos:    pos,
+		offset: start + len(Left),
+		ctx:    context,
+		syms:   syms,
+	}
+	return c.complete()
+}
+
+func filterSyms(syms map[string]symbol, ns []symbol) {
+	for _, xsym := range ns {
+		switch xsym.kind {
+		case protocol.Method, protocol.Package, protocol.Boolean, protocol.Namespace,
+			protocol.Function:
+			syms[xsym.name] = xsym // we don't care which symbol we get
+		case protocol.Variable:
+			if xsym.name != "dot" {
+				syms[xsym.name] = xsym
+			}
+		case protocol.Constant:
+			if xsym.name == "nil" {
+				syms[xsym.name] = xsym
+			}
+		}
+	}
+}
+
+// inTemplate returns the starting offset of the enclosing token, or -1 if none.
+func inTemplate(fc *Parsed, pos protocol.Position) int {
+	// pos is the pos-th character; if the cursor is at the beginning of the
+	// file, pos is 0. That is, we've only seen the characters before pos.
+	// 1. pos might be inside a Token: return tk.Start.
+	// 2. pos might be after an elided {{ but before a Token: return that offset.
+	// 3. otherwise, return -1.
+	offset := fc.FromPosition(pos)
+	// this could be a binary search, as the tokens are ordered
+	for _, tk := range fc.tokens {
+		if tk.Start < offset && offset <= tk.End {
+			return tk.Start
+		}
+	}
+	for _, x := range fc.elided {
+		if x > offset {
+			// fc.elided is sorted
+			break
+		}
+		// If the interval [x,offset] does not contain Left or Right
+		// then provide completions. (do we need the test for Right?)
+		if !bytes.Contains(fc.buf[x:offset], []byte(Left)) && !bytes.Contains(fc.buf[x:offset], []byte(Right)) {
+			return x
+		}
+	}
+	return -1
+}
+
+var (
+	keywords = []string{"if", "with", "else", "block", "range", "template", "end}}", "end"}
+	globals  = []string{"and", "call", "html", "index", "slice", "js", "len", "not", "or",
+		"urlquery", "printf", "println", "print", "eq", "ne", "le", "lt", "ge", "gt"}
+)
+
+// find the completions. start is the offset of either the Token enclosing pos, or where
+// the incomplete token starts.
+// The error return is always nil.
+func (c *completer) complete() (*protocol.CompletionList, error) {
+	ans := &protocol.CompletionList{IsIncomplete: true, Items: []protocol.CompletionItem{}}
+	start := c.p.FromPosition(c.pos)
+	sofar := c.p.buf[c.offset:start]
+	if len(sofar) == 0 || sofar[len(sofar)-1] == ' ' || sofar[len(sofar)-1] == '\t' {
+		return ans, nil
+	}
+	// sofar could be parsed by either c.analyze() or scan(). The latter is precise
+	// and slower, but fast enough.
+	words := scan(sofar)
+	// 1. if the pattern starts with $, show variables
+	// 2. if the pattern starts with ., show methods (and . by itself?)
+	// 3. if len(words) == 1, show firstWords (but if it were a |, show functions and globals)
+	// 4. ...? (parenthetical expressions, arguments, ...) (packages, namespaces, nil?)
+	if len(words) == 0 {
+		return nil, nil // if this happens, why were we called?
+	}
+	pattern := string(words[len(words)-1])
+	if pattern[0] == '$' {
+		// should we also return a raw "$"?
+		for _, s := range c.syms {
+			if s.kind == protocol.Variable && weakMatch(s.name, pattern) > 0 {
+				ans.Items = append(ans.Items, protocol.CompletionItem{
+					Label:  s.name,
+					Kind:   protocol.VariableCompletion,
+					Detail: "Variable",
+				})
+			}
+		}
+		return ans, nil
+	}
+	if pattern[0] == '.' {
+		for _, s := range c.syms {
+			if s.kind == protocol.Method && weakMatch("."+s.name, pattern) > 0 {
+				ans.Items = append(ans.Items, protocol.CompletionItem{
+					Label:  s.name,
+					Kind:   protocol.MethodCompletion,
+					Detail: "Method/member",
+				})
+			}
+		}
+		return ans, nil
+	}
+	// could we get completion attempts in strings or numbers, and if so, do we care?
+	// globals
+	for _, kw := range globals {
+		if weakMatch(kw, string(pattern)) != 0 {
+			ans.Items = append(ans.Items, protocol.CompletionItem{
+				Label:  kw,
+				Kind:   protocol.KeywordCompletion,
+				Detail: "Function",
+			})
+		}
+	}
+	// and functions
+	for _, s := range c.syms {
+		if s.kind == protocol.Function && weakMatch(s.name, pattern) != 0 {
+			ans.Items = append(ans.Items, protocol.CompletionItem{
+				Label:  s.name,
+				Kind:   protocol.FunctionCompletion,
+				Detail: "Function",
+			})
+		}
+	}
+	// keywords if we're at the beginning
+	if len(words) <= 1 || len(words[len(words)-2]) == 1 && words[len(words)-2][0] == '|' {
+		for _, kw := range keywords {
+			if weakMatch(kw, string(pattern)) != 0 {
+				ans.Items = append(ans.Items, protocol.CompletionItem{
+					Label:  kw,
+					Kind:   protocol.KeywordCompletion,
+					Detail: "keyword",
+				})
+			}
+		}
+	}
+	return ans, nil
+}
+
+// someday think about comments, strings, backslashes, etc
+// this would repeat some of the template parsing, but because the user is typing
+// there may be no parse tree here.
+// (go/scanner will report 2 tokens for $a, as $ is not a legal go identifier character)
+// (go/scanner is about 2.7 times more expensive)
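+// For example (illustrative), c.analyze([]byte("a.b c")) yields ["a", ".b", "c"].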
+func (c *completer) analyze(buf []byte) [][]byte {
+	// we want to split on whitespace and before dots
+	var working []byte
+	var ans [][]byte
+	for _, ch := range buf {
+		if ch == '.' && len(working) > 0 {
+			ans = append(ans, working)
+			working = []byte{'.'}
+			continue
+		}
+		if ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' {
+			if len(working) > 0 {
+				ans = append(ans, working)
+				working = []byte{}
+				continue
+			}
+		}
+		working = append(working, ch)
+	}
+	if len(working) > 0 {
+		ans = append(ans, working)
+	}
+	ch := buf[len(buf)-1]
+	if ch == ' ' || ch == '\t' {
+		// avoid completing on whitespace
+		ans = append(ans, []byte{ch})
+	}
+	return ans
+}
+
+// version of c.analyze that uses go/scanner.
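+// For example (illustrative), scan([]byte(".foo $bar")) yields [".foo", "$bar"].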
+func scan(buf []byte) []string {
+	fset := token.NewFileSet()
+	fp := fset.AddFile("", -1, len(buf))
+	var sc scanner.Scanner
+	sc.Init(fp, buf, func(pos token.Position, msg string) {}, scanner.ScanComments)
+	ans := make([]string, 0, 10) // preallocating gives a measurable savings
+	for {
+		_, tok, lit := sc.Scan() // tok is an int
+		if tok == token.EOF {
+			break // done
+		} else if tok == token.SEMICOLON && lit == "\n" {
+			continue // don't care, but probably can't happen
+		} else if tok == token.PERIOD {
+			ans = append(ans, ".") // lit is empty
+		} else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "." {
+			ans[len(ans)-1] = "." + lit
+		} else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "$" {
+			ans[len(ans)-1] = "$" + lit
+		} else if lit != "" {
+			ans = append(ans, lit)
+		}
+	}
+	return ans
+}
+
+// pattern is what the user has typed
+func weakMatch(choice, pattern string) float64 {
+	lower := strings.ToLower(choice)
+	// for now, use only lower-case everywhere
+	pattern = strings.ToLower(pattern)
+	// The first char has to match
+	if pattern[0] != lower[0] {
+		return 0
+	}
+	// If they start with ., then the second char has to match
+	from := 1
+	if pattern[0] == '.' {
+		if len(pattern) < 2 {
+			return 1 // pattern just a ., so it matches
+		}
+		if pattern[1] != lower[1] {
+			return 0
+		}
+		from = 2
+	}
+	// check that all the characters of pattern occur as a subsequence of choice
+	i, j := from, from
+	for ; i < len(lower) && j < len(pattern); j++ {
+		if pattern[j] == lower[i] {
+			i++
+			if i >= len(lower) {
+				return 0
+			}
+		}
+	}
+	if j < len(pattern) {
+		return 0
+	}
+	return 1
+}
+
+// for debug printing
+func strContext(c protocol.CompletionContext) string {
+	switch c.TriggerKind {
+	case protocol.Invoked:
+		return "invoked"
+	case protocol.TriggerCharacter:
+		return fmt.Sprintf("triggered(%s)", c.TriggerCharacter)
+	case protocol.TriggerForIncompleteCompletions:
+		// gopls doesn't seem to handle these explicitly anywhere
+		return "incomplete"
+	}
+	return fmt.Sprintf("?%v", c)
 }
diff --git a/internal/lsp/template/completion_test.go b/internal/lsp/template/completion_test.go
new file mode 100644
index 0000000..bfcdb53
--- /dev/null
+++ b/internal/lsp/template/completion_test.go
@@ -0,0 +1,102 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+	"log"
+	"sort"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+)
+
+func init() {
+	log.SetFlags(log.Lshortfile)
+}
+
+type tparse struct {
+	marked string   // ^ shows where to ask for completions. (The user just typed the following character.)
+	wanted []string // expected completions
+}
+
+// Test completions in templates that parse enough (if completion needs symbols)
+// Seen characters up to the ^
+func TestParsed(t *testing.T) {
+	var tests = []tparse{
+		{"{{x}}{{12. xx^", nil}, // https://github.com/golang/go/issues/50430
+		{`<table class="chroma" data-new-comment-url="{{if $.PageIsPullFiles}}{{$.Issue.HTMLURL}}/files/reviews/new_comment{{else}}{{$.CommitHTML}}/new_comment^{{end}}">`, nil},
+		{"{{i^f}}", []string{"index", "if"}},
+		{"{{if .}}{{e^ {{end}}", []string{"eq", "end}}", "else", "end"}},
+		{"{{foo}}{{f^", []string{"foo"}},
+		{"{{$^}}", []string{"$"}},
+		{"{{$x:=4}}{{$^", []string{"$x"}},
+		{"{{$x:=4}}{{$ ^ ", []string{}},
+		{"{{len .Modified}}{{.^Mo", []string{"Modified"}},
+		{"{{len .Modified}}{{.mf^", []string{"Modified"}},
+		{"{{$^ }}", []string{"$"}},
+		{"{{$a =3}}{{$^", []string{"$a"}},
+		// .two is not good here: fix someday
+		{`{{.Modified}}{{.^{{if $.one.two}}xxx{{end}}`, []string{"Modified", "one", "two"}},
+		{`{{.Modified}}{{.o^{{if $.one.two}}xxx{{end}}`, []string{"one"}},
+		{"{{.Modiifed}}{{.one.t^{{if $.one.two}}xxx{{end}}", []string{"two"}},
+		{`{{block "foo" .}}{{i^`, []string{"index", "if"}},
+		{"{{in^{{Internal}}", []string{"index", "Internal", "if"}},
+		// simple number has no completions
+		{"{{4^e", []string{}},
+		// simple string has no completions
+		{"{{`e^", []string{}},
+		{"{{`No i^", []string{}}, // example of why go/scanner is used
+		{"{{xavier}}{{12. x^", []string{"xavier"}},
+	}
+	for _, tx := range tests {
+		c := testCompleter(t, tx)
+		var v []string
+		if c != nil {
+			ans, _ := c.complete()
+			for _, a := range ans.Items {
+				v = append(v, a.Label)
+			}
+		}
+		if len(v) != len(tx.wanted) {
+			t.Errorf("%q: got %q, wanted %q %d,%d", tx.marked, v, tx.wanted, len(v), len(tx.wanted))
+			continue
+		}
+		sort.Strings(tx.wanted)
+		sort.Strings(v)
+		for i := 0; i < len(v); i++ {
+			if tx.wanted[i] != v[i] {
+				t.Errorf("%q at %d: got %v, wanted %v", tx.marked, i, v, tx.wanted)
+				break
+			}
+		}
+	}
+}
+
+func testCompleter(t *testing.T, tx tparse) *completer {
+	t.Helper()
+	// seen chars up to ^
+	col := strings.Index(tx.marked, "^")
+	buf := strings.Replace(tx.marked, "^", "", 1)
+	p := parseBuffer([]byte(buf))
+	pos := protocol.Position{Line: 0, Character: uint32(col)}
+	if p.ParseErr != nil {
+		log.Printf("%q: %v", tx.marked, p.ParseErr)
+	}
+	offset := inTemplate(p, pos)
+	if offset == -1 {
+		return nil
+	}
+	syms := make(map[string]symbol)
+	filterSyms(syms, p.symbols)
+	c := &completer{
+		p:      p,
+		pos:    protocol.Position{Line: 0, Character: uint32(col)},
+		offset: offset + len(Left),
+		ctx:    protocol.CompletionContext{TriggerKind: protocol.Invoked},
+		syms:   syms,
+	}
+	return c
+}
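
The tests above encode the caret position directly in the test string and strip it before parsing. A small self-contained sketch of that convention (hypothetical helper name; ASCII-only input assumed, since strings.Index returns a byte offset):

```go
package main

import (
	"fmt"
	"strings"
)

// splitMarked removes the first "^" from a marked test string and reports
// the (line 0) column at which completion should be requested.
// It assumes ASCII input, so the byte offset equals the character column.
func splitMarked(marked string) (buf string, col int) {
	col = strings.Index(marked, "^")
	buf = strings.Replace(marked, "^", "", 1)
	return buf, col
}

func main() {
	buf, col := splitMarked("{{i^f}}")
	fmt.Printf("buffer=%q column=%d\n", buf, col) // buffer="{{if}}" column=3
}
```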
diff --git a/internal/lsp/template/highlight.go b/internal/lsp/template/highlight.go
index 65256fc..a45abaf 100644
--- a/internal/lsp/template/highlight.go
+++ b/internal/lsp/template/highlight.go
@@ -14,9 +14,6 @@
 )
 
 func Highlight(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, loc protocol.Position) ([]protocol.DocumentHighlight, error) {
-	if skipTemplates(snapshot) {
-		return nil, nil
-	}
 	buf, err := fh.Read()
 	if err != nil {
 		return nil, err
diff --git a/internal/lsp/template/implementations.go b/internal/lsp/template/implementations.go
index 6c57a68..1de9888 100644
--- a/internal/lsp/template/implementations.go
+++ b/internal/lsp/template/implementations.go
@@ -29,7 +29,8 @@
 	if err != nil {
 		// Is a Diagnostic with no Range useful? event.Error also?
 		msg := fmt.Sprintf("failed to read %s (%v)", f.URI().Filename(), err)
-		d := source.Diagnostic{Message: msg, Severity: protocol.SeverityError, URI: f.URI()}
+		d := source.Diagnostic{Message: msg, Severity: protocol.SeverityError, URI: f.URI(),
+			Source: source.TemplateError}
 		return []*source.Diagnostic{&d}
 	}
 	p := parseBuffer(buf)
@@ -38,7 +39,9 @@
 	}
 	unknownError := func(msg string) []*source.Diagnostic {
 		s := fmt.Sprintf("malformed template error %q: %s", p.ParseErr.Error(), msg)
-		d := source.Diagnostic{Message: s, Severity: protocol.SeverityError, Range: p.Range(p.nls[0], 1), URI: f.URI()}
+		d := source.Diagnostic{
+			Message: s, Severity: protocol.SeverityError, Range: p.Range(p.nls[0], 1),
+			URI: f.URI(), Source: source.TemplateError}
 		return []*source.Diagnostic{&d}
 	}
 	// errors look like `template: :40: unexpected "}" in operand`
@@ -54,7 +57,8 @@
 		return unknownError(msg)
 	}
 	msg := matches[2]
-	d := source.Diagnostic{Message: msg, Severity: protocol.SeverityError}
+	d := source.Diagnostic{Message: msg, Severity: protocol.SeverityError,
+		Source: source.TemplateError}
 	start := p.nls[lineno-1]
 	if lineno < len(p.nls) {
 		size := p.nls[lineno] - start
@@ -65,18 +69,11 @@
 	return []*source.Diagnostic{&d}
 }
 
-func skipTemplates(s source.Snapshot) bool {
-	return !s.View().Options().ExperimentalTemplateSupport
-}
-
 // Definition finds the definitions of the symbol at loc. It
 // does not understand scoping (if any) in templates. This code is
 // for definitions, type definitions, and implementations.
 // Results only for variables and templates.
 func Definition(snapshot source.Snapshot, fh source.VersionedFileHandle, loc protocol.Position) ([]protocol.Location, error) {
-	if skipTemplates(snapshot) {
-		return nil, nil
-	}
 	x, _, err := symAtPosition(fh, loc)
 	if err != nil {
 		return nil, err
@@ -97,9 +94,6 @@
 }
 
 func Hover(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, position protocol.Position) (*protocol.Hover, error) {
-	if skipTemplates(snapshot) {
-		return nil, nil
-	}
 	sym, p, err := symAtPosition(fh, position)
 	if sym == nil || err != nil {
 		return nil, err
@@ -118,6 +112,12 @@
 		ans.Contents.Value = fmt.Sprintf("template %s\n(add definition)", sym.name)
 	case protocol.Namespace:
 		ans.Contents.Value = fmt.Sprintf("template %s defined", sym.name)
+	case protocol.Number:
+		ans.Contents.Value = "number"
+	case protocol.String:
+		ans.Contents.Value = "string"
+	case protocol.Boolean:
+		ans.Contents.Value = "boolean"
 	default:
 		ans.Contents.Value = fmt.Sprintf("oops, sym=%#v", sym)
 	}
@@ -125,9 +125,6 @@
 }
 
 func References(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, params *protocol.ReferenceParams) ([]protocol.Location, error) {
-	if skipTemplates(snapshot) {
-		return nil, nil
-	}
 	sym, _, err := symAtPosition(fh, params.Position)
 	if sym == nil || err != nil || sym.name == "" {
 		return nil, err
@@ -151,9 +148,6 @@
 }
 
 func SemanticTokens(ctx context.Context, snapshot source.Snapshot, spn span.URI, add func(line, start, len uint32), d func() []uint32) (*protocol.SemanticTokens, error) {
-	if skipTemplates(snapshot) {
-		return nil, nil
-	}
 	fh, err := snapshot.GetFile(ctx, spn)
 	if err != nil {
 		return nil, err
@@ -163,9 +157,7 @@
 		return nil, err
 	}
 	p := parseBuffer(buf)
-	if p.ParseErr != nil {
-		return nil, p.ParseErr
-	}
+
 	for _, t := range p.Tokens() {
 		if t.Multiline {
 			la, ca := p.LineCol(t.Start)
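
LSP semantic tokens are reported line by line, so a token that spans several lines has to be emitted as one entry per line, which is what the Multiline branch above does via LineCol. A hedged sketch of that splitting, with the concrete types and the line-length helper simplified to assumptions:

```go
package main

import "fmt"

// token is a simplified stand-in for a template token: its start line/column
// and, for multiline tokens, the line/column where it ends.
type token struct {
	startLine, startCol uint32
	endLine, endCol     uint32
}

// lineLen reports the length (in characters) of line l; here it is a fixed
// stub, whereas the real code would derive it from the parsed buffer.
func lineLen(l uint32) uint32 { return 40 }

// emit splits a (possibly multiline) token into one add() call per line,
// which is the per-line shape the LSP semantic-token encoding expects.
func emit(t token, add func(line, start, length uint32)) {
	if t.startLine == t.endLine {
		add(t.startLine, t.startCol, t.endCol-t.startCol)
		return
	}
	add(t.startLine, t.startCol, lineLen(t.startLine)-t.startCol) // rest of the first line
	for l := t.startLine + 1; l < t.endLine; l++ {
		add(l, 0, lineLen(l)) // whole intermediate lines
	}
	add(t.endLine, 0, t.endCol) // beginning of the last line
}

func main() {
	emit(token{startLine: 2, startCol: 5, endLine: 4, endCol: 7},
		func(line, start, length uint32) {
			fmt.Printf("line=%d start=%d len=%d\n", line, start, length)
		})
}
```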
diff --git a/internal/lsp/template/parse.go b/internal/lsp/template/parse.go
index 25c80b5..194eeb3 100644
--- a/internal/lsp/template/parse.go
+++ b/internal/lsp/template/parse.go
@@ -28,7 +28,6 @@
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/span"
-	errors "golang.org/x/xerrors"
 )
 
 var (
@@ -91,7 +90,6 @@
 		nls:   []int{-1},
 	}
 	if len(buf) == 0 {
-		ans.ParseErr = errors.New("empty buffer")
 		return ans
 	}
 	// how to compute allAscii...
@@ -293,11 +291,12 @@
 	return ans, nil
 }
 
-// RuneCount counts runes in a line
+// RuneCount counts the runes in line l, from column s to column e
+// (e==0 means to the end of the line; it is called only for multiline tokens).
 func (p *Parsed) RuneCount(l, s, e uint32) uint32 {
 	start := p.nls[l] + 1 + int(s)
-	end := int(e)
-	if e == 0 || int(e) >= p.nls[l+1] {
+	end := p.nls[l] + 1 + int(e)
+	if e == 0 || end > p.nls[l+1] {
 		end = p.nls[l+1]
 	}
 	return uint32(utf8.RuneCount(p.buf[start:end]))
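
The corrected bounds above matter because the length reported to the client is measured in runes (characters), not bytes. A tiny illustration of the difference, using only the standard library:

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	line := []byte("foo ∑ bar") // '∑' is 3 bytes but 1 rune
	fmt.Println(len(line))            // 11 bytes
	fmt.Println(utf8.RuneCount(line)) // 9 runes, which is what an editor column expects
}
```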
@@ -357,6 +356,10 @@
 // FromPosition translates a protocol.Position into an offset into the template
 func (p *Parsed) FromPosition(x protocol.Position) int {
 	l, c := int(x.Line), int(x.Character)
+	if l >= len(p.nls) || p.nls[l]+1 >= len(p.buf) {
+		// Paranoia to avoid a panic: return the largest offset.
+		return len(p.buf)
+	}
 	line := p.buf[p.nls[l]+1:]
 	cnt := 0
 	for w := range string(line) {
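
FromPosition walks the runes of the target line to turn a protocol (line, character) pair into a byte offset. A self-contained sketch of that translation, assuming (as the loop above does) that the character count is in runes and clamping out-of-range positions much like the new bounds check:

```go
package main

import (
	"bytes"
	"fmt"
)

// byteOffset converts a 0-based (line, character) position into a byte offset
// in buf, counting characters as runes and treating '\n' as the line separator.
func byteOffset(buf []byte, line, character int) int {
	start := 0
	for l := 0; l < line; l++ {
		nl := bytes.IndexByte(buf[start:], '\n')
		if nl < 0 {
			return len(buf) // fewer lines than requested: clamp to the end
		}
		start += nl + 1
	}
	cnt := 0
	for i, r := range string(buf[start:]) { // i advances by whole runes
		if cnt == character {
			return start + i
		}
		if r == '\n' { // ran off the end of the line: clamp to it
			return start + i
		}
		cnt++
	}
	return len(buf)
}

func main() {
	buf := []byte("{{if .}}\n{{∑}}\n")
	fmt.Println(byteOffset(buf, 1, 3)) // 14: the '}' after the 3-byte '∑' on line 1
}
```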
diff --git a/internal/lsp/template/parse_test.go b/internal/lsp/template/parse_test.go
index db60989..345f523 100644
--- a/internal/lsp/template/parse_test.go
+++ b/internal/lsp/template/parse_test.go
@@ -33,6 +33,7 @@
 
 	{`{{block "aaa" foo}}b{{end}}`, 2, []string{"{9,3,aaa,Namespace,true}",
 		"{9,3,aaa,Package,false}", "{14,3,foo,Function,false}", "{19,1,,Constant,false}"}},
+	{"", 0, nil},
 }
 
 func TestSymbols(t *testing.T) {
diff --git a/internal/lsp/template/symbols.go b/internal/lsp/template/symbols.go
index 856f6e3..ce5a1e7 100644
--- a/internal/lsp/template/symbols.go
+++ b/internal/lsp/template/symbols.go
@@ -6,10 +6,12 @@
 
 import (
 	"bytes"
+	"context"
 	"fmt"
 	"text/template/parse"
 	"unicode/utf8"
 
+	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 )
@@ -49,10 +51,16 @@
 			lookfor += "." + f // quadratic, but probably ok
 		}
 	default:
-		panic(fmt.Sprintf("%T unexpected in fields()", x))
+		// If these happen, they will happen even if gopls is restarted
+		// and the user does the same thing, so it is better not to panic.
+		// context.Background() is used because we don't have access
+		// to any other context. [We could, but it would be complicated.]
+		event.Log(context.Background(), fmt.Sprintf("%T unexpected in fields()", x))
+		return nil
 	}
 	if len(lookfor) == 0 {
-		panic(fmt.Sprintf("no strings in fields() %#v", x))
+		event.Log(context.Background(), fmt.Sprintf("no strings in fields() %#v", x))
+		return nil
 	}
 	startsAt := int(x.Position())
 	ix := bytes.Index(p.buf[startsAt:], []byte(lookfor)) // HasPrefix? PJW?
@@ -183,12 +191,9 @@
 	pop()
 }
 
-// DocumentSymbols returns a heirarchy of the symbols defined in a template file.
-// (The heirarchy is flat. SymbolInformation might be better.)
+// DocumentSymbols returns a hierarchy of the symbols defined in a template file.
+// (The hierarchy is flat. SymbolInformation might be better.)
 func DocumentSymbols(snapshot source.Snapshot, fh source.FileHandle) ([]protocol.DocumentSymbol, error) {
-	if skipTemplates(snapshot) {
-		return nil, nil
-	}
 	buf, err := fh.Read()
 	if err != nil {
 		return nil, err
diff --git a/internal/lsp/testdata/basiclit/basiclit.go b/internal/lsp/testdata/basiclit/basiclit.go
index ab895dc..9829003 100644
--- a/internal/lsp/testdata/basiclit/basiclit.go
+++ b/internal/lsp/testdata/basiclit/basiclit.go
@@ -10,4 +10,47 @@
 	_ = 1. //@complete(".")
 
 	_ = 'a' //@complete("' ")
+
+	_ = 'a' //@hover("'a'", "'a', U+0061, LATIN SMALL LETTER A")
+	_ = 0x61 //@hover("0x61", "'a', U+0061, LATIN SMALL LETTER A")
+
+	_ = '\u2211' //@hover("'\\u2211'", "'∑', U+2211, N-ARY SUMMATION")
+	_ = 0x2211 //@hover("0x2211", "'∑', U+2211, N-ARY SUMMATION")
+	_ = "foo \u2211 bar" //@hover("\\u2211", "'∑', U+2211, N-ARY SUMMATION")
+
+	_ = '\a' //@hover("'\\a'", "U+0007, control")
+	_ = "foo \a bar" //@hover("\\a", "U+0007, control")
+
+	_ = '\U0001F30A' //@hover("'\\U0001F30A'", "'🌊', U+1F30A, WATER WAVE")
+	_ = 0x0001F30A //@hover("0x0001F30A", "'🌊', U+1F30A, WATER WAVE")
+	_ = "foo \U0001F30A bar" //@hover("\\U0001F30A", "'🌊', U+1F30A, WATER WAVE")
+
+	_ = '\x7E' //@hover("'\\x7E'", "'~', U+007E, TILDE")
+	_ = "foo \x7E bar" //@hover("\\x7E", "'~', U+007E, TILDE")
+	_ = "foo \a bar" //@hover("\\a", "U+0007, control")
+
+	_ = '\173' //@hover("'\\173'", "'{', U+007B, LEFT CURLY BRACKET")
+	_ = "foo \173 bar" //@hover("\\173", "'{', U+007B, LEFT CURLY BRACKET")
+	_ = "foo \173 bar \u2211 baz" //@hover("\\173", "'{', U+007B, LEFT CURLY BRACKET")
+	_ = "foo \173 bar \u2211 baz" //@hover("\\u2211", "'∑', U+2211, N-ARY SUMMATION")
+	_ = "foo\173bar\u2211baz" //@hover("\\173", "'{', U+007B, LEFT CURLY BRACKET")
+	_ = "foo\173bar\u2211baz" //@hover("\\u2211", "'∑', U+2211, N-ARY SUMMATION")
+
+	// search for runes in the string only if there is an escape sequence
+	_ = "hello" //@hover("\"hello\"", "")
+
+	// incorrect escaped rune sequences
+	_ = '\0' //@hover("'\\0'", "")
+	_ = '\u22111' //@hover("'\\u22111'", "")
+	_ = '\U00110000' //@hover("'\\U00110000'", "")
+	_ = '\u12e45'//@hover("'\\u12e45'", "")
+	_ = '\xa' //@hover("'\\xa'", "")
+	_ = 'aa' //@hover("'aa'", "")
+
+	// other basic lits
+	_ = 1 //@hover("1", "")
+	_ = 1.2 //@hover("1.2", "")
+	_ = 1.2i //@hover("1.2i", "")
+	_ = 0123 //@hover("0123", "")
+	_ = 0x1234567890 //@hover("0x1234567890", "")
 }
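
The hover strings expected above all follow the same "rune, U+XXXX, NAME" shape. Producing the codepoint part is plain fmt formatting; the Unicode name itself comes from a names table, which this hedged sketch deliberately omits:

```go
package main

import "fmt"

func main() {
	for _, r := range []rune{'a', '∑', '🌊'} {
		// %c prints the rune itself, %04X its codepoint padded to at least 4 hex digits.
		fmt.Printf("'%c', U+%04X\n", r, r) // 'a', U+0061  '∑', U+2211  '🌊', U+1F30A
	}
}
```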
diff --git a/internal/lsp/testdata/cgo/declarecgo.go.golden b/internal/lsp/testdata/cgo/declarecgo.go.golden
index 773f3b7..b6d94d0 100644
--- a/internal/lsp/testdata/cgo/declarecgo.go.golden
+++ b/internal/lsp/testdata/cgo/declarecgo.go.golden
@@ -22,7 +22,7 @@
 	"description": "```go\nfunc Example()\n```\n\n[`cgo.Example` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/cgo?utm_source=gopls#Example)"
 }
 
--- funccgoexample-hover --
+-- funccgoexample-hoverdef --
 ```go
 func Example()
 ```
diff --git a/internal/lsp/testdata/cgoimport/usecgo.go.golden b/internal/lsp/testdata/cgoimport/usecgo.go.golden
index 8f7518a..f33f94f 100644
--- a/internal/lsp/testdata/cgoimport/usecgo.go.golden
+++ b/internal/lsp/testdata/cgoimport/usecgo.go.golden
@@ -22,7 +22,7 @@
 	"description": "```go\nfunc cgo.Example()\n```\n\n[`cgo.Example` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/cgo?utm_source=gopls#Example)"
 }
 
--- funccgoexample-hover --
+-- funccgoexample-hoverdef --
 ```go
 func cgo.Example()
 ```
diff --git a/internal/lsp/testdata/extract/extract_function/extract_basic.go b/internal/lsp/testdata/extract/extract_function/extract_basic.go
index b5b9efd..5e44de2 100644
--- a/internal/lsp/testdata/extract/extract_function/extract_basic.go
+++ b/internal/lsp/testdata/extract/extract_function/extract_basic.go
@@ -1,7 +1,8 @@
 package extract
 
-func _() {
+func _() { //@mark(exSt25, "{")
 	a := 1    //@mark(exSt1, "a")
 	_ = 3 + 4 //@mark(exEn1, "4")
 	//@extractfunc(exSt1, exEn1)
-}
+	//@extractfunc(exSt25, exEn25)
+} //@mark(exEn25, "}")
diff --git a/internal/lsp/testdata/extract/extract_function/extract_basic.go.golden b/internal/lsp/testdata/extract/extract_function/extract_basic.go.golden
index ba40ff2..18adc4d 100644
--- a/internal/lsp/testdata/extract/extract_function/extract_basic.go.golden
+++ b/internal/lsp/testdata/extract/extract_function/extract_basic.go.golden
@@ -1,14 +1,30 @@
--- functionextraction_extract_basic_4_2 --
+-- functionextraction_extract_basic_3_10 --
 package extract
 
-func _() {
+func _() { //@mark(exSt25, "{")
 	//@mark(exSt1, "a")
 	newFunction() //@mark(exEn1, "4")
 	//@extractfunc(exSt1, exEn1)
+	//@extractfunc(exSt25, exEn25)
 }
 
 func newFunction() {
 	a := 1
 	_ = 3 + 4
+} //@mark(exEn25, "}")
+
+-- functionextraction_extract_basic_4_2 --
+package extract
+
+func _() { //@mark(exSt25, "{")
+	//@mark(exSt1, "a")
+	newFunction() //@mark(exEn1, "4")
+	//@extractfunc(exSt1, exEn1)
+	//@extractfunc(exSt25, exEn25)
 }
 
+func newFunction() {
+	a := 1
+	_ = 3 + 4
+} //@mark(exEn25, "}")
+
diff --git a/internal/lsp/testdata/folding/a.go.golden b/internal/lsp/testdata/folding/a.go.golden
index 59c97ad..ce69102 100644
--- a/internal/lsp/testdata/folding/a.go.golden
+++ b/internal/lsp/testdata/folding/a.go.golden
@@ -34,9 +34,7 @@
 	x, y := make(<>), make(<>)
 	select {<>}
 	// This is a multiline comment<>
-	return `
-this string
-is not indented`
+	return <>
 }
 
 -- foldingRange-2 --
@@ -283,6 +281,7 @@
 67:10-68:24
 68:15-68:23
 70:32-71:30
+72:9-74:16
 
 -- foldingRange-comment-0 --
 package folding //@fold("package")
@@ -474,9 +473,7 @@
 	select {<>
 	}
 	// This is a multiline comment<>
-	return `
-this string
-is not indented`
+	return <>
 }
 
 -- foldingRange-lineFolding-2 --
diff --git a/internal/lsp/testdata/func_rank/func_rank.go.in b/internal/lsp/testdata/func_rank/func_rank.go.in
index 3706009..905010b 100644
--- a/internal/lsp/testdata/func_rank/func_rank.go.in
+++ b/internal/lsp/testdata/func_rank/func_rank.go.in
@@ -63,7 +63,7 @@
 }
 
 func _() {
-	HandleFunc //@item(httpHandleFunc, "HandleFunc", "func(pattern string, handler func(ResponseWriter, *Request))", "func")
+	HandleFunc //@item(httpHandleFunc, "HandleFunc", "func(pattern string, handler func(http.ResponseWriter, *http.Request))", "func")
 	HandlerFunc //@item(httpHandlerFunc, "HandlerFunc", "func(http.ResponseWriter, *http.Request)", "type")
 
 	http.HandleFunc //@rank(" //", httpHandleFunc, httpHandlerFunc)
diff --git a/internal/lsp/testdata/godef/a/a.go b/internal/lsp/testdata/godef/a/a.go
index 993fd86..5cc8552 100644
--- a/internal/lsp/testdata/godef/a/a.go
+++ b/internal/lsp/testdata/godef/a/a.go
@@ -1,5 +1,5 @@
 // Package a is a package for testing go to definition.
-package a //@mark(aPackage, "a "),hover("a ", aPackage)
+package a //@mark(aPackage, "a "),hoverdef("a ", aPackage)
 
 import (
 	"fmt"
@@ -9,19 +9,19 @@
 
 var (
 	// x is a variable.
-	x string //@x,hover("x", x)
+	x string //@x,hoverdef("x", x)
 )
 
 // Constant block. When I hover on h, I should see this comment.
 const (
 	// When I hover on g, I should see this comment.
-	g = 1 //@g,hover("g", g)
+	g = 1 //@g,hoverdef("g", g)
 
-	h = 2 //@h,hover("h", h)
+	h = 2 //@h,hoverdef("h", h)
 )
 
 // z is a variable too.
-var z string //@z,hover("z", z)
+var z string //@z,hoverdef("z", z)
 
 type A string //@mark(AString, "A")
 
@@ -33,14 +33,14 @@
 	var err error         //@err
 	fmt.Printf("%v", err) //@godef("err", err)
 
-	var y string       //@string,hover("string", string)
-	_ = make([]int, 0) //@make,hover("make", make)
+	var y string       //@string,hoverdef("string", string)
+	_ = make([]int, 0) //@make,hoverdef("make", make)
 
 	var mu sync.Mutex
-	mu.Lock() //@Lock,hover("Lock", Lock)
+	mu.Lock() //@Lock,hoverdef("Lock", Lock)
 
-	var typ *types.Named //@mark(typesImport, "types"),hover("types", typesImport)
-	typ.Obj().Name()     //@Name,hover("Name", Name)
+	var typ *types.Named //@mark(typesImport, "types"),hoverdef("types", typesImport)
+	typ.Obj().Name()     //@Name,hoverdef("Name", Name)
 }
 
 type A struct {
@@ -76,7 +76,7 @@
 func _() {
 	// 1st type declaration block
 	type (
-		a struct { //@mark(declBlockA, "a"),hover("a", declBlockA)
+		a struct { //@mark(declBlockA, "a"),hoverdef("a", declBlockA)
 			x string
 		}
 	)
@@ -84,21 +84,21 @@
 	// 2nd type declaration block
 	type (
 		// b has a comment
-		b struct{} //@mark(declBlockB, "b"),hover("b", declBlockB)
+		b struct{} //@mark(declBlockB, "b"),hoverdef("b", declBlockB)
 	)
 
 	// 3rd type declaration block
 	type (
 		// c is a struct
-		c struct { //@mark(declBlockC, "c"),hover("c", declBlockC)
+		c struct { //@mark(declBlockC, "c"),hoverdef("c", declBlockC)
 			f string
 		}
 
-		d string //@mark(declBlockD, "d"),hover("d", declBlockD)
+		d string //@mark(declBlockD, "d"),hoverdef("d", declBlockD)
 	)
 
 	type (
-		e struct { //@mark(declBlockE, "e"),hover("e", declBlockE)
+		e struct { //@mark(declBlockE, "e"),hoverdef("e", declBlockE)
 			f float64
 		} // e has a comment
 	)
diff --git a/internal/lsp/testdata/godef/a/a.go.golden b/internal/lsp/testdata/godef/a/a.go.golden
index c268293..9f67a14 100644
--- a/internal/lsp/testdata/godef/a/a.go.golden
+++ b/internal/lsp/testdata/godef/a/a.go.golden
@@ -1,19 +1,19 @@
--- Lock-hover --
+-- Lock-hoverdef --
 ```go
 func (*sync.Mutex).Lock()
 ```
 
-[`(sync.Mutex).Lock` on pkg.go.dev](https://pkg.go.dev/sync?utm_source=gopls#Mutex.Lock)
-
 Lock locks m\.
--- Name-hover --
+
+[`(sync.Mutex).Lock` on pkg.go.dev](https://pkg.go.dev/sync?utm_source=gopls#Mutex.Lock)
+-- Name-hoverdef --
 ```go
 func (*types.object).Name() string
 ```
 
-[`(types.TypeName).Name` on pkg.go.dev](https://pkg.go.dev/go/types?utm_source=gopls#TypeName.Name)
-
 Name returns the object\'s \(package\-local, unqualified\) name\.
+
+[`(types.TypeName).Name` on pkg.go.dev](https://pkg.go.dev/go/types?utm_source=gopls#TypeName.Name)
 -- Random-definition --
 godef/a/random.go:3:6-12: defined here as ```go
 func Random() int
@@ -38,7 +38,7 @@
 	"description": "```go\nfunc Random() int\n```\n\n[`a.Random` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Random)"
 }
 
--- Random-hover --
+-- Random-hoverdef --
 ```go
 func Random() int
 ```
@@ -68,15 +68,15 @@
 	"description": "```go\nfunc Random2(y int) int\n```\n\n[`a.Random2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Random2)"
 }
 
--- Random2-hover --
+-- Random2-hoverdef --
 ```go
 func Random2(y int) int
 ```
 
 [`a.Random2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Random2)
--- aPackage-hover --
+-- aPackage-hoverdef --
 Package a is a package for testing go to definition\.
--- declBlockA-hover --
+-- declBlockA-hoverdef --
 ```go
 type a struct {
 	x string
@@ -84,13 +84,13 @@
 ```
 
 1st type declaration block
--- declBlockB-hover --
+-- declBlockB-hoverdef --
 ```go
 type b struct{}
 ```
 
 b has a comment
--- declBlockC-hover --
+-- declBlockC-hoverdef --
 ```go
 type c struct {
 	f string
@@ -98,13 +98,13 @@
 ```
 
 c is a struct
--- declBlockD-hover --
+-- declBlockD-hoverdef --
 ```go
 type d string
 ```
 
 3rd type declaration block
--- declBlockE-hover --
+-- declBlockE-hoverdef --
 ```go
 type e struct {
 	f float64
@@ -125,60 +125,64 @@
 		"start": {
 			"line": 33,
 			"column": 6,
-			"offset": 597
+			"offset": 612
 		},
 		"end": {
 			"line": 33,
 			"column": 9,
-			"offset": 600
+			"offset": 615
 		}
 	},
 	"description": "```go\nvar err error\n```\n\n\\@err"
 }
 
--- err-hover --
+-- err-hoverdef --
 ```go
 var err error
 ```
 
 \@err
--- g-hover --
+-- g-hoverdef --
 ```go
 const g untyped int = 1
 ```
 
 When I hover on g, I should see this comment\.
--- h-hover --
+-- h-hoverdef --
 ```go
 const h untyped int = 2
 ```
 
 Constant block\.
--- make-hover --
+-- make-hoverdef --
 ```go
-func(t Type, size ...IntegerType) Type
+func make(t Type, size ...int) Type
 ```
 
-[`make` on pkg.go.dev](https://pkg.go.dev/builtin?utm_source=gopls#make)
-
 The make built\-in function allocates and initializes an object of type slice, map, or chan \(only\)\.
--- string-hover --
+
+[`make` on pkg.go.dev](https://pkg.go.dev/builtin?utm_source=gopls#make)
+-- string-hoverdef --
 ```go
-string
+type string string
 ```
--- typesImport-hover --
+
+string is the set of all strings of 8\-bit bytes, conventionally but not necessarily representing UTF\-8\-encoded text\.
+
+[`string` on pkg.go.dev](https://pkg.go.dev/builtin?utm_source=gopls#string)
+-- typesImport-hoverdef --
 ```go
 package types ("go/types")
 ```
 
 [`types` on pkg.go.dev](https://pkg.go.dev/go/types?utm_source=gopls)
--- x-hover --
+-- x-hoverdef --
 ```go
 var x string
 ```
 
 x is a variable\.
--- z-hover --
+-- z-hoverdef --
 ```go
 var z string
 ```
diff --git a/internal/lsp/testdata/godef/a/a_test.go.golden b/internal/lsp/testdata/godef/a/a_test.go.golden
index ac50b90..e5cb3d7 100644
--- a/internal/lsp/testdata/godef/a/a_test.go.golden
+++ b/internal/lsp/testdata/godef/a/a_test.go.golden
@@ -20,7 +20,7 @@
 	"description": "```go\nfunc TestA(t *testing.T)\n```"
 }
 
--- TestA-hover --
+-- TestA-hoverdef --
 ```go
 func TestA(t *testing.T)
 ```
diff --git a/internal/lsp/testdata/godef/a/a_x_test.go.golden b/internal/lsp/testdata/godef/a/a_x_test.go.golden
index dd1d740..2e30647 100644
--- a/internal/lsp/testdata/godef/a/a_x_test.go.golden
+++ b/internal/lsp/testdata/godef/a/a_x_test.go.golden
@@ -20,7 +20,7 @@
 	"description": "```go\nfunc TestA2(t *testing.T)\n```"
 }
 
--- TestA2-hover --
+-- TestA2-hoverdef --
 ```go
 func TestA2(t *testing.T)
 ```
diff --git a/internal/lsp/testdata/godef/a/d.go b/internal/lsp/testdata/godef/a/d.go
index d20bdad..2da8d05 100644
--- a/internal/lsp/testdata/godef/a/d.go
+++ b/internal/lsp/testdata/godef/a/d.go
@@ -1,4 +1,4 @@
-package a //@mark(a, "a "),hover("a ", a)
+package a //@mark(a, "a "),hoverdef("a ", a)
 
 import "fmt"
 
diff --git a/internal/lsp/testdata/godef/a/d.go.golden b/internal/lsp/testdata/godef/a/d.go.golden
index d80c14a..47723b0 100644
--- a/internal/lsp/testdata/godef/a/d.go.golden
+++ b/internal/lsp/testdata/godef/a/d.go.golden
@@ -3,9 +3,9 @@
 field Member string
 ```
 
-[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)
-
 \@Member
+
+[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)
 -- Member-definition-json --
 {
 	"span": {
@@ -13,25 +13,25 @@
 		"start": {
 			"line": 6,
 			"column": 2,
-			"offset": 87
+			"offset": 90
 		},
 		"end": {
 			"line": 6,
 			"column": 8,
-			"offset": 93
+			"offset": 96
 		}
 	},
-	"description": "```go\nfield Member string\n```\n\n[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)\n\n\\@Member"
+	"description": "```go\nfield Member string\n```\n\n\\@Member\n\n[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)"
 }
 
--- Member-hover --
+-- Member-hoverdef --
 ```go
 field Member string
 ```
 
-[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)
-
 \@Member
+
+[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)
 -- Method-definition --
 godef/a/d.go:15:16-22: defined here as ```go
 func (Thing).Method(i int) string
@@ -45,18 +45,18 @@
 		"start": {
 			"line": 15,
 			"column": 16,
-			"offset": 216
+			"offset": 219
 		},
 		"end": {
 			"line": 15,
 			"column": 22,
-			"offset": 222
+			"offset": 225
 		}
 	},
 	"description": "```go\nfunc (Thing).Method(i int) string\n```\n\n[`(a.Thing).Method` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Method)"
 }
 
--- Method-hover --
+-- Method-hoverdef --
 ```go
 func (Thing).Method(i int) string
 ```
@@ -67,9 +67,9 @@
 var Other Thing
 ```
 
-[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)
-
 \@Other
+
+[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)
 -- Other-definition-json --
 {
 	"span": {
@@ -77,25 +77,25 @@
 		"start": {
 			"line": 9,
 			"column": 5,
-			"offset": 118
+			"offset": 121
 		},
 		"end": {
 			"line": 9,
 			"column": 10,
-			"offset": 123
+			"offset": 126
 		}
 	},
-	"description": "```go\nvar Other Thing\n```\n\n[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)\n\n\\@Other"
+	"description": "```go\nvar Other Thing\n```\n\n\\@Other\n\n[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)"
 }
 
--- Other-hover --
+-- Other-hoverdef --
 ```go
 var Other Thing
 ```
 
-[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)
-
 \@Other
+
+[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)
 -- Thing-definition --
 godef/a/d.go:5:6-11: defined here as ```go
 type Thing struct {
@@ -111,18 +111,18 @@
 		"start": {
 			"line": 5,
 			"column": 6,
-			"offset": 62
+			"offset": 65
 		},
 		"end": {
 			"line": 5,
 			"column": 11,
-			"offset": 67
+			"offset": 70
 		}
 	},
 	"description": "```go\ntype Thing struct {\n\tMember string //@Member\n}\n```\n\n[`a.Thing` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing)"
 }
 
--- Thing-hover --
+-- Thing-hoverdef --
 ```go
 type Thing struct {
 	Member string //@Member
@@ -143,22 +143,22 @@
 		"start": {
 			"line": 11,
 			"column": 6,
-			"offset": 145
+			"offset": 148
 		},
 		"end": {
 			"line": 11,
 			"column": 12,
-			"offset": 151
+			"offset": 154
 		}
 	},
 	"description": "```go\nfunc Things(val []string) []Thing\n```\n\n[`a.Things` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Things)"
 }
 
--- Things-hover --
+-- Things-hoverdef --
 ```go
 func Things(val []string) []Thing
 ```
 
 [`a.Things` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Things)
--- a-hover --
+-- a-hoverdef --
 Package a is a package for testing go to definition\.
diff --git a/internal/lsp/testdata/godef/a/f.go b/internal/lsp/testdata/godef/a/f.go
index 2d3eefc..589c45f 100644
--- a/internal/lsp/testdata/godef/a/f.go
+++ b/internal/lsp/testdata/godef/a/f.go
@@ -7,9 +7,9 @@
 
 	switch y := interface{}(x).(type) { //@mark(switchY, "y"),godef("y", switchY)
 	case int: //@mark(intY, "int")
-		fmt.Printf("%v", y) //@hover("y", intY)
+		fmt.Printf("%v", y) //@hoverdef("y", intY)
 	case string: //@mark(stringY, "string")
-		fmt.Printf("%v", y) //@hover("y", stringY)
+		fmt.Printf("%v", y) //@hoverdef("y", stringY)
 	}
 
 }
diff --git a/internal/lsp/testdata/godef/a/f.go.golden b/internal/lsp/testdata/godef/a/f.go.golden
index 6c84b4d..a084356 100644
--- a/internal/lsp/testdata/godef/a/f.go.golden
+++ b/internal/lsp/testdata/godef/a/f.go.golden
@@ -1,8 +1,8 @@
--- intY-hover --
+-- intY-hoverdef --
 ```go
 var y int
 ```
--- stringY-hover --
+-- stringY-hoverdef --
 ```go
 var y string
 ```
@@ -28,7 +28,7 @@
 	"description": "```go\nvar y interface{}\n```"
 }
 
--- switchY-hover --
+-- switchY-hoverdef --
 ```go
 var y interface{}
 ```
diff --git a/internal/lsp/testdata/godef/a/g.go b/internal/lsp/testdata/godef/a/g.go
index 4f31857..dfef2fb 100644
--- a/internal/lsp/testdata/godef/a/g.go
+++ b/internal/lsp/testdata/godef/a/g.go
@@ -3,4 +3,4 @@
 import "time"
 
 // dur is a constant of type time.Duration.
-const dur = 15*time.Minute + 10*time.Second + 350*time.Millisecond //@dur,hover("dur", dur)
+const dur = 15*time.Minute + 10*time.Second + 350*time.Millisecond //@dur,hoverdef("dur", dur)
diff --git a/internal/lsp/testdata/godef/a/g.go.golden b/internal/lsp/testdata/godef/a/g.go.golden
index d46ff04..b7ed739 100644
--- a/internal/lsp/testdata/godef/a/g.go.golden
+++ b/internal/lsp/testdata/godef/a/g.go.golden
@@ -1,4 +1,4 @@
--- dur-hover --
+-- dur-hoverdef --
 ```go
 const dur time.Duration = 910350000000 // 15m10.35s
 ```
diff --git a/internal/lsp/testdata/godef/a/h.go b/internal/lsp/testdata/godef/a/h.go
index efe7d4e..5a5dcc6 100644
--- a/internal/lsp/testdata/godef/a/h.go
+++ b/internal/lsp/testdata/godef/a/h.go
@@ -25,9 +25,9 @@
 	}
 
 	var t s
-	_ = t.nested.number  //@hover("number", nestedNumber)
-	_ = t.nested2[0].str //@hover("str", nestedString)
-	_ = t.x.x.x.x.x.m    //@hover("m", nestedMap)
+	_ = t.nested.number  //@hoverdef("number", nestedNumber)
+	_ = t.nested2[0].str //@hoverdef("str", nestedString)
+	_ = t.x.x.x.x.x.m    //@hoverdef("m", nestedMap)
 }
 
 func _() {
@@ -40,9 +40,9 @@
 			c int //@mark(structC, "c")
 		}
 	}
-	_ = s.a   //@hover("a", structA)
-	_ = s.b   //@hover("b", structB)
-	_ = s.b.c //@hover("c", structC)
+	_ = s.a   //@hoverdef("a", structA)
+	_ = s.b   //@hoverdef("b", structB)
+	_ = s.b.c //@hoverdef("c", structC)
 
 	var arr []struct {
 		// d field
@@ -53,9 +53,9 @@
 			f int //@mark(arrF, "f")
 		}
 	}
-	_ = arr[0].d   //@hover("d", arrD)
-	_ = arr[0].e   //@hover("e", arrE)
-	_ = arr[0].e.f //@hover("f", arrF)
+	_ = arr[0].d   //@hoverdef("d", arrD)
+	_ = arr[0].e   //@hoverdef("e", arrE)
+	_ = arr[0].e.f //@hoverdef("f", arrF)
 
 	var complex []struct {
 		c <-chan map[string][]struct {
@@ -68,16 +68,16 @@
 			}
 		}
 	}
-	_ = (<-complex[0].c)["0"][0].h   //@hover("h", complexH)
-	_ = (<-complex[0].c)["0"][0].i   //@hover("i", complexI)
-	_ = (<-complex[0].c)["0"][0].i.j //@hover("j", complexJ)
+	_ = (<-complex[0].c)["0"][0].h   //@hoverdef("h", complexH)
+	_ = (<-complex[0].c)["0"][0].i   //@hoverdef("i", complexI)
+	_ = (<-complex[0].c)["0"][0].i.j //@hoverdef("j", complexJ)
 
 	var mapWithStructKey map[struct {
 		// X key field
 		x []string //@mark(mapStructKeyX, "x")
 	}]int
 	for k := range mapWithStructKey {
-		_ = k.x //@hover("x", mapStructKeyX)
+		_ = k.x //@hoverdef("x", mapStructKeyX)
 	}
 
 	var mapWithStructKeyAndValue map[struct {
@@ -90,15 +90,15 @@
 	for k, v := range mapWithStructKeyAndValue {
 		// TODO: we don't show docs for y field because both map key and value
 		// are structs. And in this case, we parse only map value
-		_ = k.y //@hover("y", mapStructKeyY)
-		_ = v.x //@hover("x", mapStructValueX)
+		_ = k.y //@hoverdef("y", mapStructKeyY)
+		_ = v.x //@hoverdef("x", mapStructValueX)
 	}
 
 	var i []map[string]interface {
 		// open method comment
 		open() error //@mark(openMethod, "open")
 	}
-	i[0]["1"].open() //@hover("open", openMethod)
+	i[0]["1"].open() //@hoverdef("open", openMethod)
 }
 
 func _() {
@@ -106,7 +106,7 @@
 		// test description
 		desc string //@mark(testDescription, "desc")
 	}{}
-	_ = test.desc //@hover("desc", testDescription)
+	_ = test.desc //@hoverdef("desc", testDescription)
 
 	for _, tt := range []struct {
 		// test input
@@ -123,11 +123,11 @@
 			}
 		}
 	}{} {
-		_ = tt.in               //@hover("in", testInput)
-		_ = tt.in["0"][0].key   //@hover("key", testInputKey)
-		_ = tt.in["0"][0].value //@hover("value", testInputValue)
+		_ = tt.in               //@hoverdef("in", testInput)
+		_ = tt.in["0"][0].key   //@hoverdef("key", testInputKey)
+		_ = tt.in["0"][0].value //@hoverdef("value", testInputValue)
 
-		_ = (<-tt.result.v).value //@hover("value", testResultValue)
+		_ = (<-tt.result.v).value //@hoverdef("value", testResultValue)
 	}
 }
 
@@ -142,6 +142,6 @@
 	}
 
 	r := getPoints()
-	r[0].x //@hover("x", returnX)
-	r[0].y //@hover("y", returnY)
+	r[0].x //@hoverdef("x", returnX)
+	r[0].y //@hoverdef("y", returnY)
 }
diff --git a/internal/lsp/testdata/godef/a/h.go.golden b/internal/lsp/testdata/godef/a/h.go.golden
index 3525d4c..4b27211 100644
--- a/internal/lsp/testdata/godef/a/h.go.golden
+++ b/internal/lsp/testdata/godef/a/h.go.golden
@@ -1,134 +1,134 @@
--- arrD-hover --
+-- arrD-hoverdef --
 ```go
 field d int
 ```
 
 d field
--- arrE-hover --
+-- arrE-hoverdef --
 ```go
 field e struct{f int}
 ```
 
 e nested struct
--- arrF-hover --
+-- arrF-hoverdef --
 ```go
 field f int
 ```
 
 f field of nested struct
--- complexH-hover --
+-- complexH-hoverdef --
 ```go
 field h int
 ```
 
 h field
--- complexI-hover --
+-- complexI-hoverdef --
 ```go
 field i struct{j int}
 ```
 
 i nested struct
--- complexJ-hover --
+-- complexJ-hoverdef --
 ```go
 field j int
 ```
 
 j field of nested struct
--- mapStructKeyX-hover --
+-- mapStructKeyX-hoverdef --
 ```go
 field x []string
 ```
 
 X key field
--- mapStructKeyY-hover --
+-- mapStructKeyY-hoverdef --
 ```go
 field y string
 ```
--- mapStructValueX-hover --
+-- mapStructValueX-hoverdef --
 ```go
 field x string
 ```
 
 X value field
--- nestedMap-hover --
+-- nestedMap-hoverdef --
 ```go
 field m map[string]float64
 ```
 
 nested map
--- nestedNumber-hover --
+-- nestedNumber-hoverdef --
 ```go
 field number int64
 ```
 
 nested number
--- nestedString-hover --
+-- nestedString-hoverdef --
 ```go
 field str string
 ```
 
 nested string
--- openMethod-hover --
+-- openMethod-hoverdef --
 ```go
 func (interface).open() error
 ```
 
 open method comment
--- returnX-hover --
+-- returnX-hoverdef --
 ```go
 field x int
 ```
 
 X coord
--- returnY-hover --
+-- returnY-hoverdef --
 ```go
 field y int
 ```
 
 Y coord
--- structA-hover --
+-- structA-hoverdef --
 ```go
 field a int
 ```
 
 a field
--- structB-hover --
+-- structB-hoverdef --
 ```go
 field b struct{c int}
 ```
 
 b nested struct
--- structC-hover --
+-- structC-hoverdef --
 ```go
 field c int
 ```
 
 c field of nested struct
--- testDescription-hover --
+-- testDescription-hoverdef --
 ```go
 field desc string
 ```
 
 test description
--- testInput-hover --
+-- testInput-hoverdef --
 ```go
 field in map[string][]struct{key string; value interface{}}
 ```
 
 test input
--- testInputKey-hover --
+-- testInputKey-hoverdef --
 ```go
 field key string
 ```
 
 test key
--- testInputValue-hover --
+-- testInputValue-hoverdef --
 ```go
 field value interface{}
 ```
 
 test value
--- testResultValue-hover --
+-- testResultValue-hoverdef --
 ```go
 field value int
 ```
diff --git a/internal/lsp/testdata/godef/a/random.go.golden b/internal/lsp/testdata/godef/a/random.go.golden
index 0f99a52..381a11a 100644
--- a/internal/lsp/testdata/godef/a/random.go.golden
+++ b/internal/lsp/testdata/godef/a/random.go.golden
@@ -22,7 +22,7 @@
 	"description": "```go\nfunc (*Pos).Sum() int\n```\n\n[`(a.Pos).Sum` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Pos.Sum)"
 }
 
--- PosSum-hover --
+-- PosSum-hoverdef --
 ```go
 func (*Pos).Sum() int
 ```
@@ -52,7 +52,7 @@
 	"description": "```go\nfield x int\n```\n\n\\@mark\\(PosX, \\\"x\\\"\\),mark\\(PosY, \\\"y\\\"\\)"
 }
 
--- PosX-hover --
+-- PosX-hoverdef --
 ```go
 field x int
 ```
@@ -80,7 +80,7 @@
 	"description": "```go\nvar y int\n```"
 }
 
--- RandomParamY-hover --
+-- RandomParamY-hoverdef --
 ```go
 var y int
 ```
@@ -106,7 +106,7 @@
 	"description": "```go\nfield field string\n```"
 }
 
--- TypField-hover --
+-- TypField-hoverdef --
 ```go
 field field string
 ```
diff --git a/internal/lsp/testdata/godef/b/b.go b/internal/lsp/testdata/godef/b/b.go
index 23d908f..f9c1d64 100644
--- a/internal/lsp/testdata/godef/b/b.go
+++ b/internal/lsp/testdata/godef/b/b.go
@@ -13,13 +13,13 @@
 
 func _() {
 	e := Embed{}
-	e.Hi()      //@hover("Hi", AHi)
-	e.B()       //@hover("B", AB)
-	e.Field     //@hover("Field", AField)
-	e.Field2    //@hover("Field2", AField2)
-	e.Hello()   //@hover("Hello", AHello)
-	e.Hey()     //@hover("Hey", AHey)
-	e.Goodbye() //@hover("Goodbye", AGoodbye)
+	e.Hi()      //@hoverdef("Hi", AHi)
+	e.B()       //@hoverdef("B", AB)
+	e.Field     //@hoverdef("Field", AField)
+	e.Field2    //@hoverdef("Field2", AField2)
+	e.Hello()   //@hoverdef("Hello", AHello)
+	e.Hey()     //@hoverdef("Hey", AHey)
+	e.Goodbye() //@hoverdef("Goodbye", AGoodbye)
 }
 
 type aAlias = a.A //@mark(aAlias, "aAlias")
diff --git a/internal/lsp/testdata/godef/b/b.go.golden b/internal/lsp/testdata/godef/b/b.go.golden
index 5537180..5f7669b 100644
--- a/internal/lsp/testdata/godef/b/b.go.golden
+++ b/internal/lsp/testdata/godef/b/b.go.golden
@@ -1,50 +1,50 @@
--- AB-hover --
+-- AB-hoverdef --
 ```go
 func (a.I).B()
 ```
 
-[`(a.I).B` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#I.B)
-
 \@mark\(AB, \"B\"\)
--- AField-hover --
+
+[`(a.I).B` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#I.B)
+-- AField-hoverdef --
 ```go
 field Field int
 ```
 
-[`(a.S).Field` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#S.Field)
-
 \@mark\(AField, \"Field\"\)
--- AField2-hover --
+
+[`(a.S).Field` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#S.Field)
+-- AField2-hoverdef --
 ```go
 field Field2 int
 ```
 
-[`(a.R).Field2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#R.Field2)
-
 \@mark\(AField2, \"Field2\"\)
--- AGoodbye-hover --
+
+[`(a.R).Field2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#R.Field2)
+-- AGoodbye-hoverdef --
 ```go
 func (a.H).Goodbye()
 ```
 
-[`(a.H).Goodbye` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#H.Goodbye)
-
 \@mark\(AGoodbye, \"Goodbye\"\)
--- AHello-hover --
+
+[`(a.H).Goodbye` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#H.Goodbye)
+-- AHello-hoverdef --
 ```go
 func (a.J).Hello()
 ```
 
-[`(a.J).Hello` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#J.Hello)
-
 \@mark\(AHello, \"Hello\"\)
--- AHey-hover --
+
+[`(a.J).Hello` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#J.Hello)
+-- AHey-hoverdef --
 ```go
 func (a.R).Hey()
 ```
 
 [`(a.R).Hey` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#R.Hey)
--- AHi-hover --
+-- AHi-hoverdef --
 ```go
 func (a.A).Hi()
 ```
@@ -74,7 +74,7 @@
 	"description": "```go\npackage a (\"golang.org/x/tools/internal/lsp/godef/a\")\n```\n\n[`a` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls)"
 }
 
--- AImport-hover --
+-- AImport-hoverdef --
 ```go
 package a ("golang.org/x/tools/internal/lsp/godef/a")
 ```
@@ -85,9 +85,9 @@
 type A string
 ```
 
-[`a.A` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#A)
-
 \@mark\(AString, \"A\"\)
+
+[`a.A` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#A)
 -- AString-definition-json --
 {
 	"span": {
@@ -95,25 +95,25 @@
 		"start": {
 			"line": 26,
 			"column": 6,
-			"offset": 452
+			"offset": 467
 		},
 		"end": {
 			"line": 26,
 			"column": 7,
-			"offset": 453
+			"offset": 468
 		}
 	},
-	"description": "```go\ntype A string\n```\n\n[`a.A` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#A)\n\n\\@mark\\(AString, \\\"A\\\"\\)"
+	"description": "```go\ntype A string\n```\n\n\\@mark\\(AString, \\\"A\\\"\\)\n\n[`a.A` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#A)"
 }
 
--- AString-hover --
+-- AString-hoverdef --
 ```go
 type A string
 ```
 
-[`a.A` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#A)
-
 \@mark\(AString, \"A\"\)
+
+[`a.A` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#A)
 -- AStuff-definition --
 godef/a/a.go:28:6-12: defined here as ```go
 func a.AStuff()
@@ -127,18 +127,18 @@
 		"start": {
 			"line": 28,
 			"column": 6,
-			"offset": 489
+			"offset": 504
 		},
 		"end": {
 			"line": 28,
 			"column": 12,
-			"offset": 495
+			"offset": 510
 		}
 	},
 	"description": "```go\nfunc a.AStuff()\n```\n\n[`a.AStuff` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#AStuff)"
 }
 
--- AStuff-hover --
+-- AStuff-hoverdef --
 ```go
 func a.AStuff()
 ```
@@ -162,18 +162,18 @@
 		"start": {
 			"line": 27,
 			"column": 6,
-			"offset": 566
+			"offset": 587
 		},
 		"end": {
 			"line": 27,
 			"column": 8,
-			"offset": 568
+			"offset": 589
 		}
 	},
 	"description": "```go\ntype S1 struct {\n\tF1     int //@mark(S1F1, \"F1\")\n\tS2         //@godef(\"S2\", S2),mark(S1S2, \"S2\")\n\ta.A        //@godef(\"A\", AString)\n\taAlias     //@godef(\"a\", aAlias)\n}\n```\n\n[`b.S1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1)"
 }
 
--- S1-hover --
+-- S1-hoverdef --
 ```go
 type S1 struct {
 	F1     int //@mark(S1F1, "F1")
@@ -189,9 +189,9 @@
 field F1 int
 ```
 
-[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)
-
 \@mark\(S1F1, \"F1\"\)
+
+[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)
 -- S1F1-definition-json --
 {
 	"span": {
@@ -199,33 +199,33 @@
 		"start": {
 			"line": 28,
 			"column": 2,
-			"offset": 585
+			"offset": 606
 		},
 		"end": {
 			"line": 28,
 			"column": 4,
-			"offset": 587
+			"offset": 608
 		}
 	},
-	"description": "```go\nfield F1 int\n```\n\n[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)\n\n\\@mark\\(S1F1, \\\"F1\\\"\\)"
+	"description": "```go\nfield F1 int\n```\n\n\\@mark\\(S1F1, \\\"F1\\\"\\)\n\n[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)"
 }
 
--- S1F1-hover --
+-- S1F1-hoverdef --
 ```go
 field F1 int
 ```
 
-[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)
-
 \@mark\(S1F1, \"F1\"\)
+
+[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)
 -- S1S2-definition --
 godef/b/b.go:29:2-4: defined here as ```go
 field S2 S2
 ```
 
-[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.S2)
-
 \@godef\(\"S2\", S2\),mark\(S1S2, \"S2\"\)
+
+[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.S2)
 -- S1S2-definition-json --
 {
 	"span": {
@@ -233,25 +233,25 @@
 		"start": {
 			"line": 29,
 			"column": 2,
-			"offset": 617
+			"offset": 638
 		},
 		"end": {
 			"line": 29,
 			"column": 4,
-			"offset": 619
+			"offset": 640
 		}
 	},
-	"description": "```go\nfield S2 S2\n```\n\n[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.S2)\n\n\\@godef\\(\\\"S2\\\", S2\\),mark\\(S1S2, \\\"S2\\\"\\)"
+	"description": "```go\nfield S2 S2\n```\n\n\\@godef\\(\\\"S2\\\", S2\\),mark\\(S1S2, \\\"S2\\\"\\)\n\n[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.S2)"
 }
 
--- S1S2-hover --
+-- S1S2-hoverdef --
 ```go
 field S2 S2
 ```
 
-[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.S2)
-
 \@godef\(\"S2\", S2\),mark\(S1S2, \"S2\"\)
+
+[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.S2)
 -- S2-definition --
 godef/b/b.go:34:6-8: defined here as ```go
 type S2 struct {
@@ -269,18 +269,18 @@
 		"start": {
 			"line": 34,
 			"column": 6,
-			"offset": 741
+			"offset": 762
 		},
 		"end": {
 			"line": 34,
 			"column": 8,
-			"offset": 743
+			"offset": 764
 		}
 	},
 	"description": "```go\ntype S2 struct {\n\tF1   string //@mark(S2F1, \"F1\")\n\tF2   int    //@mark(S2F2, \"F2\")\n\t*a.A        //@godef(\"A\", AString),godef(\"a\",AImport)\n}\n```\n\n[`b.S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2)"
 }
 
--- S2-hover --
+-- S2-hoverdef --
 ```go
 type S2 struct {
 	F1   string //@mark(S2F1, "F1")
@@ -295,9 +295,9 @@
 field F1 string
 ```
 
-[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F1)
-
 \@mark\(S2F1, \"F1\"\)
+
+[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F1)
 -- S2F1-definition-json --
 {
 	"span": {
@@ -305,33 +305,33 @@
 		"start": {
 			"line": 35,
 			"column": 2,
-			"offset": 760
+			"offset": 781
 		},
 		"end": {
 			"line": 35,
 			"column": 4,
-			"offset": 762
+			"offset": 783
 		}
 	},
-	"description": "```go\nfield F1 string\n```\n\n[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F1)\n\n\\@mark\\(S2F1, \\\"F1\\\"\\)"
+	"description": "```go\nfield F1 string\n```\n\n\\@mark\\(S2F1, \\\"F1\\\"\\)\n\n[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F1)"
 }
 
--- S2F1-hover --
+-- S2F1-hoverdef --
 ```go
 field F1 string
 ```
 
-[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F1)
-
 \@mark\(S2F1, \"F1\"\)
+
+[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F1)
 -- S2F2-definition --
 godef/b/b.go:36:2-4: defined here as ```go
 field F2 int
 ```
 
-[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F2)
-
 \@mark\(S2F2, \"F2\"\)
+
+[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F2)
 -- S2F2-definition-json --
 {
 	"span": {
@@ -339,25 +339,25 @@
 		"start": {
 			"line": 36,
 			"column": 2,
-			"offset": 793
+			"offset": 814
 		},
 		"end": {
 			"line": 36,
 			"column": 4,
-			"offset": 795
+			"offset": 816
 		}
 	},
-	"description": "```go\nfield F2 int\n```\n\n[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F2)\n\n\\@mark\\(S2F2, \\\"F2\\\"\\)"
+	"description": "```go\nfield F2 int\n```\n\n\\@mark\\(S2F2, \\\"F2\\\"\\)\n\n[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F2)"
 }
 
--- S2F2-hover --
+-- S2F2-hoverdef --
 ```go
 field F2 int
 ```
 
-[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F2)
-
 \@mark\(S2F2, \"F2\"\)
+
+[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F2)
 -- aAlias-definition --
 godef/b/b.go:25:6-12: defined here as ```go
 type aAlias = a.A
@@ -371,18 +371,18 @@
 		"start": {
 			"line": 25,
 			"column": 6,
-			"offset": 521
+			"offset": 542
 		},
 		"end": {
 			"line": 25,
 			"column": 12,
-			"offset": 527
+			"offset": 548
 		}
 	},
 	"description": "```go\ntype aAlias = a.A\n```\n\n\\@mark\\(aAlias, \\\"aAlias\\\"\\)"
 }
 
--- aAlias-hover --
+-- aAlias-hoverdef --
 ```go
 type aAlias = a.A
 ```
@@ -393,9 +393,9 @@
 const X untyped int = 0
 ```
 
-[`b.X` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#X)
-
 \@mark\(bX, \"X\"\),godef\(\"X\", bX\)
+
+[`b.X` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#X)
 -- bX-definition-json --
 {
 	"span": {
@@ -403,25 +403,25 @@
 		"start": {
 			"line": 57,
 			"column": 7,
-			"offset": 1228
+			"offset": 1249
 		},
 		"end": {
 			"line": 57,
 			"column": 8,
-			"offset": 1229
+			"offset": 1250
 		}
 	},
-	"description": "```go\nconst X untyped int = 0\n```\n\n[`b.X` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#X)\n\n\\@mark\\(bX, \\\"X\\\"\\),godef\\(\\\"X\\\", bX\\)"
+	"description": "```go\nconst X untyped int = 0\n```\n\n\\@mark\\(bX, \\\"X\\\"\\),godef\\(\\\"X\\\", bX\\)\n\n[`b.X` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#X)"
 }
 
--- bX-hover --
+-- bX-hoverdef --
 ```go
 const X untyped int = 0
 ```
 
-[`b.X` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#X)
-
 \@mark\(bX, \"X\"\),godef\(\"X\", bX\)
+
+[`b.X` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#X)
 -- myFoo-definition --
 godef/b/b.go:4:2-7: defined here as ```go
 package myFoo ("golang.org/x/tools/internal/lsp/foo")
@@ -446,7 +446,7 @@
 	"description": "```go\npackage myFoo (\"golang.org/x/tools/internal/lsp/foo\")\n```\n\n[`myFoo` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/foo?utm_source=gopls)"
 }
 
--- myFoo-hover --
+-- myFoo-hoverdef --
 ```go
 package myFoo ("golang.org/x/tools/internal/lsp/foo")
 ```
diff --git a/internal/lsp/testdata/godef/b/c.go.golden b/internal/lsp/testdata/godef/b/c.go.golden
index 9554c0d..e6205b7 100644
--- a/internal/lsp/testdata/godef/b/c.go.golden
+++ b/internal/lsp/testdata/godef/b/c.go.golden
@@ -16,18 +16,18 @@
 		"start": {
 			"line": 27,
 			"column": 6,
-			"offset": 566
+			"offset": 587
 		},
 		"end": {
 			"line": 27,
 			"column": 8,
-			"offset": 568
+			"offset": 589
 		}
 	},
 	"description": "```go\ntype S1 struct {\n\tF1     int //@mark(S1F1, \"F1\")\n\tS2         //@godef(\"S2\", S2),mark(S1S2, \"S2\")\n\ta.A        //@godef(\"A\", AString)\n\taAlias     //@godef(\"a\", aAlias)\n}\n```\n\n[`b.S1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1)"
 }
 
--- S1-hover --
+-- S1-hoverdef --
 ```go
 type S1 struct {
 	F1     int //@mark(S1F1, "F1")
@@ -43,9 +43,9 @@
 field F1 int
 ```
 
-[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)
-
 \@mark\(S1F1, \"F1\"\)
+
+[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)
 -- S1F1-definition-json --
 {
 	"span": {
@@ -53,22 +53,22 @@
 		"start": {
 			"line": 28,
 			"column": 2,
-			"offset": 585
+			"offset": 606
 		},
 		"end": {
 			"line": 28,
 			"column": 4,
-			"offset": 587
+			"offset": 608
 		}
 	},
-	"description": "```go\nfield F1 int\n```\n\n[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)\n\n\\@mark\\(S1F1, \\\"F1\\\"\\)"
+	"description": "```go\nfield F1 int\n```\n\n\\@mark\\(S1F1, \\\"F1\\\"\\)\n\n[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)"
 }
 
--- S1F1-hover --
+-- S1F1-hoverdef --
 ```go
 field F1 int
 ```
 
-[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)
-
 \@mark\(S1F1, \"F1\"\)
+
+[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)
diff --git a/internal/lsp/testdata/godef/b/e.go b/internal/lsp/testdata/godef/b/e.go
index 92037ed..7b96cd7 100644
--- a/internal/lsp/testdata/godef/b/e.go
+++ b/internal/lsp/testdata/godef/b/e.go
@@ -22,10 +22,10 @@
 
 func _() {
 	var x interface{}      //@mark(eInterface, "interface{}")
-	switch x := x.(type) { //@hover("x", eInterface)
+	switch x := x.(type) { //@hoverdef("x", eInterface)
 	case string: //@mark(eString, "string")
-		fmt.Println(x) //@hover("x", eString)
+		fmt.Println(x) //@hoverdef("x", eString)
 	case int: //@mark(eInt, "int")
-		fmt.Println(x) //@hover("x", eInt)
+		fmt.Println(x) //@hoverdef("x", eInt)
 	}
 }
diff --git a/internal/lsp/testdata/godef/b/e.go.golden b/internal/lsp/testdata/godef/b/e.go.golden
index 13c2e0e..f9af7b7 100644
--- a/internal/lsp/testdata/godef/b/e.go.golden
+++ b/internal/lsp/testdata/godef/b/e.go.golden
@@ -3,9 +3,9 @@
 field Member string
 ```
 
-[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)
-
 \@Member
+
+[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)
 -- Member-definition-json --
 {
 	"span": {
@@ -13,33 +13,33 @@
 		"start": {
 			"line": 6,
 			"column": 2,
-			"offset": 87
+			"offset": 90
 		},
 		"end": {
 			"line": 6,
 			"column": 8,
-			"offset": 93
+			"offset": 96
 		}
 	},
-	"description": "```go\nfield Member string\n```\n\n[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)\n\n\\@Member"
+	"description": "```go\nfield Member string\n```\n\n\\@Member\n\n[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)"
 }
 
--- Member-hover --
+-- Member-hoverdef --
 ```go
 field Member string
 ```
 
-[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)
-
 \@Member
+
+[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)
 -- Other-definition --
 godef/a/d.go:9:5-10: defined here as ```go
 var a.Other a.Thing
 ```
 
-[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)
-
 \@Other
+
+[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)
 -- Other-definition-json --
 {
 	"span": {
@@ -47,25 +47,25 @@
 		"start": {
 			"line": 9,
 			"column": 5,
-			"offset": 118
+			"offset": 121
 		},
 		"end": {
 			"line": 9,
 			"column": 10,
-			"offset": 123
+			"offset": 126
 		}
 	},
-	"description": "```go\nvar a.Other a.Thing\n```\n\n[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)\n\n\\@Other"
+	"description": "```go\nvar a.Other a.Thing\n```\n\n\\@Other\n\n[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)"
 }
 
--- Other-hover --
+-- Other-hoverdef --
 ```go
 var a.Other a.Thing
 ```
 
-[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)
-
 \@Other
+
+[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)
 -- Thing-definition --
 godef/a/d.go:5:6-11: defined here as ```go
 type Thing struct {
@@ -81,18 +81,18 @@
 		"start": {
 			"line": 5,
 			"column": 6,
-			"offset": 62
+			"offset": 65
 		},
 		"end": {
 			"line": 5,
 			"column": 11,
-			"offset": 67
+			"offset": 70
 		}
 	},
 	"description": "```go\ntype Thing struct {\n\tMember string //@Member\n}\n```\n\n[`a.Thing` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing)"
 }
 
--- Thing-hover --
+-- Thing-hoverdef --
 ```go
 type Thing struct {
 	Member string //@Member
@@ -113,32 +113,32 @@
 		"start": {
 			"line": 11,
 			"column": 6,
-			"offset": 145
+			"offset": 148
 		},
 		"end": {
 			"line": 11,
 			"column": 12,
-			"offset": 151
+			"offset": 154
 		}
 	},
 	"description": "```go\nfunc a.Things(val []string) []a.Thing\n```\n\n[`a.Things` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Things)"
 }
 
--- Things-hover --
+-- Things-hoverdef --
 ```go
 func a.Things(val []string) []a.Thing
 ```
 
 [`a.Things` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Things)
--- eInt-hover --
+-- eInt-hoverdef --
 ```go
 var x int
 ```
--- eInterface-hover --
+-- eInterface-hoverdef --
 ```go
 var x interface{}
 ```
--- eString-hover --
+-- eString-hoverdef --
 ```go
 var x string
 ```
diff --git a/internal/lsp/testdata/godef/b/h.go b/internal/lsp/testdata/godef/b/h.go
index c2776a0..c8cbe85 100644
--- a/internal/lsp/testdata/godef/b/h.go
+++ b/internal/lsp/testdata/godef/b/h.go
@@ -4,7 +4,7 @@
 
 func _() {
 	// variable of type a.A
-	var _ A //@mark(AVariable, "_"),hover("_", AVariable)
+	var _ A //@mark(AVariable, "_"),hoverdef("_", AVariable)
 
-	AStuff() //@hover("AStuff", AStuff)
+	AStuff() //@hoverdef("AStuff", AStuff)
 }
diff --git a/internal/lsp/testdata/godef/b/h.go.golden b/internal/lsp/testdata/godef/b/h.go.golden
index b854dd4..f32f026 100644
--- a/internal/lsp/testdata/godef/b/h.go.golden
+++ b/internal/lsp/testdata/godef/b/h.go.golden
@@ -1,10 +1,10 @@
--- AStuff-hover --
+-- AStuff-hoverdef --
 ```go
 func AStuff()
 ```
 
 [`a.AStuff` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#AStuff)
--- AVariable-hover --
+-- AVariable-hoverdef --
 ```go
 var _ A
 ```
diff --git a/internal/lsp/testdata/godef/broken/unclosedIf.go.golden b/internal/lsp/testdata/godef/broken/unclosedIf.go.golden
index eac0339..5c3329d 100644
--- a/internal/lsp/testdata/godef/broken/unclosedIf.go.golden
+++ b/internal/lsp/testdata/godef/broken/unclosedIf.go.golden
@@ -22,7 +22,7 @@
 	"description": "```go\nvar myUnclosedIf string\n```\n\n\\@myUnclosedIf"
 }
 
--- myUnclosedIf-hover --
+-- myUnclosedIf-hoverdef --
 ```go
 var myUnclosedIf string
 ```
diff --git a/internal/lsp/testdata/godef/hover_generics/hover.go b/internal/lsp/testdata/godef/hover_generics/hover.go
new file mode 100644
index 0000000..7400e1a
--- /dev/null
+++ b/internal/lsp/testdata/godef/hover_generics/hover.go
@@ -0,0 +1,15 @@
+package hover
+
+type value[T any] struct { //@mark(value, "value"),hoverdef("value", value),mark(valueTdecl, "T"),hoverdef("T",valueTdecl)
+	val T   //@mark(valueTparam, "T"),hoverdef("T", valueTparam)
+	Q   int //@mark(valueQfield, "Q"),hoverdef("Q", valueQfield)
+}
+
+type Value[T any] struct { //@mark(ValueTdecl, "T"),hoverdef("T",ValueTdecl)
+	val T   //@mark(ValueTparam, "T"),hoverdef("T", ValueTparam)
+	Q   int //@mark(ValueQfield, "Q"),hoverdef("Q", ValueQfield)
+}
+
+func F[P interface{ ~int | string }]() { //@mark(Pparam, "P"),hoverdef("P",Pparam)
+	var _ P //@mark(Pvar, "P"),hoverdef("P",Pvar)
+}
diff --git a/internal/lsp/testdata/godef/hover_generics/hover.go.golden b/internal/lsp/testdata/godef/hover_generics/hover.go.golden
new file mode 100644
index 0000000..cfebcc4
--- /dev/null
+++ b/internal/lsp/testdata/godef/hover_generics/hover.go.golden
@@ -0,0 +1,45 @@
+-- Pparam-hoverdef --
+```go
+type parameter P interface{~int|string}
+```
+-- Pvar-hoverdef --
+```go
+type parameter P interface{~int|string}
+```
+-- ValueQfield-hoverdef --
+```go
+field Q int
+```
+
+\@mark\(ValueQfield, \"Q\"\),hoverdef\(\"Q\", ValueQfield\)
+
+[`(hover.Value).Q` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/hover_generics?utm_source=gopls#Value.Q)
+-- ValueTdecl-hoverdef --
+```go
+type parameter T any
+```
+-- ValueTparam-hoverdef --
+```go
+type parameter T any
+```
+-- value-hoverdef --
+```go
+type value[T any] struct {
+	val T   //@mark(valueTparam, "T"),hoverdef("T", valueTparam)
+	Q   int //@mark(valueQfield, "Q"),hoverdef("Q", valueQfield)
+}
+```
+-- valueQfield-hoverdef --
+```go
+field Q int
+```
+
+\@mark\(valueQfield, \"Q\"\),hoverdef\(\"Q\", valueQfield\)
+-- valueTdecl-hoverdef --
+```go
+type parameter T any
+```
+-- valueTparam-hoverdef --
+```go
+type parameter T any
+```
diff --git a/internal/lsp/testdata/godef/infer_generics/inferred.go b/internal/lsp/testdata/godef/infer_generics/inferred.go
index 78abf27..2d92a95 100644
--- a/internal/lsp/testdata/godef/infer_generics/inferred.go
+++ b/internal/lsp/testdata/godef/infer_generics/inferred.go
@@ -1,12 +1,12 @@
 package inferred
 
-func app[S interface{ ~[]E }, E any](s S, e E) S {
+func app[S interface{ ~[]E }, E interface{}](s S, e E) S {
 	return append(s, e)
 }
 
 func _() {
-	_ = app[[]int]             //@mark(constrInfer, "app"),hover("app", constrInfer)
-	_ = app[[]int, int]        //@mark(instance, "app"),hover("app", instance)
-	_ = app[[]int]([]int{}, 0) //@mark(partialInfer, "app"),hover("app", partialInfer)
-	_ = app([]int{}, 0)        //@mark(argInfer, "app"),hover("app", argInfer)
+	_ = app[[]int]             //@mark(constrInfer, "app"),hoverdef("app", constrInfer)
+	_ = app[[]int, int]        //@mark(instance, "app"),hoverdef("app", instance)
+	_ = app[[]int]([]int{}, 0) //@mark(partialInfer, "app"),hoverdef("app", partialInfer)
+	_ = app([]int{}, 0)        //@mark(argInfer, "app"),hoverdef("app", argInfer)
 }
diff --git a/internal/lsp/testdata/godef/infer_generics/inferred.go.golden b/internal/lsp/testdata/godef/infer_generics/inferred.go.golden
index 2dd97d9..4a36ff4 100644
--- a/internal/lsp/testdata/godef/infer_generics/inferred.go.golden
+++ b/internal/lsp/testdata/godef/infer_generics/inferred.go.golden
@@ -1,20 +1,20 @@
--- argInfer-hover --
+-- argInfer-hoverdef --
+```go
+func app(s []int, e int) []int // func[S interface{~[]E}, E interface{}](s S, e E) S
+```
+-- constrInf-hoverdef --
 ```go
 func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
 ```
--- constrInf-hover --
+-- constrInfer-hoverdef --
 ```go
-func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
+func app(s []int, e int) []int // func[S interface{~[]E}, E interface{}](s S, e E) S
 ```
--- constrInfer-hover --
+-- instance-hoverdef --
 ```go
-func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
+func app(s []int, e int) []int // func[S interface{~[]E}, E interface{}](s S, e E) S
 ```
--- instance-hover --
+-- partialInfer-hoverdef --
 ```go
-func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
-```
--- partialInfer-hover --
-```go
-func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
+func app(s []int, e int) []int // func[S interface{~[]E}, E interface{}](s S, e E) S
 ```
diff --git a/internal/lsp/testdata/missingfunction/channels.go b/internal/lsp/testdata/missingfunction/channels.go
new file mode 100644
index 0000000..436491c
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/channels.go
@@ -0,0 +1,9 @@
+package missingfunction
+
+func channels(s string) {
+	undefinedChannels(c()) //@suggestedfix("undefinedChannels", "quickfix")
+}
+
+func c() (<-chan string, chan string) {
+	return make(<-chan string), make(chan string)
+}
diff --git a/internal/lsp/testdata/missingfunction/channels.go.golden b/internal/lsp/testdata/missingfunction/channels.go.golden
new file mode 100644
index 0000000..f5078fe
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/channels.go.golden
@@ -0,0 +1,15 @@
+-- suggestedfix_channels_4_2 --
+package missingfunction
+
+func channels(s string) {
+	undefinedChannels(c()) //@suggestedfix("undefinedChannels", "quickfix")
+}
+
+func undefinedChannels(ch1 <-chan string, ch2 chan string) {
+	panic("unimplemented")
+}
+
+func c() (<-chan string, chan string) {
+	return make(<-chan string), make(chan string)
+}
+
diff --git a/internal/lsp/testdata/missingfunction/consecutive_params.go b/internal/lsp/testdata/missingfunction/consecutive_params.go
new file mode 100644
index 0000000..d2ec3be
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/consecutive_params.go
@@ -0,0 +1,6 @@
+package missingfunction
+
+func consecutiveParams() {
+	var s string
+	undefinedConsecutiveParams(s, s) //@suggestedfix("undefinedConsecutiveParams", "quickfix")
+}
diff --git a/internal/lsp/testdata/missingfunction/consecutive_params.go.golden b/internal/lsp/testdata/missingfunction/consecutive_params.go.golden
new file mode 100644
index 0000000..14a7664
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/consecutive_params.go.golden
@@ -0,0 +1,12 @@
+-- suggestedfix_consecutive_params_5_2 --
+package missingfunction
+
+func consecutiveParams() {
+	var s string
+	undefinedConsecutiveParams(s, s) //@suggestedfix("undefinedConsecutiveParams", "quickfix")
+}
+
+func undefinedConsecutiveParams(s1, s2 string) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/missingfunction/error_param.go b/internal/lsp/testdata/missingfunction/error_param.go
new file mode 100644
index 0000000..9fd943f
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/error_param.go
@@ -0,0 +1,6 @@
+package missingfunction
+
+func errorParam() {
+	var err error
+	undefinedErrorParam(err) //@suggestedfix("undefinedErrorParam", "quickfix")
+}
diff --git a/internal/lsp/testdata/missingfunction/error_param.go.golden b/internal/lsp/testdata/missingfunction/error_param.go.golden
new file mode 100644
index 0000000..2e12711
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/error_param.go.golden
@@ -0,0 +1,12 @@
+-- suggestedfix_error_param_5_2 --
+package missingfunction
+
+func errorParam() {
+	var err error
+	undefinedErrorParam(err) //@suggestedfix("undefinedErrorParam", "quickfix")
+}
+
+func undefinedErrorParam(err error) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/missingfunction/literals.go b/internal/lsp/testdata/missingfunction/literals.go
new file mode 100644
index 0000000..e276eae
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/literals.go
@@ -0,0 +1,7 @@
+package missingfunction
+
+type T struct{}
+
+func literals() {
+	undefinedLiterals("hey compiler", T{}, &T{}) //@suggestedfix("undefinedLiterals", "quickfix")
+}
diff --git a/internal/lsp/testdata/missingfunction/literals.go.golden b/internal/lsp/testdata/missingfunction/literals.go.golden
new file mode 100644
index 0000000..04782b9
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/literals.go.golden
@@ -0,0 +1,29 @@
+-- suggestedfix_literals_10_2 --
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package missingfunction
+
+type T struct{}
+
+func literals() {
+	undefinedLiterals("hey compiler", T{}, &T{}) //@suggestedfix("undefinedLiterals", "quickfix")
+}
+
+func undefinedLiterals(s string, t1 T, t2 *T) {
+	panic("implement me!")
+}
+-- suggestedfix_literals_6_2 --
+package missingfunction
+
+type T struct{}
+
+func literals() {
+	undefinedLiterals("hey compiler", T{}, &T{}) //@suggestedfix("undefinedLiterals", "quickfix")
+}
+
+func undefinedLiterals(s string, t1 T, t2 *T) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/missingfunction/operation.go b/internal/lsp/testdata/missingfunction/operation.go
new file mode 100644
index 0000000..0408219
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/operation.go
@@ -0,0 +1,7 @@
+package missingfunction
+
+import "time"
+
+func operation() {
+	undefinedOperation(10 * time.Second) //@suggestedfix("undefinedOperation", "quickfix")
+}
diff --git a/internal/lsp/testdata/missingfunction/operation.go.golden b/internal/lsp/testdata/missingfunction/operation.go.golden
new file mode 100644
index 0000000..5e35f30
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/operation.go.golden
@@ -0,0 +1,29 @@
+-- suggestedfix_operation_10_2 --
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package missingfunction
+
+import "time"
+
+func operation() {
+	undefinedOperation(10 * time.Second) //@suggestedfix("undefinedOperation", "quickfix")
+}
+
+func undefinedOperation(duration time.Duration) {
+	panic("implement me!")
+}
+-- suggestedfix_operation_6_2 --
+package missingfunction
+
+import "time"
+
+func operation() {
+	undefinedOperation(10 * time.Second) //@suggestedfix("undefinedOperation", "quickfix")
+}
+
+func undefinedOperation(duration time.Duration) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/missingfunction/selector.go b/internal/lsp/testdata/missingfunction/selector.go
new file mode 100644
index 0000000..afd1ab6
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/selector.go
@@ -0,0 +1,6 @@
+package missingfunction
+
+func selector() {
+	m := map[int]bool{}
+	undefinedSelector(m[1]) //@suggestedfix("undefinedSelector", "quickfix")
+}
diff --git a/internal/lsp/testdata/missingfunction/selector.go.golden b/internal/lsp/testdata/missingfunction/selector.go.golden
new file mode 100644
index 0000000..c48691c
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/selector.go.golden
@@ -0,0 +1,12 @@
+-- suggestedfix_selector_5_2 --
+package missingfunction
+
+func selector() {
+	m := map[int]bool{}
+	undefinedSelector(m[1]) //@suggestedfix("undefinedSelector", "quickfix")
+}
+
+func undefinedSelector(b bool) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/missingfunction/slice.go b/internal/lsp/testdata/missingfunction/slice.go
new file mode 100644
index 0000000..4a562a2
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/slice.go
@@ -0,0 +1,5 @@
+package missingfunction
+
+func slice() {
+	undefinedSlice([]int{1, 2}) //@suggestedfix("undefinedSlice", "quickfix")
+}
diff --git a/internal/lsp/testdata/missingfunction/slice.go.golden b/internal/lsp/testdata/missingfunction/slice.go.golden
new file mode 100644
index 0000000..0ccb861
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/slice.go.golden
@@ -0,0 +1,11 @@
+-- suggestedfix_slice_4_2 --
+package missingfunction
+
+func slice() {
+	undefinedSlice([]int{1, 2}) //@suggestedfix("undefinedSlice", "quickfix")
+}
+
+func undefinedSlice(i []int) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/missingfunction/tuple.go b/internal/lsp/testdata/missingfunction/tuple.go
new file mode 100644
index 0000000..1c4782c
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/tuple.go
@@ -0,0 +1,9 @@
+package missingfunction
+
+func tuple() {
+	undefinedTuple(b()) //@suggestedfix("undefinedTuple", "quickfix")
+}
+
+func b() (string, error) {
+	return "", nil
+}
diff --git a/internal/lsp/testdata/missingfunction/tuple.go.golden b/internal/lsp/testdata/missingfunction/tuple.go.golden
new file mode 100644
index 0000000..1e12bb7
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/tuple.go.golden
@@ -0,0 +1,15 @@
+-- suggestedfix_tuple_4_2 --
+package missingfunction
+
+func tuple() {
+	undefinedTuple(b()) //@suggestedfix("undefinedTuple", "quickfix")
+}
+
+func undefinedTuple(s string, err error) {
+	panic("unimplemented")
+}
+
+func b() (string, error) {
+	return "", nil
+}
+
diff --git a/internal/lsp/testdata/missingfunction/unique_params.go b/internal/lsp/testdata/missingfunction/unique_params.go
new file mode 100644
index 0000000..ffaba3f
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/unique_params.go
@@ -0,0 +1,7 @@
+package missingfunction
+
+func uniqueArguments() {
+	var s string
+	var i int
+	undefinedUniqueArguments(s, i, s) //@suggestedfix("undefinedUniqueArguments", "quickfix")
+}
diff --git a/internal/lsp/testdata/missingfunction/unique_params.go.golden b/internal/lsp/testdata/missingfunction/unique_params.go.golden
new file mode 100644
index 0000000..74fb91a
--- /dev/null
+++ b/internal/lsp/testdata/missingfunction/unique_params.go.golden
@@ -0,0 +1,30 @@
+-- suggestedfix_unique_params_10_2 --
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package missingfunction
+
+func uniqueArguments() {
+	var s string
+	var i int
+	undefinedUniqueArguments(s, i, s) //@suggestedfix("undefinedUniqueArguments", "quickfix")
+}
+
+func undefinedUniqueArguments(s1 string, i int, s2 string) {
+	panic("implement me!")
+}
+
+-- suggestedfix_unique_params_6_2 --
+package missingfunction
+
+func uniqueArguments() {
+	var s string
+	var i int
+	undefinedUniqueArguments(s, i, s) //@suggestedfix("undefinedUniqueArguments", "quickfix")
+}
+
+func undefinedUniqueArguments(s1 string, i int, s2 string) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/rename/generics/embedded.go b/internal/lsp/testdata/rename/generics/embedded.go
new file mode 100644
index 0000000..b44bab8
--- /dev/null
+++ b/internal/lsp/testdata/rename/generics/embedded.go
@@ -0,0 +1,10 @@
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type foo[P any] int //@rename("foo","bar")
+
+var x struct{ foo[int] }
+
+var _ = x.foo
diff --git a/internal/lsp/testdata/rename/generics/embedded.go.golden b/internal/lsp/testdata/rename/generics/embedded.go.golden
new file mode 100644
index 0000000..faa9afb
--- /dev/null
+++ b/internal/lsp/testdata/rename/generics/embedded.go.golden
@@ -0,0 +1,12 @@
+-- bar-rename --
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type bar[P any] int //@rename("foo","bar")
+
+var x struct{ bar[int] }
+
+var _ = x.bar
+
diff --git a/internal/lsp/testdata/rename/generics/generics.go b/internal/lsp/testdata/rename/generics/generics.go
new file mode 100644
index 0000000..977589c
--- /dev/null
+++ b/internal/lsp/testdata/rename/generics/generics.go
@@ -0,0 +1,25 @@
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type G[P any] struct {
+	F int
+}
+
+func (G[_]) M() {}
+
+func F[P any](P) {
+	var p P //@rename("P", "Q")
+	_ = p
+}
+
+func _() {
+	var x G[int] //@rename("G", "H")
+	_ = x.F      //@rename("F", "K")
+	x.M()        //@rename("M", "N")
+
+	var y G[string]
+	_ = y.F
+	y.M()
+}
diff --git a/internal/lsp/testdata/rename/generics/generics.go.golden b/internal/lsp/testdata/rename/generics/generics.go.golden
new file mode 100644
index 0000000..7d39813
--- /dev/null
+++ b/internal/lsp/testdata/rename/generics/generics.go.golden
@@ -0,0 +1,108 @@
+-- H-rename --
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type H[P any] struct {
+	F int
+}
+
+func (H[_]) M() {}
+
+func F[P any](P) {
+	var p P //@rename("P", "Q")
+	_ = p
+}
+
+func _() {
+	var x H[int] //@rename("G", "H")
+	_ = x.F      //@rename("F", "K")
+	x.M()        //@rename("M", "N")
+
+	var y H[string]
+	_ = y.F
+	y.M()
+}
+
+-- K-rename --
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type G[P any] struct {
+	K int
+}
+
+func (G[_]) M() {}
+
+func F[P any](P) {
+	var p P //@rename("P", "Q")
+	_ = p
+}
+
+func _() {
+	var x G[int] //@rename("G", "H")
+	_ = x.K      //@rename("F", "K")
+	x.M()        //@rename("M", "N")
+
+	var y G[string]
+	_ = y.K
+	y.M()
+}
+
+-- N-rename --
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type G[P any] struct {
+	F int
+}
+
+func (G[_]) N() {}
+
+func F[P any](P) {
+	var p P //@rename("P", "Q")
+	_ = p
+}
+
+func _() {
+	var x G[int] //@rename("G", "H")
+	_ = x.F      //@rename("F", "K")
+	x.N()        //@rename("M", "N")
+
+	var y G[string]
+	_ = y.F
+	y.N()
+}
+
+-- Q-rename --
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type G[P any] struct {
+	F int
+}
+
+func (G[_]) M() {}
+
+func F[Q any](Q) {
+	var p Q //@rename("P", "Q")
+	_ = p
+}
+
+func _() {
+	var x G[int] //@rename("G", "H")
+	_ = x.F      //@rename("F", "K")
+	x.M()        //@rename("M", "N")
+
+	var y G[string]
+	_ = y.F
+	y.M()
+}
+
diff --git a/internal/lsp/testdata/rename/generics/unions.go b/internal/lsp/testdata/rename/generics/unions.go
new file mode 100644
index 0000000..c737b5c
--- /dev/null
+++ b/internal/lsp/testdata/rename/generics/unions.go
@@ -0,0 +1,10 @@
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type T string //@rename("T", "R")
+
+type C interface {
+	T | ~int //@rename("T", "S")
+}
diff --git a/internal/lsp/testdata/rename/generics/unions.go.golden b/internal/lsp/testdata/rename/generics/unions.go.golden
new file mode 100644
index 0000000..4632896
--- /dev/null
+++ b/internal/lsp/testdata/rename/generics/unions.go.golden
@@ -0,0 +1,24 @@
+-- R-rename --
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type R string //@rename("T", "R")
+
+type C interface {
+	R | ~int //@rename("T", "S")
+}
+
+-- S-rename --
+//go:build go1.18
+// +build go1.18
+
+package generics
+
+type S string //@rename("T", "R")
+
+type C interface {
+	S | ~int //@rename("T", "S")
+}
+
diff --git a/internal/lsp/testdata/semantic/a.go.golden b/internal/lsp/testdata/semantic/a.go.golden
index 4bf70e5..4622ae4 100644
--- a/internal/lsp/testdata/semantic/a.go.golden
+++ b/internal/lsp/testdata/semantic/a.go.golden
@@ -2,7 +2,7 @@
 /*⇒7,keyword,[]*/package /*⇒14,namespace,[]*/semantictokens /*⇒16,comment,[]*///@ semantic("")
 
 /*⇒6,keyword,[]*/import (
-	_ "encoding/utf8"/*⇐4,namespace,[]*/
+	_ "encoding/utf8"
 	/*⇒3,namespace,[]*/utf "encoding/utf8"
 	"fmt"/*⇐3,namespace,[]*/ /*⇒19,comment,[]*///@ semantic("fmt")
 	. "fmt"
@@ -31,12 +31,12 @@
 }
 /*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/B /*⇒9,keyword,[]*/interface {
 	/*⇒1,type,[]*/A
-	/*⇒3,member,[definition]*/sad(/*⇒3,type,[defaultLibrary]*/int) /*⇒4,type,[defaultLibrary]*/bool
+	/*⇒3,method,[definition]*/sad(/*⇒3,type,[defaultLibrary]*/int) /*⇒4,type,[defaultLibrary]*/bool
 }
 
 /*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/F /*⇒3,type,[defaultLibrary]*/int
 
-/*⇒4,keyword,[]*/func (/*⇒1,variable,[]*/a /*⇒1,operator,[]*/*/*⇒1,type,[]*/A) /*⇒1,member,[definition]*/f() /*⇒4,type,[defaultLibrary]*/bool {
+/*⇒4,keyword,[]*/func (/*⇒1,variable,[]*/a /*⇒1,operator,[]*/*/*⇒1,type,[]*/A) /*⇒1,method,[definition]*/f() /*⇒4,type,[defaultLibrary]*/bool {
 	/*⇒3,keyword,[]*/var /*⇒1,variable,[definition]*/z /*⇒6,type,[defaultLibrary]*/string
 	/*⇒1,variable,[definition]*/x /*⇒2,operator,[]*/:= /*⇒5,string,[]*/"foo"
 	/*⇒1,variable,[]*/a(/*⇒1,variable,[]*/x)
diff --git a/internal/lsp/testdata/semantic/b.go.golden b/internal/lsp/testdata/semantic/b.go.golden
index 863a68c..203f6b1 100644
--- a/internal/lsp/testdata/semantic/b.go.golden
+++ b/internal/lsp/testdata/semantic/b.go.golden
@@ -31,6 +31,6 @@
 /*⇒4,keyword,[]*/type /*⇒2,type,[definition]*/CC /*⇒6,keyword,[]*/struct {
 	/*⇒2,variable,[definition]*/AA /*⇒3,type,[defaultLibrary]*/int
 }
-/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/D /*⇒4,keyword,[]*/func(/*⇒2,variable,[definition]*/aa /*⇒2,type,[]*/AA) (/*⇒2,variable,[definition]*/BB /*⇒5,type,[]*/error)
+/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/D /*⇒4,keyword,[]*/func(/*⇒2,parameter,[definition]*/aa /*⇒2,type,[]*/AA) (/*⇒2,parameter,[definition]*/BB /*⇒5,type,[]*/error)
 /*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/E /*⇒4,keyword,[]*/func(/*⇒2,type,[]*/AA) /*⇒2,type,[]*/BB
 
diff --git a/internal/lsp/testdata/signature/signature.go.golden b/internal/lsp/testdata/signature/signature.go.golden
index 486ca7f..d7a65b3 100644
--- a/internal/lsp/testdata/signature/signature.go.golden
+++ b/internal/lsp/testdata/signature/signature.go.golden
@@ -54,7 +54,7 @@
 myFunc(foo int) string
 
 -- panic(v interface{})-signature --
-panic(v interface{})
+panic(v any)
 
 The panic built-in function stops normal execution of the current goroutine.
 
diff --git a/internal/lsp/testdata/snippets/func_snippets118.go.in b/internal/lsp/testdata/snippets/func_snippets118.go.in
new file mode 100644
index 0000000..d493368
--- /dev/null
+++ b/internal/lsp/testdata/snippets/func_snippets118.go.in
@@ -0,0 +1,19 @@
+// +build go1.18
+//go:build go1.18
+
+package snippets
+
+type SyncMap[K comparable, V any] struct{}
+
+func NewSyncMap[K comparable, V any]() (result *SyncMap[K, V]) { //@item(NewSyncMap, "NewSyncMap", "", "")
+	return
+}
+
+func Identity[P ~int](p P) P { //@item(Identity, "Identity", "", "")
+	return p
+}
+
+func _() {
+	_ = NewSyncM //@snippet(" //", NewSyncMap, "NewSyncMap[${1:}]()", "NewSyncMap[${1:K comparable}, ${2:V any}]()")
+	_ = Identi //@snippet(" //", Identity, "Identity[${1:}](${2:})", "Identity[${1:P ~int}](${2:p P})")
+}
diff --git a/internal/lsp/testdata/snippets/literal_snippets.go.in b/internal/lsp/testdata/snippets/literal_snippets.go.in
index e1585dd..4a2a01d 100644
--- a/internal/lsp/testdata/snippets/literal_snippets.go.in
+++ b/internal/lsp/testdata/snippets/literal_snippets.go.in
@@ -2,6 +2,7 @@
 
 import (
 	"bytes"
+	"context"
 	"go/ast"
 	"net/http"
 	"sort"
@@ -137,14 +138,14 @@
 
 	sort.Slice(nil, fun) //@complete(")", litFunc),snippet(")", litFunc, "func(i, j int) bool {$0\\}", "func(i, j int) bool {$0\\}")
 
-	http.HandleFunc("", f) //@snippet(")", litFunc, "func(rw http.ResponseWriter, r *http.Request) {$0\\}", "func(${1:rw} http.ResponseWriter, ${2:r} *http.Request) {$0\\}")
+	http.HandleFunc("", f) //@snippet(")", litFunc, "func(w http.ResponseWriter, r *http.Request) {$0\\}", "func(${1:w} http.ResponseWriter, ${2:r} *http.Request) {$0\\}")
 
 	// no literal "func" completions
 	http.Handle("", fun) //@complete(")")
 
 	http.HandlerFunc() //@item(handlerFunc, "http.HandlerFunc()", "", "var")
 	http.Handle("", h) //@snippet(")", handlerFunc, "http.HandlerFunc($0)", "http.HandlerFunc($0)")
-	http.Handle("", http.HandlerFunc()) //@snippet("))", litFunc, "func(rw http.ResponseWriter, r *http.Request) {$0\\}", "func(${1:rw} http.ResponseWriter, ${2:r} *http.Request) {$0\\}")
+	http.Handle("", http.HandlerFunc()) //@snippet("))", litFunc, "func(w http.ResponseWriter, r *http.Request) {$0\\}", "func(${1:w} http.ResponseWriter, ${2:r} *http.Request) {$0\\}")
 
 	var namedReturn func(s string) (b bool)
 	namedReturn = f //@snippet(" //", litFunc, "func(s string) (b bool) {$0\\}", "func(s string) (b bool) {$0\\}")
@@ -167,6 +168,11 @@
 	builtinTypes = f //@snippet(" //", litFunc, "func(i1 []int, b [two]bool, m map[string]string, s struct{ i int \\}, i2 interface{ foo() \\}, c <-chan int) {$0\\}", "func(${1:i1} []int, ${2:b} [two]bool, ${3:m} map[string]string, ${4:s} struct{ i int \\}, ${5:i2} interface{ foo() \\}, ${6:c} <-chan int) {$0\\}")
 
 	var _ func(ast.Node) = f //@snippet(" //", litFunc, "func(n ast.Node) {$0\\}", "func(${1:n} ast.Node) {$0\\}")
+	var _ func(error) = f //@snippet(" //", litFunc, "func(err error) {$0\\}", "func(${1:err} error) {$0\\}")
+	var _ func(context.Context) = f //@snippet(" //", litFunc, "func(ctx context.Context) {$0\\}", "func(${1:ctx} context.Context) {$0\\}")
+
+	type context struct {}
+	var _ func(context) = f //@snippet(" //", litFunc, "func(ctx context) {$0\\}", "func(${1:ctx} context) {$0\\}")
 }
 
 func _() {
diff --git a/internal/lsp/testdata/snippets/literal_snippets118.go.in b/internal/lsp/testdata/snippets/literal_snippets118.go.in
new file mode 100644
index 0000000..8251a63
--- /dev/null
+++ b/internal/lsp/testdata/snippets/literal_snippets118.go.in
@@ -0,0 +1,14 @@
+// +build go1.18
+//go:build go1.18
+
+package snippets
+
+type Tree[T any] struct{}
+
+func (tree Tree[T]) Do(f func(s T)) {}
+
+func _() {
+    _ = "func(...) {}" //@item(litFunc, "func(...) {}", "", "var")
+	var t Tree[string]
+	t.Do(fun) //@complete(")", litFunc),snippet(")", litFunc, "func(s string) {$0\\}", "func(s string) {$0\\}")
+}
diff --git a/internal/lsp/testdata/stub/other/other.go b/internal/lsp/testdata/stub/other/other.go
new file mode 100644
index 0000000..ba3c174
--- /dev/null
+++ b/internal/lsp/testdata/stub/other/other.go
@@ -0,0 +1,10 @@
+package other
+
+import (
+	"bytes"
+	renamed_context "context"
+)
+
+type Interface interface {
+	Get(renamed_context.Context) *bytes.Buffer
+}
diff --git a/internal/lsp/testdata/stub/stub_add_selector.go b/internal/lsp/testdata/stub/stub_add_selector.go
new file mode 100644
index 0000000..a15afd7
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_add_selector.go
@@ -0,0 +1,12 @@
+package stub
+
+import "io"
+
+// This file tests that if an interface
+// method references a type from its own package
+// then our implementation must add the import/package selector
+// in the concrete method if the concrete type is outside of the interface
+// package
+var _ io.ReaderFrom = &readerFrom{} //@suggestedfix("&readerFrom", "refactor.rewrite")
+
+type readerFrom struct{}
diff --git a/internal/lsp/testdata/stub/stub_add_selector.go.golden b/internal/lsp/testdata/stub/stub_add_selector.go.golden
new file mode 100644
index 0000000..e885483
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_add_selector.go.golden
@@ -0,0 +1,19 @@
+-- suggestedfix_stub_add_selector_10_23 --
+package stub
+
+import "io"
+
+// This file tests that if an interface
+// method references a type from its own package
+// then our implementation must add the import/package selector
+// in the concrete method if the concrete type is outside of the interface
+// package
+var _ io.ReaderFrom = &readerFrom{} //@suggestedfix("&readerFrom", "refactor.rewrite")
+
+type readerFrom struct{}
+
+// ReadFrom implements io.ReaderFrom
+func (*readerFrom) ReadFrom(r io.Reader) (n int64, err error) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_assign.go b/internal/lsp/testdata/stub/stub_assign.go
new file mode 100644
index 0000000..9336361
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_assign.go
@@ -0,0 +1,10 @@
+package stub
+
+import "io"
+
+func main() {
+	var br io.ByteWriter
+	br = &byteWriter{} //@suggestedfix("&", "refactor.rewrite")
+}
+
+type byteWriter struct{}
diff --git a/internal/lsp/testdata/stub/stub_assign.go.golden b/internal/lsp/testdata/stub/stub_assign.go.golden
new file mode 100644
index 0000000..a52a823
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_assign.go.golden
@@ -0,0 +1,17 @@
+-- suggestedfix_stub_assign_7_7 --
+package stub
+
+import "io"
+
+func main() {
+	var br io.ByteWriter
+	br = &byteWriter{} //@suggestedfix("&", "refactor.rewrite")
+}
+
+type byteWriter struct{}
+
+// WriteByte implements io.ByteWriter
+func (*byteWriter) WriteByte(c byte) error {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_assign_multivars.go b/internal/lsp/testdata/stub/stub_assign_multivars.go
new file mode 100644
index 0000000..01b330f
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_assign_multivars.go
@@ -0,0 +1,11 @@
+package stub
+
+import "io"
+
+func main() {
+	var br io.ByteWriter
+	var i int
+	i, br = 1, &multiByteWriter{} //@suggestedfix("&", "refactor.rewrite")
+}
+
+type multiByteWriter struct{}
diff --git a/internal/lsp/testdata/stub/stub_assign_multivars.go.golden b/internal/lsp/testdata/stub/stub_assign_multivars.go.golden
new file mode 100644
index 0000000..e1e71ad
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_assign_multivars.go.golden
@@ -0,0 +1,18 @@
+-- suggestedfix_stub_assign_multivars_8_13 --
+package stub
+
+import "io"
+
+func main() {
+	var br io.ByteWriter
+	var i int
+	i, br = 1, &multiByteWriter{} //@suggestedfix("&", "refactor.rewrite")
+}
+
+type multiByteWriter struct{}
+
+// WriteByte implements io.ByteWriter
+func (*multiByteWriter) WriteByte(c byte) error {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_embedded.go b/internal/lsp/testdata/stub/stub_embedded.go
new file mode 100644
index 0000000..6d6a986
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_embedded.go
@@ -0,0 +1,15 @@
+package stub
+
+import (
+	"io"
+	"sort"
+)
+
+var _ embeddedInterface = (*embeddedConcrete)(nil) //@suggestedfix("(", "refactor.rewrite")
+
+type embeddedConcrete struct{}
+
+type embeddedInterface interface {
+	sort.Interface
+	io.Reader
+}
diff --git a/internal/lsp/testdata/stub/stub_embedded.go.golden b/internal/lsp/testdata/stub/stub_embedded.go.golden
new file mode 100644
index 0000000..c258eba
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_embedded.go.golden
@@ -0,0 +1,37 @@
+-- suggestedfix_stub_embedded_8_27 --
+package stub
+
+import (
+	"io"
+	"sort"
+)
+
+var _ embeddedInterface = (*embeddedConcrete)(nil) //@suggestedfix("(", "refactor.rewrite")
+
+type embeddedConcrete struct{}
+
+// Len implements embeddedInterface
+func (*embeddedConcrete) Len() int {
+	panic("unimplemented")
+}
+
+// Less implements embeddedInterface
+func (*embeddedConcrete) Less(i int, j int) bool {
+	panic("unimplemented")
+}
+
+// Swap implements embeddedInterface
+func (*embeddedConcrete) Swap(i int, j int) {
+	panic("unimplemented")
+}
+
+// Read implements embeddedInterface
+func (*embeddedConcrete) Read(p []byte) (n int, err error) {
+	panic("unimplemented")
+}
+
+type embeddedInterface interface {
+	sort.Interface
+	io.Reader
+}
+
diff --git a/internal/lsp/testdata/stub/stub_err.go b/internal/lsp/testdata/stub/stub_err.go
new file mode 100644
index 0000000..908c7d3
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_err.go
@@ -0,0 +1,7 @@
+package stub
+
+func main() {
+	var br error = &customErr{} //@suggestedfix("&", "refactor.rewrite")
+}
+
+type customErr struct{}
diff --git a/internal/lsp/testdata/stub/stub_err.go.golden b/internal/lsp/testdata/stub/stub_err.go.golden
new file mode 100644
index 0000000..717aed8
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_err.go.golden
@@ -0,0 +1,14 @@
+-- suggestedfix_stub_err_4_17 --
+package stub
+
+func main() {
+	var br error = &customErr{} //@suggestedfix("&", "refactor.rewrite")
+}
+
+type customErr struct{}
+
+// Error implements error
+func (*customErr) Error() string {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_function_return.go b/internal/lsp/testdata/stub/stub_function_return.go
new file mode 100644
index 0000000..bbf0588
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_function_return.go
@@ -0,0 +1,11 @@
+package stub
+
+import (
+	"io"
+)
+
+func newCloser() io.Closer {
+	return closer{} //@suggestedfix("c", "refactor.rewrite")
+}
+
+type closer struct{}
diff --git a/internal/lsp/testdata/stub/stub_function_return.go.golden b/internal/lsp/testdata/stub/stub_function_return.go.golden
new file mode 100644
index 0000000..f80874d
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_function_return.go.golden
@@ -0,0 +1,18 @@
+-- suggestedfix_stub_function_return_8_9 --
+package stub
+
+import (
+	"io"
+)
+
+func newCloser() io.Closer {
+	return closer{} //@suggestedfix("c", "refactor.rewrite")
+}
+
+type closer struct{}
+
+// Close implements io.Closer
+func (closer) Close() error {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_generic_receiver.go b/internal/lsp/testdata/stub/stub_generic_receiver.go
new file mode 100644
index 0000000..64e90fc
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_generic_receiver.go
@@ -0,0 +1,15 @@
+//go:build go1.18
+// +build go1.18
+
+package stub
+
+import "io"
+
+// This file tests that the stub method generator accounts for concrete
+// types that have type parameters defined.
+var _ io.ReaderFrom = &genReader[string, int]{} //@suggestedfix("&genReader", "refactor.rewrite")
+
+type genReader[T, Y any] struct {
+	T T
+	Y Y
+}
diff --git a/internal/lsp/testdata/stub/stub_generic_receiver.go.golden b/internal/lsp/testdata/stub/stub_generic_receiver.go.golden
new file mode 100644
index 0000000..1fc7157
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_generic_receiver.go.golden
@@ -0,0 +1,22 @@
+-- suggestedfix_stub_generic_receiver_10_23 --
+//go:build go1.18
+// +build go1.18
+
+package stub
+
+import "io"
+
+// This file tests that the stub method generator accounts for concrete
+// types that have type parameters defined.
+var _ io.ReaderFrom = &genReader[string, int]{} //@suggestedfix("&genReader", "refactor.rewrite")
+
+type genReader[T, Y any] struct {
+	T T
+	Y Y
+}
+
+// ReadFrom implements io.ReaderFrom
+func (*genReader[T, Y]) ReadFrom(r io.Reader) (n int64, err error) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_ignored_imports.go b/internal/lsp/testdata/stub/stub_ignored_imports.go
new file mode 100644
index 0000000..8f6ec73
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_ignored_imports.go
@@ -0,0 +1,18 @@
+package stub
+
+import (
+	"compress/zlib"
+	. "io"
+	_ "io"
+)
+
+// This file tests that dot-imports and underscore imports
+// are properly ignored and that a new import is added to
+// reference method types
+
+var (
+	_ Reader
+	_ zlib.Resetter = (*ignoredResetter)(nil) //@suggestedfix("(", "refactor.rewrite")
+)
+
+type ignoredResetter struct{}
diff --git a/internal/lsp/testdata/stub/stub_ignored_imports.go.golden b/internal/lsp/testdata/stub/stub_ignored_imports.go.golden
new file mode 100644
index 0000000..a0ddc17
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_ignored_imports.go.golden
@@ -0,0 +1,26 @@
+-- suggestedfix_stub_ignored_imports_15_20 --
+package stub
+
+import (
+	"compress/zlib"
+	"io"
+	. "io"
+	_ "io"
+)
+
+// This file tests that dot-imports and underscore imports
+// are properly ignored and that a new import is added to
+// reference method types
+
+var (
+	_ Reader
+	_ zlib.Resetter = (*ignoredResetter)(nil) //@suggestedfix("(", "refactor.rewrite")
+)
+
+type ignoredResetter struct{}
+
+// Reset implements zlib.Resetter
+func (*ignoredResetter) Reset(r io.Reader, dict []byte) error {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_multi_var.go b/internal/lsp/testdata/stub/stub_multi_var.go
new file mode 100644
index 0000000..4276b79
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_multi_var.go
@@ -0,0 +1,11 @@
+package stub
+
+import "io"
+
+// This test ensures that a variable declaration that
+// has multiple values on the same line can still be
+// analyzed correctly to target the interface implementation
+// diagnostic.
+var one, two, three io.Reader = nil, &multiVar{}, nil //@suggestedfix("&", "refactor.rewrite")
+
+type multiVar struct{}
diff --git a/internal/lsp/testdata/stub/stub_multi_var.go.golden b/internal/lsp/testdata/stub/stub_multi_var.go.golden
new file mode 100644
index 0000000..b9ac423
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_multi_var.go.golden
@@ -0,0 +1,18 @@
+-- suggestedfix_stub_multi_var_9_38 --
+package stub
+
+import "io"
+
+// This test ensures that a variable declaration that
+// has multiple values on the same line can still be
+// analyzed correctly to target the interface implementation
+// diagnostic.
+var one, two, three io.Reader = nil, &multiVar{}, nil //@suggestedfix("&", "refactor.rewrite")
+
+type multiVar struct{}
+
+// Read implements io.Reader
+func (*multiVar) Read(p []byte) (n int, err error) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_pointer.go b/internal/lsp/testdata/stub/stub_pointer.go
new file mode 100644
index 0000000..2b3681b
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_pointer.go
@@ -0,0 +1,9 @@
+package stub
+
+import "io"
+
+func getReaderFrom() io.ReaderFrom {
+	return &pointerImpl{} //@suggestedfix("&", "refactor.rewrite")
+}
+
+type pointerImpl struct{}
diff --git a/internal/lsp/testdata/stub/stub_pointer.go.golden b/internal/lsp/testdata/stub/stub_pointer.go.golden
new file mode 100644
index 0000000..c4133d7
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_pointer.go.golden
@@ -0,0 +1,16 @@
+-- suggestedfix_stub_pointer_6_9 --
+package stub
+
+import "io"
+
+func getReaderFrom() io.ReaderFrom {
+	return &pointerImpl{} //@suggestedfix("&", "refactor.rewrite")
+}
+
+type pointerImpl struct{}
+
+// ReadFrom implements io.ReaderFrom
+func (*pointerImpl) ReadFrom(r io.Reader) (n int64, err error) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_renamed_import.go b/internal/lsp/testdata/stub/stub_renamed_import.go
new file mode 100644
index 0000000..eaebe25
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_renamed_import.go
@@ -0,0 +1,11 @@
+package stub
+
+import (
+	"compress/zlib"
+	myio "io"
+)
+
+var _ zlib.Resetter = &myIO{} //@suggestedfix("&", "refactor.rewrite")
+var _ myio.Reader
+
+type myIO struct{}
diff --git a/internal/lsp/testdata/stub/stub_renamed_import.go.golden b/internal/lsp/testdata/stub/stub_renamed_import.go.golden
new file mode 100644
index 0000000..48ff4f1
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_renamed_import.go.golden
@@ -0,0 +1,18 @@
+-- suggestedfix_stub_renamed_import_8_23 --
+package stub
+
+import (
+	"compress/zlib"
+	myio "io"
+)
+
+var _ zlib.Resetter = &myIO{} //@suggestedfix("&", "refactor.rewrite")
+var _ myio.Reader
+
+type myIO struct{}
+
+// Reset implements zlib.Resetter
+func (*myIO) Reset(r myio.Reader, dict []byte) error {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_renamed_import_iface.go b/internal/lsp/testdata/stub/stub_renamed_import_iface.go
new file mode 100644
index 0000000..96caf54
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_renamed_import_iface.go
@@ -0,0 +1,13 @@
+package stub
+
+import (
+	"golang.org/x/tools/internal/lsp/stub/other"
+)
+
+// This file tests that if an interface
+// method references an import from its own package
+// that the concrete type does not yet import, and that import happens
+// to be renamed, then we prefer the renaming of the interface.
+var _ other.Interface = &otherInterfaceImpl{} //@suggestedfix("&otherInterfaceImpl", "refactor.rewrite")
+
+type otherInterfaceImpl struct{}
diff --git a/internal/lsp/testdata/stub/stub_renamed_import_iface.go.golden b/internal/lsp/testdata/stub/stub_renamed_import_iface.go.golden
new file mode 100644
index 0000000..9ba2cb4
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_renamed_import_iface.go.golden
@@ -0,0 +1,22 @@
+-- suggestedfix_stub_renamed_import_iface_11_25 --
+package stub
+
+import (
+	"bytes"
+	renamed_context "context"
+	"golang.org/x/tools/internal/lsp/stub/other"
+)
+
+// This file tests that if an interface
+// method references an import from its own package
+// that the concrete type does not yet import, and that import happens
+// to be renamed, then we prefer the renaming of the interface.
+var _ other.Interface = &otherInterfaceImpl{} //@suggestedfix("&otherInterfaceImpl", "refactor.rewrite")
+
+type otherInterfaceImpl struct{}
+
+// Get implements other.Interface
+func (*otherInterfaceImpl) Get(renamed_context.Context) *bytes.Buffer {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/stub/stub_stdlib.go b/internal/lsp/testdata/stub/stub_stdlib.go
new file mode 100644
index 0000000..0d54a6d
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_stdlib.go
@@ -0,0 +1,9 @@
+package stub
+
+import (
+	"io"
+)
+
+var _ io.Writer = writer{} //@suggestedfix("w", "refactor.rewrite")
+
+type writer struct{}
diff --git a/internal/lsp/testdata/stub/stub_stdlib.go.golden b/internal/lsp/testdata/stub/stub_stdlib.go.golden
new file mode 100644
index 0000000..8636cea
--- /dev/null
+++ b/internal/lsp/testdata/stub/stub_stdlib.go.golden
@@ -0,0 +1,16 @@
+-- suggestedfix_stub_stdlib_7_19 --
+package stub
+
+import (
+	"io"
+)
+
+var _ io.Writer = writer{} //@suggestedfix("w", "refactor.rewrite")
+
+type writer struct{}
+
+// Write implements io.Writer
+func (writer) Write(p []byte) (n int, err error) {
+	panic("unimplemented")
+}
+
diff --git a/internal/lsp/testdata/summary.txt.golden b/internal/lsp/testdata/summary.txt.golden
index 7143365..2949392 100644
--- a/internal/lsp/testdata/summary.txt.golden
+++ b/internal/lsp/testdata/summary.txt.golden
@@ -2,7 +2,7 @@
 CallHierarchyCount = 2
 CodeLensCount = 5
 CompletionsCount = 265
-CompletionSnippetCount = 103
+CompletionSnippetCount = 106
 UnimportedCompletionsCount = 5
 DeepCompletionsCount = 5
 FuzzyCompletionsCount = 8
@@ -13,8 +13,8 @@
 FormatCount = 6
 ImportCount = 8
 SemanticTokenCount = 3
-SuggestedFixCount = 40
-FunctionExtractionCount = 24
+SuggestedFixCount = 61
+FunctionExtractionCount = 25
 MethodExtractionCount = 6
 DefinitionsCount = 95
 TypeDefinitionsCount = 18
diff --git a/internal/lsp/testdata/summary_generics.txt.golden b/internal/lsp/testdata/summary_go1.18.txt.golden
similarity index 73%
rename from internal/lsp/testdata/summary_generics.txt.golden
rename to internal/lsp/testdata/summary_go1.18.txt.golden
index f93110a..4863989 100644
--- a/internal/lsp/testdata/summary_generics.txt.golden
+++ b/internal/lsp/testdata/summary_go1.18.txt.golden
@@ -1,26 +1,26 @@
 -- summary --
 CallHierarchyCount = 2
 CodeLensCount = 5
-CompletionsCount = 265
-CompletionSnippetCount = 103
+CompletionsCount = 266
+CompletionSnippetCount = 110
 UnimportedCompletionsCount = 5
 DeepCompletionsCount = 5
 FuzzyCompletionsCount = 8
-RankedCompletionsCount = 163
+RankedCompletionsCount = 169
 CaseSensitiveCompletionsCount = 4
 DiagnosticsCount = 37
 FoldingRangesCount = 2
 FormatCount = 6
 ImportCount = 8
 SemanticTokenCount = 3
-SuggestedFixCount = 40
-FunctionExtractionCount = 24
+SuggestedFixCount = 62
+FunctionExtractionCount = 25
 MethodExtractionCount = 6
-DefinitionsCount = 99
+DefinitionsCount = 108
 TypeDefinitionsCount = 18
 HighlightsCount = 69
 ReferencesCount = 27
-RenamesCount = 37
+RenamesCount = 48
 PrepareRenamesCount = 7
 SymbolsCount = 5
 WorkspaceSymbolsCount = 20
diff --git a/internal/lsp/testdata/typeparams/type_params.go b/internal/lsp/testdata/typeparams/type_params.go
new file mode 100644
index 0000000..1dfb103
--- /dev/null
+++ b/internal/lsp/testdata/typeparams/type_params.go
@@ -0,0 +1,33 @@
+//go:build go1.18
+// +build go1.18
+
+package typeparams
+
+func one[a int | string]()            {}
+func two[a int | string, b float64 | int]() {}
+
+func _() {
+	one[]() //@rank("]", string, float64)
+	two[]() //@rank("]", int, float64)
+	two[int, f]() //@rank("]", float64, float32)
+}
+
+func slices[a []int | []float64]() {} //@item(tpInts, "[]int", "[]int", "type"),item(tpFloats, "[]float64", "[]float64", "type")
+
+func _() {
+	slices[]() //@rank("]", tpInts),rank("]", tpFloats)
+}
+
+type s[a int | string] struct{}
+
+func _() {
+	s[]{} //@rank("]", int, float64)
+}
+
+func returnTP[A int | float64](a A) A { //@item(returnTP, "returnTP", "something", "func")
+	return a
+}
+
+func _() {
+	var _ int = returnTP //@snippet(" //", returnTP, "returnTP[${1:}](${2:})", "returnTP[${1:A int|float64}](${2:a A})")
+}
diff --git a/internal/lsp/tests/tests.go b/internal/lsp/tests/tests.go
index d5db454..6a77fc7 100644
--- a/internal/lsp/tests/tests.go
+++ b/internal/lsp/tests/tests.go
@@ -12,6 +12,7 @@
 	"fmt"
 	"go/ast"
 	"go/token"
+	"io"
 	"io/ioutil"
 	"os"
 	"path/filepath"
@@ -47,7 +48,7 @@
 
 func init() {
 	if typeparams.Enabled {
-		summaryFile = "summary_generics.txt"
+		summaryFile = "summary_go1.18.txt"
 	}
 }
 
@@ -84,6 +85,7 @@
 type Signatures map[span.Span]*protocol.SignatureHelp
 type Links map[span.URI][]Link
 type AddImport map[span.URI]string
+type Hovers map[span.Span]string
 
 type Data struct {
 	Config                   packages.Config
@@ -119,6 +121,7 @@
 	Signatures               Signatures
 	Links                    Links
 	AddImport                AddImport
+	Hovers                   Hovers
 
 	t         testing.TB
 	fragments map[string]string
@@ -161,6 +164,7 @@
 	SignatureHelp(*testing.T, span.Span, *protocol.SignatureHelp)
 	Link(*testing.T, span.URI, []Link)
 	AddImport(*testing.T, span.URI, string)
+	Hover(*testing.T, span.Span, string)
 }
 
 type Definition struct {
@@ -248,6 +252,7 @@
 			protocol.SourceOrganizeImports: true,
 		},
 		source.Sum:  {},
+		source.Work: {},
 		source.Tmpl: {},
 	}
 	o.UserOptions.Codelenses[string(command.Test)] = true
@@ -267,20 +272,18 @@
 	}
 	for _, mode := range modes {
 		t.Run(mode, func(t *testing.T) {
-			t.Helper()
 			if mode == "MultiModule" {
 				// Some bug in 1.12 breaks reading markers, and it's not worth figuring out.
 				testenv.NeedsGo1Point(t, 13)
 			}
 			datum := load(t, mode, dataDir)
+			t.Helper()
 			f(t, datum)
 		})
 	}
 }
 
 func load(t testing.TB, mode string, dir string) *Data {
-	t.Helper()
-
 	datum := &Data{
 		CallHierarchy:            make(CallHierarchy),
 		CodeLens:                 make(CodeLens),
@@ -309,6 +312,7 @@
 		Signatures:               make(Signatures),
 		Links:                    make(Links),
 		AddImport:                make(AddImport),
+		Hovers:                   make(Hovers),
 
 		t:         t,
 		dir:       dir,
@@ -459,7 +463,8 @@
 		"godef":           datum.collectDefinitions,
 		"implementations": datum.collectImplementations,
 		"typdef":          datum.collectTypeDefinitions,
-		"hover":           datum.collectHoverDefinitions,
+		"hoverdef":        datum.collectHoverDefinitions,
+		"hover":           datum.collectHovers,
 		"highlight":       datum.collectHighlights,
 		"refs":            datum.collectReferences,
 		"rename":          datum.collectRenames,
@@ -485,7 +490,7 @@
 	// Collect names for the entries that require golden files.
 	if err := datum.Exported.Expect(map[string]interface{}{
 		"godef":                        datum.collectDefinitionNames,
-		"hover":                        datum.collectDefinitionNames,
+		"hoverdef":                     datum.collectDefinitionNames,
 		"workspacesymbol":              datum.collectWorkspaceSymbols(WorkspaceSymbolsDefault),
 		"workspacesymbolfuzzy":         datum.collectWorkspaceSymbols(WorkspaceSymbolsFuzzy),
 		"workspacesymbolcasesensitive": datum.collectWorkspaceSymbols(WorkspaceSymbolsCaseSensitive),
@@ -493,7 +498,7 @@
 		t.Fatal(err)
 	}
 	if mode == "MultiModule" {
-		if err := os.Rename(filepath.Join(datum.Config.Dir, "go.mod"), filepath.Join(datum.Config.Dir, "testmodule/go.mod")); err != nil {
+		if err := moveFile(filepath.Join(datum.Config.Dir, "go.mod"), filepath.Join(datum.Config.Dir, "testmodule/go.mod")); err != nil {
 			t.Fatal(err)
 		}
 	}
@@ -501,6 +506,43 @@
 	return datum
 }
 
+// moveFile moves the file at oldpath to newpath, by renaming if possible
+// or copying otherwise.
+func moveFile(oldpath, newpath string) (err error) {
+	renameErr := os.Rename(oldpath, newpath)
+	if renameErr == nil {
+		return nil
+	}
+
+	src, err := os.Open(oldpath)
+	if err != nil {
+		return err
+	}
+	defer func() {
+		src.Close()
+		if err == nil {
+			err = os.Remove(oldpath)
+		}
+	}()
+
+	perm := os.ModePerm
+	fi, err := src.Stat()
+	if err == nil {
+		perm = fi.Mode().Perm()
+	}
+
+	dst, err := os.OpenFile(newpath, os.O_WRONLY|os.O_CREATE|os.O_EXCL, perm)
+	if err != nil {
+		return err
+	}
+
+	_, err = io.Copy(dst, src)
+	if closeErr := dst.Close(); err == nil {
+		err = closeErr
+	}
+	return err
+}
+
 func Run(t *testing.T, tests Tests, data *Data) {
 	t.Helper()
 	checkData(t, data)
@@ -730,6 +772,16 @@
 		}
 	})
 
+	t.Run("Hover", func(t *testing.T) {
+		t.Helper()
+		for pos, info := range data.Hovers {
+			t.Run(SpanName(pos), func(t *testing.T) {
+				t.Helper()
+				tests.Hover(t, pos, info)
+			})
+		}
+	})
+
 	t.Run("References", func(t *testing.T) {
 		t.Helper()
 		for src, itemList := range data.References {
@@ -1222,6 +1274,10 @@
 	}
 }
 
+func (data *Data) collectHovers(src span.Span, expected string) {
+	data.Hovers[src] = expected
+}
+
 func (data *Data) collectTypeDefinitions(src, target span.Span) {
 	data.Definitions[src] = Definition{
 		Src:    src,
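
The tests.go hunks above rename the golden-backed marker to `hoverdef` and reuse the name `hover` for a new marker whose expected text is stored directly in `data.Hovers` (see `collectHovers`) and checked by the new `Hover` subtest in `Run`. A minimal, hypothetical sketch of how the two markers might sit side by side in testdata — the identifier and hover string below are invented for illustration, not taken from the repository:

```go
package example

// "hoverdef" keeps the old golden-file comparison at a definition site, while
// the new "hover" marker records its expected text inline; collectHovers
// stores that string in data.Hovers for the Hover subtest.
// (Identifier and expected text are illustrative only.)
const Answer = 42 //@mark(Answer, "Answer"),hoverdef("Answer", Answer),hover("Answer", "const Answer untyped int = 42")
```
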
diff --git a/internal/lsp/tests/util.go b/internal/lsp/tests/util.go
index 94c948d..11dda1f 100644
--- a/internal/lsp/tests/util.go
+++ b/internal/lsp/tests/util.go
@@ -252,7 +252,7 @@
 	}
 	g := got.Signatures[0]
 	w := want.Signatures[0]
-	if w.Label != g.Label {
+	if NormalizeAny(w.Label) != NormalizeAny(g.Label) {
 		wLabel := w.Label + "\n"
 		d, err := myers.ComputeEdits("", wLabel, g.Label+"\n")
 		if err != nil {
@@ -271,6 +271,14 @@
 	return "", nil
 }
 
+// NormalizeAny replaces occurrences of interface{} in input with any.
+//
+// In Go 1.18, standard library functions were changed to use the 'any'
+// alias in place of interface{}, which affects their type string.
+func NormalizeAny(input string) string {
+	return strings.ReplaceAll(input, "interface{}", "any")
+}
+
 // DiffCallHierarchyItems returns the diff between expected and actual call locations for incoming/outgoing call hierarchies
 func DiffCallHierarchyItems(gotCalls []protocol.CallHierarchyItem, expectedCalls []protocol.CallHierarchyItem) string {
 	expected := make(map[protocol.Location]bool)
@@ -369,7 +377,7 @@
 	for _, w := range want {
 		var found bool
 		for i, g := range got {
-			if w.Label == g.Label && w.Detail == g.Detail && w.Kind == g.Kind {
+			if w.Label == g.Label && NormalizeAny(w.Detail) == NormalizeAny(g.Detail) && w.Kind == g.Kind {
 				matchedIdxs = append(matchedIdxs, i)
 				found = true
 
@@ -444,7 +452,7 @@
 		if w.Label != g.Label {
 			return summarizeCompletionItems(i, want, got, "incorrect Label got %v want %v", g.Label, w.Label)
 		}
-		if w.Detail != g.Detail {
+		if NormalizeAny(w.Detail) != NormalizeAny(g.Detail) {
 			return summarizeCompletionItems(i, want, got, "incorrect Detail got %v want %v", g.Detail, w.Detail)
 		}
 		if w.Documentation != "" && !strings.HasPrefix(w.Documentation, "@") {
@@ -551,3 +559,22 @@
 	}
 	return fmt.Sprintf("%q", diff.ToUnified("want", "got", want, d))
 }
+
+// StripSubscripts removes type parameter id subscripts.
+//
+// TODO(rfindley): remove this function once subscripts are removed from the
+// type parameter type string.
+func StripSubscripts(s string) string {
+	var runes []rune
+	for _, r := range s {
+		// For debugging/uniqueness purposes, TypeString on a type parameter adds a
+		// subscript corresponding to the type parameter's unique id. This is going
+		// to be removed, but in the meantime we skip the subscript runes to get a
+		// deterministic output.
+		if '₀' <= r && r < '₀'+10 {
+			continue // trim type parameter subscripts
+		}
+		runes = append(runes, r)
+	}
+	return string(runes)
+}
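
`NormalizeAny` and `StripSubscripts` above exist so that completion and signature comparisons stay stable across Go 1.18's `any` alias and the temporary type-parameter subscripts produced by `types.TypeString`. A standalone sketch, restating the two helpers so their effect can be seen in isolation (names here are lower-cased copies, not the exported functions themselves):

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeAny mirrors tests.NormalizeAny: it maps the pre-1.18 spelling
// interface{} to the 'any' alias before strings are compared.
func normalizeAny(s string) string {
	return strings.ReplaceAll(s, "interface{}", "any")
}

// stripSubscripts mirrors tests.StripSubscripts: it drops the Unicode
// subscript digits (U+2080..U+2089) appended to type parameter names.
func stripSubscripts(s string) string {
	var runes []rune
	for _, r := range s {
		if '₀' <= r && r < '₀'+10 {
			continue // trim type parameter subscripts
		}
		runes = append(runes, r)
	}
	return string(runes)
}

func main() {
	fmt.Println(normalizeAny("panic(v interface{})"))
	// prints: panic(v any)
	fmt.Println(stripSubscripts("func[S₁ interface{~[]E₂}](s S₁) S₁"))
	// prints: func[S interface{~[]E}](s S) S
}
```
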
diff --git a/internal/lsp/work/completion.go b/internal/lsp/work/completion.go
new file mode 100644
index 0000000..60b69f1
--- /dev/null
+++ b/internal/lsp/work/completion.go
@@ -0,0 +1,159 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package work
+
+import (
+	"context"
+	"go/token"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"golang.org/x/tools/internal/event"
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/source"
+	errors "golang.org/x/xerrors"
+)
+
+func Completion(ctx context.Context, snapshot source.Snapshot, fh source.VersionedFileHandle, position protocol.Position) (*protocol.CompletionList, error) {
+	ctx, done := event.Start(ctx, "work.Completion")
+	defer done()
+
+	// Get the position of the cursor.
+	pw, err := snapshot.ParseWork(ctx, fh)
+	if err != nil {
+		return nil, errors.Errorf("getting go.work file handle: %w", err)
+	}
+	spn, err := pw.Mapper.PointSpan(position)
+	if err != nil {
+		return nil, errors.Errorf("computing cursor position: %w", err)
+	}
+	rng, err := spn.Range(pw.Mapper.Converter)
+	if err != nil {
+		return nil, errors.Errorf("computing range: %w", err)
+	}
+
+	// Find the use statement the user is in.
+	cursor := rng.Start - 1
+	use, pathStart, _ := usePath(pw, cursor)
+	if use == nil {
+		return &protocol.CompletionList{}, nil
+	}
+	completingFrom := use.Path[:cursor-token.Pos(pathStart)]
+
+	// We're going to find the completions of the user input
+	// (completingFrom) by doing a walk on the innermost directory
+	// of the given path, and comparing the found paths to make sure
+	// that they match the component of the path after the
+	// innermost directory.
+	//
+	// We'll maintain two paths when doing this: pathPrefixSlash
+	// is essentially the path the user typed in, and pathPrefixAbs
+	// is the path made absolute from the go.work directory.
+
+	pathPrefixSlash := completingFrom
+	pathPrefixAbs := filepath.FromSlash(pathPrefixSlash)
+	if !filepath.IsAbs(pathPrefixAbs) {
+		pathPrefixAbs = filepath.Join(filepath.Dir(pw.URI.Filename()), pathPrefixAbs)
+	}
+
+	// pathPrefixDir is the directory that will be walked to find matches.
+	// If pathPrefixSlash is not explicitly a directory boundary (that is, it is neither
+	// equivalent to "." nor ends in a separator), we need to examine its parent directory
+	// to find sibling files that match.
+	depthBound := 5
+	pathPrefixDir, pathPrefixBase := pathPrefixAbs, ""
+	pathPrefixSlashDir := pathPrefixSlash
+	if filepath.Clean(pathPrefixSlash) != "." && !strings.HasSuffix(pathPrefixSlash, "/") {
+		depthBound++
+		pathPrefixDir, pathPrefixBase = filepath.Split(pathPrefixAbs)
+		pathPrefixSlashDir = dirNonClean(pathPrefixSlash)
+	}
+
+	var completions []string
+	// Stop traversing deeper once we've hit 10k files to try to stay generally under 100ms.
+	const numSeenBound = 10000
+	var numSeen int
+	stopWalking := errors.New("hit numSeenBound")
+	err = filepath.Walk(pathPrefixDir, func(wpath string, info os.FileInfo, err error) error {
+		if numSeen > numSeenBound {
+			// Stop traversing if we hit bound.
+			return stopWalking
+		}
+		numSeen++
+
+		// rel is the path relative to pathPrefixDir.
+		// Make sure that it has pathPrefixBase as a prefix
+		// otherwise it won't match the beginning of the
+		// base component of the path the user typed in.
+		rel := strings.TrimPrefix(wpath[len(pathPrefixDir):], string(filepath.Separator))
+		if info.IsDir() && wpath != pathPrefixDir && !strings.HasPrefix(rel, pathPrefixBase) {
+			return filepath.SkipDir
+		}
+
+		// Check for a match (a module directory).
+		if filepath.Base(rel) == "go.mod" {
+			relDir := strings.TrimSuffix(dirNonClean(rel), string(os.PathSeparator))
+			completionPath := join(pathPrefixSlashDir, filepath.ToSlash(relDir))
+
+			if !strings.HasPrefix(completionPath, completingFrom) {
+				return nil
+			}
+			if strings.HasSuffix(completionPath, "/") {
+				// Don't suggest paths that end in "/". This happens
+				// when the input is a path that ends in "/" and
+				// the completion is empty.
+				return nil
+			}
+			completion := completionPath[len(completingFrom):]
+			if completingFrom == "" && !strings.HasPrefix(completion, "./") {
+				// Bias towards "./" prefixes.
+				completion = join(".", completion)
+			}
+
+			completions = append(completions, completion)
+		}
+
+		if depth := strings.Count(rel, string(filepath.Separator)); depth >= depthBound {
+			return filepath.SkipDir
+		}
+		return nil
+	})
+	if err != nil && !errors.Is(err, stopWalking) {
+		return nil, errors.Errorf("walking to find completions: %w", err)
+	}
+
+	sort.Strings(completions)
+
+	var items []protocol.CompletionItem
+	for _, c := range completions {
+		items = append(items, protocol.CompletionItem{
+			Label:      c,
+			InsertText: c,
+		})
+	}
+	return &protocol.CompletionList{Items: items}, nil
+}
+
+// dirNonClean is filepath.Dir, without the Clean at the end.
+func dirNonClean(path string) string {
+	vol := filepath.VolumeName(path)
+	i := len(path) - 1
+	for i >= len(vol) && !os.IsPathSeparator(path[i]) {
+		i--
+	}
+	return path[len(vol) : i+1]
+}
+
+func join(a, b string) string {
+	if a == "" {
+		return b
+	}
+	if b == "" {
+		return a
+	}
+	return strings.TrimSuffix(a, "/") + "/" + b
+}
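
The completion logic above boils down to: walk below a directory, collect the subdirectories that contain a go.mod, and keep those whose slash-separated relative path extends what the user has typed. A minimal self-contained sketch of that idea using only the standard library (the root directory and typed prefix are hypothetical inputs, not the gopls API):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// moduleDirCompletions walks root and returns the slash-separated relative
// paths of directories containing a go.mod file that start with prefix.
func moduleDirCompletions(root, prefix string) ([]string, error) {
	var out []string
	err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() || filepath.Base(path) != "go.mod" {
			return nil
		}
		rel, err := filepath.Rel(root, filepath.Dir(path))
		if err != nil {
			return err
		}
		candidate := filepath.ToSlash(rel)
		if strings.HasPrefix(candidate, prefix) {
			out = append(out, candidate)
		}
		return nil
	})
	return out, err
}

func main() {
	// Hypothetical inputs: complete the prefix "to" inside a workspace rooted at ".".
	matches, err := moduleDirCompletions(".", "to")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	fmt.Println(matches)
}
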
diff --git a/internal/lsp/work/diagnostics.go b/internal/lsp/work/diagnostics.go
new file mode 100644
index 0000000..e583e60
--- /dev/null
+++ b/internal/lsp/work/diagnostics.go
@@ -0,0 +1,93 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package work
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"path/filepath"
+
+	"golang.org/x/mod/modfile"
+	"golang.org/x/tools/internal/event"
+	"golang.org/x/tools/internal/lsp/debug/tag"
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/source"
+	"golang.org/x/tools/internal/span"
+)
+
+func Diagnostics(ctx context.Context, snapshot source.Snapshot) (map[source.VersionedFileIdentity][]*source.Diagnostic, error) {
+	ctx, done := event.Start(ctx, "work.Diagnostics", tag.Snapshot.Of(snapshot.ID()))
+	defer done()
+
+	reports := map[source.VersionedFileIdentity][]*source.Diagnostic{}
+	uri := snapshot.WorkFile()
+	if uri == "" {
+		return nil, nil
+	}
+	fh, err := snapshot.GetVersionedFile(ctx, uri)
+	if err != nil {
+		return nil, err
+	}
+	reports[fh.VersionedFileIdentity()] = []*source.Diagnostic{}
+	diagnostics, err := DiagnosticsForWork(ctx, snapshot, fh)
+	if err != nil {
+		return nil, err
+	}
+	for _, d := range diagnostics {
+		fh, err := snapshot.GetVersionedFile(ctx, d.URI)
+		if err != nil {
+			return nil, err
+		}
+		reports[fh.VersionedFileIdentity()] = append(reports[fh.VersionedFileIdentity()], d)
+	}
+
+	return reports, nil
+}
+
+func DiagnosticsForWork(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]*source.Diagnostic, error) {
+	pw, err := snapshot.ParseWork(ctx, fh)
+	if err != nil {
+		if pw == nil || len(pw.ParseErrors) == 0 {
+			return nil, err
+		}
+		return pw.ParseErrors, nil
+	}
+
+	// Add diagnostic if a directory does not contain a module.
+	var diagnostics []*source.Diagnostic
+	for _, use := range pw.File.Use {
+		rng, err := source.LineToRange(pw.Mapper, fh.URI(), use.Syntax.Start, use.Syntax.End)
+		if err != nil {
+			return nil, err
+		}
+
+		modfh, err := snapshot.GetFile(ctx, modFileURI(pw, use))
+		if err != nil {
+			return nil, err
+		}
+		if _, err := modfh.Read(); err != nil && os.IsNotExist(err) {
+			diagnostics = append(diagnostics, &source.Diagnostic{
+				URI:      fh.URI(),
+				Range:    rng,
+				Severity: protocol.SeverityError,
+				Source:   source.UnknownError, // Do we need a new source for this?
+				Message:  fmt.Sprintf("directory %v does not contain a module", use.Path),
+			})
+		}
+	}
+	return diagnostics, nil
+}
+
+func modFileURI(pw *source.ParsedWorkFile, use *modfile.Use) span.URI {
+	workdir := filepath.Dir(pw.URI.Filename())
+
+	modroot := filepath.FromSlash(use.Path)
+	if !filepath.IsAbs(modroot) {
+		modroot = filepath.Join(workdir, modroot)
+	}
+
+	return span.URIFromPath(filepath.Join(modroot, "go.mod"))
+}
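
The diagnostic above reduces to checking whether each use directory actually contains a go.mod. A rough standalone sketch of that check, assuming the go.work-aware golang.org/x/mod/modfile API the code above relies on (ParseWork and the Use slice); the go.work path is a placeholder:

package main

import (
	"fmt"
	"os"
	"path/filepath"

	"golang.org/x/mod/modfile"
)

// missingModules parses a go.work file and reports each use directory
// that does not contain a go.mod file.
func missingModules(workPath string) ([]string, error) {
	data, err := os.ReadFile(workPath)
	if err != nil {
		return nil, err
	}
	wf, err := modfile.ParseWork(workPath, data, nil)
	if err != nil {
		return nil, err
	}
	workdir := filepath.Dir(workPath)
	var missing []string
	for _, use := range wf.Use {
		dir := filepath.FromSlash(use.Path)
		if !filepath.IsAbs(dir) {
			dir = filepath.Join(workdir, dir)
		}
		if _, err := os.Stat(filepath.Join(dir, "go.mod")); os.IsNotExist(err) {
			missing = append(missing, use.Path)
		}
	}
	return missing, nil
}

func main() {
	// Placeholder path; point this at a real go.work file to try it.
	missing, err := missingModules("go.work")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	for _, dir := range missing {
		fmt.Printf("directory %v does not contain a module\n", dir)
	}
}
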
diff --git a/internal/lsp/work/format.go b/internal/lsp/work/format.go
new file mode 100644
index 0000000..35b804a
--- /dev/null
+++ b/internal/lsp/work/format.go
@@ -0,0 +1,31 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package work
+
+import (
+	"context"
+
+	"golang.org/x/mod/modfile"
+	"golang.org/x/tools/internal/event"
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/source"
+)
+
+func Format(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.TextEdit, error) {
+	ctx, done := event.Start(ctx, "work.Format")
+	defer done()
+
+	pw, err := snapshot.ParseWork(ctx, fh)
+	if err != nil {
+		return nil, err
+	}
+	formatted := modfile.Format(pw.File.Syntax)
+	// Calculate the edits to be made due to the change.
+	diff, err := snapshot.View().Options().ComputeEdits(fh.URI(), string(pw.Mapper.Content), string(formatted))
+	if err != nil {
+		return nil, err
+	}
+	return source.ToProtocolEdits(pw.Mapper, diff)
+}
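
Outside of gopls, the same golang.org/x/mod/modfile calls used above (assuming that ParseWork/Format API) are enough to print a canonically formatted go.work file; the Format handler above then diffs the formatted bytes against the editor buffer instead of printing them:

package main

import (
	"fmt"
	"os"

	"golang.org/x/mod/modfile"
)

func main() {
	// Placeholder path for a go.work file.
	data, err := os.ReadFile("go.work")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	wf, err := modfile.ParseWork("go.work", data, nil)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	// Format renders the parsed syntax tree back to canonical bytes.
	os.Stdout.Write(modfile.Format(wf.Syntax))
}
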
diff --git a/internal/lsp/work/hover.go b/internal/lsp/work/hover.go
new file mode 100644
index 0000000..1699c5c
--- /dev/null
+++ b/internal/lsp/work/hover.go
@@ -0,0 +1,94 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package work
+
+import (
+	"bytes"
+	"context"
+	"go/token"
+
+	"golang.org/x/mod/modfile"
+	"golang.org/x/tools/internal/event"
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/source"
+	errors "golang.org/x/xerrors"
+)
+
+func Hover(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, position protocol.Position) (*protocol.Hover, error) {
+	// We only provide hover information for the view's go.work file.
+	if fh.URI() != snapshot.WorkFile() {
+		return nil, nil
+	}
+
+	ctx, done := event.Start(ctx, "work.Hover")
+	defer done()
+
+	// Get the position of the cursor.
+	pw, err := snapshot.ParseWork(ctx, fh)
+	if err != nil {
+		return nil, errors.Errorf("getting go.work file handle: %w", err)
+	}
+	spn, err := pw.Mapper.PointSpan(position)
+	if err != nil {
+		return nil, errors.Errorf("computing cursor position: %w", err)
+	}
+	hoverRng, err := spn.Range(pw.Mapper.Converter)
+	if err != nil {
+		return nil, errors.Errorf("computing hover range: %w", err)
+	}
+
+	// Confirm that the cursor is inside a use statement, and then find
+	// the position of the use statement's directory path.
+	use, pathStart, pathEnd := usePath(pw, hoverRng.Start)
+
+	// The cursor position is not on a use statement.
+	if use == nil {
+		return nil, nil
+	}
+
+	// Get the mod file denoted by the use.
+	modfh, err := snapshot.GetFile(ctx, modFileURI(pw, use))
+	if err != nil {
+		return nil, errors.Errorf("getting modfile handle: %w", err)
+	}
+	pm, err := snapshot.ParseMod(ctx, modfh)
+	if err != nil {
+		return nil, errors.Errorf("getting modfile handle: %w", err)
+	}
+	mod := pm.File.Module.Mod
+
+	// Get the range to highlight for the hover.
+	rng, err := source.ByteOffsetsToRange(pw.Mapper, fh.URI(), pathStart, pathEnd)
+	if err != nil {
+		return nil, err
+	}
+	options := snapshot.View().Options()
+	return &protocol.Hover{
+		Contents: protocol.MarkupContent{
+			Kind:  options.PreferredContentFormat,
+			Value: mod.Path,
+		},
+		Range: rng,
+	}, nil
+}
+
+func usePath(pw *source.ParsedWorkFile, pos token.Pos) (use *modfile.Use, pathStart, pathEnd int) {
+	for _, u := range pw.File.Use {
+		path := []byte(u.Path)
+		s, e := u.Syntax.Start.Byte, u.Syntax.End.Byte
+		i := bytes.Index(pw.Mapper.Content[s:e], path)
+		if i == -1 {
+			// This should not happen.
+			continue
+		}
+		// Shift the start position to the location of the
+		// module directory within the use statement.
+		pathStart, pathEnd = s+i, s+i+len(path)
+		if token.Pos(pathStart) <= pos && pos <= token.Pos(pathEnd) {
+			return u, pathStart, pathEnd
+		}
+	}
+	return nil, 0, 0
+}
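
The usePath helper above maps a byte offset to the use directive whose span contains it. A sketch of the same lookup against a go.work file parsed directly with golang.org/x/mod/modfile (assuming that API; the path and offset are placeholders):

package main

import (
	"fmt"
	"os"

	"golang.org/x/mod/modfile"
)

// useAt returns the use directive whose syntax span contains the given
// byte offset, or nil if the offset is not inside any use directive.
func useAt(wf *modfile.WorkFile, offset int) *modfile.Use {
	for _, u := range wf.Use {
		if u.Syntax.Start.Byte <= offset && offset <= u.Syntax.End.Byte {
			return u
		}
	}
	return nil
}

func main() {
	// Placeholder inputs: a go.work file and a cursor offset into it.
	data, err := os.ReadFile("go.work")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	wf, err := modfile.ParseWork("go.work", data, nil)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	if u := useAt(wf, 20); u != nil {
		fmt.Println("cursor is on use", u.Path)
	}
}
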
diff --git a/internal/lsp/workspace.go b/internal/lsp/workspace.go
index c239942..1f01b3b 100644
--- a/internal/lsp/workspace.go
+++ b/internal/lsp/workspace.go
@@ -6,12 +6,7 @@
 
 import (
 	"context"
-	"fmt"
-	"os"
-	"path/filepath"
-	"sync/atomic"
 
-	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/span"
@@ -31,8 +26,6 @@
 	return s.addFolders(ctx, event.Added)
 }
 
-var wsIndex int64
-
 func (s *Server) addView(ctx context.Context, name string, uri span.URI) (source.Snapshot, func(), error) {
 	s.stateMu.Lock()
 	state := s.state
@@ -44,19 +37,7 @@
 	if err := s.fetchConfig(ctx, name, uri, options); err != nil {
 		return nil, func() {}, err
 	}
-	// Try to assign a persistent temp directory for tracking this view's
-	// temporary workspace.
-	var tempWorkspace span.URI
-	if s.tempDir != "" {
-		index := atomic.AddInt64(&wsIndex, 1)
-		wsDir := filepath.Join(s.tempDir, fmt.Sprintf("workspace.%d", index))
-		if err := os.Mkdir(wsDir, 0700); err == nil {
-			tempWorkspace = span.URIFromPath(wsDir)
-		} else {
-			event.Error(ctx, "making workspace dir", err)
-		}
-	}
-	_, snapshot, release, err := s.session.NewView(ctx, name, uri, tempWorkspace, options)
+	_, snapshot, release, err := s.session.NewView(ctx, name, uri, options)
 	return snapshot, release, err
 }
 
diff --git a/internal/memoize/memoize.go b/internal/memoize/memoize.go
index d4b8773..0037342 100644
--- a/internal/memoize/memoize.go
+++ b/internal/memoize/memoize.go
@@ -62,6 +62,8 @@
 	destroyed uint32
 	store     *Store
 	name      string
+	// destroyedBy describes the caller that toggled destroyed from 0 to 1.
+	destroyedBy string
 	// wg tracks the reference count of this generation.
 	wg sync.WaitGroup
 }
@@ -69,10 +71,16 @@
 // Destroy waits for all operations referencing g to complete, then removes
 // all references to g from cache entries. Cache entries that no longer
 // reference any non-destroyed generation are removed. Destroy must be called
-// exactly once for each generation.
-func (g *Generation) Destroy() {
+// exactly once for each generation, and destroyedBy describes the caller.
+func (g *Generation) Destroy(destroyedBy string) {
 	g.wg.Wait()
-	atomic.StoreUint32(&g.destroyed, 1)
+
+	prevDestroyedBy := g.destroyedBy
+	g.destroyedBy = destroyedBy
+	if ok := atomic.CompareAndSwapUint32(&g.destroyed, 0, 1); !ok {
+		panic("Destroy on generation " + g.name + " already destroyed by " + prevDestroyedBy)
+	}
+
 	g.store.mu.Lock()
 	defer g.store.mu.Unlock()
 	for k, e := range g.store.handles {
@@ -94,13 +102,10 @@
 
 // Acquire creates a new reference to g, and returns a func to release that
 // reference.
-func (g *Generation) Acquire(ctx context.Context) func() {
+func (g *Generation) Acquire() func() {
 	destroyed := atomic.LoadUint32(&g.destroyed)
-	if ctx.Err() != nil {
-		return func() {}
-	}
 	if destroyed != 0 {
-		panic("acquire on destroyed generation " + g.name)
+		panic("acquire on generation " + g.name + " destroyed by " + g.destroyedBy)
 	}
 	g.wg.Add(1)
 	return g.wg.Done
@@ -175,7 +180,7 @@
 		panic("the function passed to bind must not be nil")
 	}
 	if atomic.LoadUint32(&g.destroyed) != 0 {
-		panic("operation on destroyed generation " + g.name)
+		panic("operation on generation " + g.name + " destroyed by " + g.destroyedBy)
 	}
 	g.store.mu.Lock()
 	defer g.store.mu.Unlock()
@@ -233,7 +238,7 @@
 func (g *Generation) Inherit(hs ...*Handle) {
 	for _, h := range hs {
 		if atomic.LoadUint32(&g.destroyed) != 0 {
-			panic("inherit on destroyed generation " + g.name)
+			panic("inherit on generation " + g.name + " destroyed by " + g.destroyedBy)
 		}
 
 		h.mu.Lock()
@@ -266,7 +271,7 @@
 // If the value is not yet ready, the underlying function will be invoked.
 // If ctx is cancelled, Get returns nil.
 func (h *Handle) Get(ctx context.Context, g *Generation, arg Arg) (interface{}, error) {
-	release := g.Acquire(ctx)
+	release := g.Acquire()
 	defer release()
 
 	if ctx.Err() != nil {
@@ -311,7 +316,7 @@
 	function := h.function // Read under the lock
 
 	// Make sure that the generation isn't destroyed while we're running in it.
-	release := g.Acquire(ctx)
+	release := g.Acquire()
 	go func() {
 		defer release()
 		// Just in case the function does something expensive without checking
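
The Destroy/Acquire changes above are easier to follow with a toy model. The following is a simplified stand-in, not the real memoize package: a wait-group reference count plus a compare-and-swap that records which caller destroyed the generation, so later misuse panics with a useful message.

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// generation is a toy reference-counted generation: Acquire adds a
// reference, Destroy waits for outstanding references and records its
// caller for later diagnostics.
type generation struct {
	name        string
	destroyed   uint32
	destroyedBy string
	wg          sync.WaitGroup
}

func (g *generation) Acquire() func() {
	if atomic.LoadUint32(&g.destroyed) != 0 {
		panic("acquire on generation " + g.name + " destroyed by " + g.destroyedBy)
	}
	g.wg.Add(1)
	return g.wg.Done
}

func (g *generation) Destroy(destroyedBy string) {
	g.wg.Wait()
	prev := g.destroyedBy
	g.destroyedBy = destroyedBy
	if !atomic.CompareAndSwapUint32(&g.destroyed, 0, 1) {
		panic("Destroy on generation " + g.name + " already destroyed by " + prev)
	}
}

func main() {
	g := &generation{name: "g1"}
	release := g.Acquire()
	release()
	g.Destroy("main")
	fmt.Println("destroyed by:", g.destroyedBy)
}
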
diff --git a/internal/memoize/memoize_test.go b/internal/memoize/memoize_test.go
index 41f20d0..f05966b 100644
--- a/internal/memoize/memoize_test.go
+++ b/internal/memoize/memoize_test.go
@@ -60,11 +60,11 @@
 	expectGet(t, h2, g2, "res")
 
 	// With g1 destroyed, g2 should still work.
-	g1.Destroy()
+	g1.Destroy("TestGenerations")
 	expectGet(t, h2, g2, "res")
 
 	// With all generations destroyed, key should be re-evaluated.
-	g2.Destroy()
+	g2.Destroy("TestGenerations")
 	g3 := s.Generation("g3")
 	h3 := g3.Bind("key", func(context.Context, memoize.Arg) interface{} { return "new res" }, nil)
 	expectGet(t, h3, g3, "new res")
@@ -89,7 +89,7 @@
 	g2 := s.Generation("g2")
 	g2.Inherit(h1, h2)
 
-	g1.Destroy()
+	g1.Destroy("TestCleanup")
 	expectGet(t, h1, g2, &v1)
 	expectGet(t, h2, g2, &v2)
 	for k, v := range map[string]*bool{"key1": &v1, "key2": &v2} {
@@ -97,7 +97,7 @@
 			t.Errorf("after destroying g1, bound value %q is cleaned up", k)
 		}
 	}
-	g2.Destroy()
+	g2.Destroy("TestCleanup")
 	if got, want := v1, false; got != want {
 		t.Error("after destroying g2, v1 is cleaned up")
 	}
diff --git a/internal/mod/lazyregexp/lazyre.go b/internal/mod/lazyregexp/lazyre.go
deleted file mode 100644
index 7f524d2..0000000
--- a/internal/mod/lazyregexp/lazyre.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package lazyregexp is a thin wrapper over regexp, allowing the use of global
-// regexp variables without forcing them to be compiled at init.
-package lazyregexp
-
-import (
-	"os"
-	"regexp"
-	"strings"
-	"sync"
-)
-
-// Regexp is a wrapper around regexp.Regexp, where the underlying regexp will be
-// compiled the first time it is needed.
-type Regexp struct {
-	str  string
-	once sync.Once
-	rx   *regexp.Regexp
-}
-
-func (r *Regexp) re() *regexp.Regexp {
-	r.once.Do(r.build)
-	return r.rx
-}
-
-func (r *Regexp) build() {
-	r.rx = regexp.MustCompile(r.str)
-	r.str = ""
-}
-
-func (r *Regexp) MatchString(s string) bool {
-	return r.re().MatchString(s)
-}
-
-var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test")
-
-// New creates a new lazy regexp, delaying the compiling work until it is first
-// needed. If the code is being run as part of tests, the regexp compiling will
-// happen immediately.
-func New(str string) *Regexp {
-	lr := &Regexp{str: str}
-	if inTest {
-		// In tests, always compile the regexps early.
-		lr.re()
-	}
-	return lr
-}
diff --git a/internal/mod/modfile/read.go b/internal/mod/modfile/read.go
deleted file mode 100644
index f49d553..0000000
--- a/internal/mod/modfile/read.go
+++ /dev/null
@@ -1,655 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package modfile
-
-import (
-	"bytes"
-	"errors"
-	"fmt"
-	"os"
-	"strconv"
-	"strings"
-	"unicode"
-	"unicode/utf8"
-
-	"golang.org/x/mod/modfile"
-)
-
-// An input represents a single input file being parsed.
-type input struct {
-	// Lexing state.
-	filename   string            // name of input file, for errors
-	complete   []byte            // entire input
-	remaining  []byte            // remaining input
-	tokenStart []byte            // token being scanned to end of input
-	token      token             // next token to be returned by lex, peek
-	pos        modfile.Position  // current input position
-	comments   []modfile.Comment // accumulated comments
-
-	// Parser state.
-	file        *modfile.FileSyntax // returned top-level syntax tree
-	parseErrors modfile.ErrorList   // errors encountered during parsing
-
-	// Comment assignment state.
-	pre  []modfile.Expr // all expressions, in preorder traversal
-	post []modfile.Expr // all expressions, in postorder traversal
-}
-
-func newInput(filename string, data []byte) *input {
-	return &input{
-		filename:  filename,
-		complete:  data,
-		remaining: data,
-		pos:       modfile.Position{Line: 1, LineRune: 1, Byte: 0},
-	}
-}
-
-// parse parses the input file.
-func parse(file string, data []byte) (f *modfile.FileSyntax, err error) {
-	// The parser panics for both routine errors like syntax errors
-	// and for programmer bugs like array index errors.
-	// Turn both into error returns. Catching bug panics is
-	// especially important when processing many files.
-	in := newInput(file, data)
-	defer func() {
-		if e := recover(); e != nil && e != &in.parseErrors {
-			in.parseErrors = append(in.parseErrors, modfile.Error{
-				Filename: in.filename,
-				Pos:      in.pos,
-				Err:      fmt.Errorf("internal error: %v", e),
-			})
-		}
-		if err == nil && len(in.parseErrors) > 0 {
-			err = in.parseErrors
-		}
-	}()
-
-	// Prime the lexer by reading in the first token. It will be available
-	// in the next peek() or lex() call.
-	in.readToken()
-
-	// Invoke the parser.
-	in.parseFile()
-	if len(in.parseErrors) > 0 {
-		return nil, in.parseErrors
-	}
-	in.file.Name = in.filename
-
-	// Assign comments to nearby syntax.
-	in.assignComments()
-
-	return in.file, nil
-}
-
-// Error is called to report an error.
-// Error does not return: it panics.
-func (in *input) Error(s string) {
-	in.parseErrors = append(in.parseErrors, modfile.Error{
-		Filename: in.filename,
-		Pos:      in.pos,
-		Err:      errors.New(s),
-	})
-	panic(&in.parseErrors)
-}
-
-// eof reports whether the input has reached end of file.
-func (in *input) eof() bool {
-	return len(in.remaining) == 0
-}
-
-// peekRune returns the next rune in the input without consuming it.
-func (in *input) peekRune() int {
-	if len(in.remaining) == 0 {
-		return 0
-	}
-	r, _ := utf8.DecodeRune(in.remaining)
-	return int(r)
-}
-
-// peekPrefix reports whether the remaining input begins with the given prefix.
-func (in *input) peekPrefix(prefix string) bool {
-	// This is like bytes.HasPrefix(in.remaining, []byte(prefix))
-	// but without the allocation of the []byte copy of prefix.
-	for i := 0; i < len(prefix); i++ {
-		if i >= len(in.remaining) || in.remaining[i] != prefix[i] {
-			return false
-		}
-	}
-	return true
-}
-
-// readRune consumes and returns the next rune in the input.
-func (in *input) readRune() int {
-	if len(in.remaining) == 0 {
-		in.Error("internal lexer error: readRune at EOF")
-	}
-	r, size := utf8.DecodeRune(in.remaining)
-	in.remaining = in.remaining[size:]
-	if r == '\n' {
-		in.pos.Line++
-		in.pos.LineRune = 1
-	} else {
-		in.pos.LineRune++
-	}
-	in.pos.Byte += size
-	return int(r)
-}
-
-type token struct {
-	kind   tokenKind
-	pos    modfile.Position
-	endPos modfile.Position
-	text   string
-}
-
-type tokenKind int
-
-const (
-	_EOF tokenKind = -(iota + 1)
-	_EOLCOMMENT
-	_IDENT
-	_STRING
-	_COMMENT
-
-	// newlines and punctuation tokens are allowed as ASCII codes.
-)
-
-func (k tokenKind) isComment() bool {
-	return k == _COMMENT || k == _EOLCOMMENT
-}
-
-// isEOL returns whether a token terminates a line.
-func (k tokenKind) isEOL() bool {
-	return k == _EOF || k == _EOLCOMMENT || k == '\n'
-}
-
-// startToken marks the beginning of the next input token.
-// It must be followed by a call to endToken, once the token's text has
-// been consumed using readRune.
-func (in *input) startToken() {
-	in.tokenStart = in.remaining
-	in.token.text = ""
-	in.token.pos = in.pos
-}
-
-// endToken marks the end of an input token.
-// It records the actual token string in tok.text.
-// A single trailing newline (LF or CRLF) will be removed from comment tokens.
-func (in *input) endToken(kind tokenKind) {
-	in.token.kind = kind
-	text := string(in.tokenStart[:len(in.tokenStart)-len(in.remaining)])
-	if kind.isComment() {
-		if strings.HasSuffix(text, "\r\n") {
-			text = text[:len(text)-2]
-		} else {
-			text = strings.TrimSuffix(text, "\n")
-		}
-	}
-	in.token.text = text
-	in.token.endPos = in.pos
-}
-
-// peek returns the kind of the the next token returned by lex.
-func (in *input) peek() tokenKind {
-	return in.token.kind
-}
-
-// lex is called from the parser to obtain the next input token.
-func (in *input) lex() token {
-	tok := in.token
-	in.readToken()
-	return tok
-}
-
-// readToken lexes the next token from the text and stores it in in.token.
-func (in *input) readToken() {
-	// Skip past spaces, stopping at non-space or EOF.
-	for !in.eof() {
-		c := in.peekRune()
-		if c == ' ' || c == '\t' || c == '\r' {
-			in.readRune()
-			continue
-		}
-
-		// Comment runs to end of line.
-		if in.peekPrefix("//") {
-			in.startToken()
-
-			// Is this comment the only thing on its line?
-			// Find the last \n before this // and see if it's all
-			// spaces from there to here.
-			i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n"))
-			suffix := len(bytes.TrimSpace(in.complete[i+1:in.pos.Byte])) > 0
-			in.readRune()
-			in.readRune()
-
-			// Consume comment.
-			for len(in.remaining) > 0 && in.readRune() != '\n' {
-			}
-
-			// If we are at top level (not in a statement), hand the comment to
-			// the parser as a _COMMENT token. The grammar is written
-			// to handle top-level comments itself.
-			if !suffix {
-				in.endToken(_COMMENT)
-				return
-			}
-
-			// Otherwise, save comment for later attachment to syntax tree.
-			in.endToken(_EOLCOMMENT)
-			in.comments = append(in.comments, modfile.Comment{in.token.pos, in.token.text, suffix})
-			return
-		}
-
-		if in.peekPrefix("/*") {
-			in.Error("mod files must use // comments (not /* */ comments)")
-		}
-
-		// Found non-space non-comment.
-		break
-	}
-
-	// Found the beginning of the next token.
-	in.startToken()
-
-	// End of file.
-	if in.eof() {
-		in.endToken(_EOF)
-		return
-	}
-
-	// Punctuation tokens.
-	switch c := in.peekRune(); c {
-	case '\n', '(', ')', '[', ']', '{', '}', ',':
-		in.readRune()
-		in.endToken(tokenKind(c))
-		return
-
-	case '"', '`': // quoted string
-		quote := c
-		in.readRune()
-		for {
-			if in.eof() {
-				in.pos = in.token.pos
-				in.Error("unexpected EOF in string")
-			}
-			if in.peekRune() == '\n' {
-				in.Error("unexpected newline in string")
-			}
-			c := in.readRune()
-			if c == quote {
-				break
-			}
-			if c == '\\' && quote != '`' {
-				if in.eof() {
-					in.pos = in.token.pos
-					in.Error("unexpected EOF in string")
-				}
-				in.readRune()
-			}
-		}
-		in.endToken(_STRING)
-		return
-	}
-
-	// Checked all punctuation. Must be identifier token.
-	if c := in.peekRune(); !isIdent(c) {
-		in.Error(fmt.Sprintf("unexpected input character %#q", c))
-	}
-
-	// Scan over identifier.
-	for isIdent(in.peekRune()) {
-		if in.peekPrefix("//") {
-			break
-		}
-		if in.peekPrefix("/*") {
-			in.Error("mod files must use // comments (not /* */ comments)")
-		}
-		in.readRune()
-	}
-	in.endToken(_IDENT)
-}
-
-// isIdent reports whether c is an identifier rune.
-// We treat most printable runes as identifier runes, except for a handful of
-// ASCII punctuation characters.
-func isIdent(c int) bool {
-	switch r := rune(c); r {
-	case ' ', '(', ')', '[', ']', '{', '}', ',':
-		return false
-	default:
-		return !unicode.IsSpace(r) && unicode.IsPrint(r)
-	}
-}
-
-// Comment assignment.
-// We build two lists of all subexpressions, preorder and postorder.
-// The preorder list is ordered by start location, with outer expressions first.
-// The postorder list is ordered by end location, with outer expressions last.
-// We use the preorder list to assign each whole-line comment to the syntax
-// immediately following it, and we use the postorder list to assign each
-// end-of-line comment to the syntax immediately preceding it.
-
-// order walks the expression adding it and its subexpressions to the
-// preorder and postorder lists.
-func (in *input) order(x modfile.Expr) {
-	if x != nil {
-		in.pre = append(in.pre, x)
-	}
-	switch x := x.(type) {
-	default:
-		panic(fmt.Errorf("order: unexpected type %T", x))
-	case nil:
-		// nothing
-	case *modfile.LParen, *modfile.RParen:
-		// nothing
-	case *modfile.CommentBlock:
-		// nothing
-	case *modfile.Line:
-		// nothing
-	case *modfile.FileSyntax:
-		for _, stmt := range x.Stmt {
-			in.order(stmt)
-		}
-	case *modfile.LineBlock:
-		in.order(&x.LParen)
-		for _, l := range x.Line {
-			in.order(l)
-		}
-		in.order(&x.RParen)
-	}
-	if x != nil {
-		in.post = append(in.post, x)
-	}
-}
-
-// assignComments attaches comments to nearby syntax.
-func (in *input) assignComments() {
-	const debug = false
-
-	// Generate preorder and postorder lists.
-	in.order(in.file)
-
-	// Split into whole-line comments and suffix comments.
-	var line, suffix []modfile.Comment
-	for _, com := range in.comments {
-		if com.Suffix {
-			suffix = append(suffix, com)
-		} else {
-			line = append(line, com)
-		}
-	}
-
-	if debug {
-		for _, c := range line {
-			fmt.Fprintf(os.Stderr, "LINE %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte)
-		}
-	}
-
-	// Assign line comments to syntax immediately following.
-	for _, x := range in.pre {
-		start, _ := x.Span()
-		if debug {
-			fmt.Fprintf(os.Stderr, "pre %T :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte)
-		}
-		xcom := x.Comment()
-		for len(line) > 0 && start.Byte >= line[0].Start.Byte {
-			if debug {
-				fmt.Fprintf(os.Stderr, "ASSIGN LINE %q #%d\n", line[0].Token, line[0].Start.Byte)
-			}
-			xcom.Before = append(xcom.Before, line[0])
-			line = line[1:]
-		}
-	}
-
-	// Remaining line comments go at end of file.
-	in.file.After = append(in.file.After, line...)
-
-	if debug {
-		for _, c := range suffix {
-			fmt.Fprintf(os.Stderr, "SUFFIX %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte)
-		}
-	}
-
-	// Assign suffix comments to syntax immediately before.
-	for i := len(in.post) - 1; i >= 0; i-- {
-		x := in.post[i]
-
-		start, end := x.Span()
-		if debug {
-			fmt.Fprintf(os.Stderr, "post %T :%d:%d #%d :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte, end.Line, end.LineRune, end.Byte)
-		}
-
-		// Do not assign suffix comments to end of line block or whole file.
-		// Instead assign them to the last element inside.
-		switch x.(type) {
-		case *modfile.FileSyntax:
-			continue
-		}
-
-		// Do not assign suffix comments to something that starts
-		// on an earlier line, so that in
-		//
-		//	x ( y
-		//		z ) // comment
-		//
-		// we assign the comment to z and not to x ( ... ).
-		if start.Line != end.Line {
-			continue
-		}
-		xcom := x.Comment()
-		for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte {
-			if debug {
-				fmt.Fprintf(os.Stderr, "ASSIGN SUFFIX %q #%d\n", suffix[len(suffix)-1].Token, suffix[len(suffix)-1].Start.Byte)
-			}
-			xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1])
-			suffix = suffix[:len(suffix)-1]
-		}
-	}
-
-	// We assigned suffix comments in reverse.
-	// If multiple suffix comments were appended to the same
-	// expression node, they are now in reverse. Fix that.
-	for _, x := range in.post {
-		reverseComments(x.Comment().Suffix)
-	}
-
-	// Remaining suffix comments go at beginning of file.
-	in.file.Before = append(in.file.Before, suffix...)
-}
-
-// reverseComments reverses the []Comment list.
-func reverseComments(list []modfile.Comment) {
-	for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 {
-		list[i], list[j] = list[j], list[i]
-	}
-}
-
-func (in *input) parseFile() {
-	in.file = new(modfile.FileSyntax)
-	var cb *modfile.CommentBlock
-	for {
-		switch in.peek() {
-		case '\n':
-			in.lex()
-			if cb != nil {
-				in.file.Stmt = append(in.file.Stmt, cb)
-				cb = nil
-			}
-		case _COMMENT:
-			tok := in.lex()
-			if cb == nil {
-				cb = &modfile.CommentBlock{Start: tok.pos}
-			}
-			com := cb.Comment()
-			com.Before = append(com.Before, modfile.Comment{Start: tok.pos, Token: tok.text})
-		case _EOF:
-			if cb != nil {
-				in.file.Stmt = append(in.file.Stmt, cb)
-			}
-			return
-		default:
-			in.parseStmt()
-			if cb != nil {
-				in.file.Stmt[len(in.file.Stmt)-1].Comment().Before = cb.Before
-				cb = nil
-			}
-		}
-	}
-}
-
-func (in *input) parseStmt() {
-	tok := in.lex()
-	start := tok.pos
-	end := tok.endPos
-	tokens := []string{tok.text}
-	for {
-		tok := in.lex()
-		switch {
-		case tok.kind.isEOL():
-			in.file.Stmt = append(in.file.Stmt, &modfile.Line{
-				Start: start,
-				Token: tokens,
-				End:   end,
-			})
-			return
-
-		case tok.kind == '(':
-			if next := in.peek(); next.isEOL() {
-				// Start of block: no more tokens on this line.
-				in.file.Stmt = append(in.file.Stmt, in.parseLineBlock(start, tokens, tok))
-				return
-			} else if next == ')' {
-				rparen := in.lex()
-				if in.peek().isEOL() {
-					// Empty block.
-					in.lex()
-					in.file.Stmt = append(in.file.Stmt, &modfile.LineBlock{
-						Start:  start,
-						Token:  tokens,
-						LParen: modfile.LParen{Pos: tok.pos},
-						RParen: modfile.RParen{Pos: rparen.pos},
-					})
-					return
-				}
-				// '( )' in the middle of the line, not a block.
-				tokens = append(tokens, tok.text, rparen.text)
-			} else {
-				// '(' in the middle of the line, not a block.
-				tokens = append(tokens, tok.text)
-			}
-
-		default:
-			tokens = append(tokens, tok.text)
-			end = tok.endPos
-		}
-	}
-}
-
-func (in *input) parseLineBlock(start modfile.Position, token []string, lparen token) *modfile.LineBlock {
-	x := &modfile.LineBlock{
-		Start:  start,
-		Token:  token,
-		LParen: modfile.LParen{Pos: lparen.pos},
-	}
-	var comments []modfile.Comment
-	for {
-		switch in.peek() {
-		case _EOLCOMMENT:
-			// Suffix comment, will be attached later by assignComments.
-			in.lex()
-		case '\n':
-			// Blank line. Add an empty comment to preserve it.
-			in.lex()
-			if len(comments) == 0 && len(x.Line) > 0 || len(comments) > 0 && comments[len(comments)-1].Token != "" {
-				comments = append(comments, modfile.Comment{})
-			}
-		case _COMMENT:
-			tok := in.lex()
-			comments = append(comments, modfile.Comment{Start: tok.pos, Token: tok.text})
-		case _EOF:
-			in.Error(fmt.Sprintf("syntax error (unterminated block started at %s:%d:%d)", in.filename, x.Start.Line, x.Start.LineRune))
-		case ')':
-			rparen := in.lex()
-			x.RParen.Before = comments
-			x.RParen.Pos = rparen.pos
-			if !in.peek().isEOL() {
-				in.Error("syntax error (expected newline after closing paren)")
-			}
-			in.lex()
-			return x
-		default:
-			l := in.parseLine()
-			x.Line = append(x.Line, l)
-			l.Comment().Before = comments
-			comments = nil
-		}
-	}
-}
-
-func (in *input) parseLine() *modfile.Line {
-	tok := in.lex()
-	if tok.kind.isEOL() {
-		in.Error("internal parse error: parseLine at end of line")
-	}
-	start := tok.pos
-	end := tok.endPos
-	tokens := []string{tok.text}
-	for {
-		tok := in.lex()
-		if tok.kind.isEOL() {
-			return &modfile.Line{
-				Start:   start,
-				Token:   tokens,
-				End:     end,
-				InBlock: true,
-			}
-		}
-		tokens = append(tokens, tok.text)
-		end = tok.endPos
-	}
-}
-
-var (
-	slashSlash = []byte("//")
-	moduleStr  = []byte("module")
-)
-
-// ModulePath returns the module path from the gomod file text.
-// If it cannot find a module path, it returns an empty string.
-// It is tolerant of unrelated problems in the go.mod file.
-func ModulePath(mod []byte) string {
-	for len(mod) > 0 {
-		line := mod
-		mod = nil
-		if i := bytes.IndexByte(line, '\n'); i >= 0 {
-			line, mod = line[:i], line[i+1:]
-		}
-		if i := bytes.Index(line, slashSlash); i >= 0 {
-			line = line[:i]
-		}
-		line = bytes.TrimSpace(line)
-		if !bytes.HasPrefix(line, moduleStr) {
-			continue
-		}
-		line = line[len(moduleStr):]
-		n := len(line)
-		line = bytes.TrimSpace(line)
-		if len(line) == n || len(line) == 0 {
-			continue
-		}
-
-		if line[0] == '"' || line[0] == '`' {
-			p, err := strconv.Unquote(string(line))
-			if err != nil {
-				return "" // malformed quoted string or multiline module path
-			}
-			return p
-		}
-
-		return string(line)
-	}
-	return "" // missing module path
-}
diff --git a/internal/mod/modfile/rule.go b/internal/mod/modfile/rule.go
deleted file mode 100644
index 163a2db..0000000
--- a/internal/mod/modfile/rule.go
+++ /dev/null
@@ -1,361 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package modfile implements a parser and formatter for go.mod files.
-//
-// The go.mod syntax is described in
-// https://golang.org/cmd/go/#hdr-The_go_mod_file.
-//
-// The Parse and ParseLax functions both parse a go.mod file and return an
-// abstract syntax tree. ParseLax ignores unknown statements and may be used to
-// parse go.mod files that may have been developed with newer versions of Go.
-//
-// The File struct returned by Parse and ParseLax represent an abstract
-// go.mod file. File has several methods like AddNewRequire and DropReplace
-// that can be used to programmatically edit a file.
-//
-// The Format function formats a File back to a byte slice which can be
-// written to a file.
-package modfile
-
-import (
-	"errors"
-	"fmt"
-	"path/filepath"
-	"strconv"
-	"strings"
-	"unicode"
-
-	"golang.org/x/mod/modfile"
-	"golang.org/x/mod/module"
-	"golang.org/x/tools/internal/mod/lazyregexp"
-)
-
-// A WorkFile is the parsed, interpreted form of a go.work file.
-type WorkFile struct {
-	Go        *modfile.Go
-	Directory []*Directory
-	Replace   []*modfile.Replace
-
-	Syntax *modfile.FileSyntax
-}
-
-// A Directory is a single directory statement.
-type Directory struct {
-	DiskPath   string // TODO(matloob): Replace uses module.Version for new. Do that here?
-	ModulePath string // Module path in the comment.
-	Syntax     *modfile.Line
-}
-
-// Parse parses and returns a go.work file.
-//
-// file is the name of the file, used in positions and errors.
-//
-// data is the content of the file.
-//
-// fix is an optional function that canonicalizes module versions.
-// If fix is nil, all module versions must be canonical (module.CanonicalVersion
-// must return the same string).
-func ParseWork(file string, data []byte, fix modfile.VersionFixer) (*WorkFile, error) {
-	return parseToWorkFile(file, data, fix, true)
-}
-
-var GoVersionRE = lazyregexp.New(`^([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)
-
-func parseToWorkFile(file string, data []byte, fix modfile.VersionFixer, strict bool) (parsed *WorkFile, err error) {
-	fs, err := parse(file, data)
-	if err != nil {
-		return nil, err
-	}
-	f := &WorkFile{
-		Syntax: fs,
-	}
-	var errs modfile.ErrorList
-
-	for _, x := range fs.Stmt {
-		switch x := x.(type) {
-		case *modfile.Line:
-			f.add(&errs, nil, x, x.Token[0], x.Token[1:], fix, strict)
-
-		case *modfile.LineBlock:
-			if len(x.Token) > 1 {
-				if strict {
-					errs = append(errs, modfile.Error{
-						Filename: file,
-						Pos:      x.Start,
-						Err:      fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")),
-					})
-				}
-				continue
-			}
-			switch x.Token[0] {
-			default:
-				if strict {
-					errs = append(errs, modfile.Error{
-						Filename: file,
-						Pos:      x.Start,
-						Err:      fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")),
-					})
-				}
-				continue
-			case "module", "directory", "replace":
-				for _, l := range x.Line {
-					f.add(&errs, x, l, x.Token[0], l.Token, fix, strict)
-				}
-			}
-		}
-	}
-
-	if len(errs) > 0 {
-		return nil, errs
-	}
-	return f, nil
-}
-
-func (f *WorkFile) add(errs *modfile.ErrorList, block *modfile.LineBlock, line *modfile.Line, verb string, args []string, fix modfile.VersionFixer, strict bool) {
-	// If strict is false, this module is a dependency.
-	// We ignore all unknown directives as well as main-module-only
-	// directives like replace and exclude. It will work better for
-	// forward compatibility if we can depend on modules that have unknown
-	// statements (presumed relevant only when acting as the main module)
-	// and simply ignore those statements.
-	if !strict {
-		switch verb {
-		case "go", "module", "retract", "require":
-			// want these even for dependency go.mods
-		default:
-			return
-		}
-	}
-
-	wrapModPathError := func(modPath string, err error) {
-		*errs = append(*errs, modfile.Error{
-			Filename: f.Syntax.Name,
-			Pos:      line.Start,
-			ModPath:  modPath,
-			Verb:     verb,
-			Err:      err,
-		})
-	}
-	wrapError := func(err error) {
-		*errs = append(*errs, modfile.Error{
-			Filename: f.Syntax.Name,
-			Pos:      line.Start,
-			Err:      err,
-		})
-	}
-	errorf := func(format string, args ...interface{}) {
-		wrapError(fmt.Errorf(format, args...))
-	}
-
-	switch verb {
-	default:
-		errorf("unknown directive: %s", verb)
-
-	case "go":
-		if f.Go != nil {
-			errorf("repeated go statement")
-			return
-		}
-		if len(args) != 1 {
-			errorf("go directive expects exactly one argument")
-			return
-		} else if !GoVersionRE.MatchString(args[0]) {
-			errorf("invalid go version '%s': must match format 1.23", args[0])
-			return
-		}
-
-		f.Go = &modfile.Go{Syntax: line}
-		f.Go.Version = args[0]
-
-	case "directory":
-		if len(args) != 1 {
-			errorf("usage: %s ../local/directory", verb) // TODO(matloob) better example; most directories will be subdirectories of go.work dir
-			return
-		}
-		s, err := parseString(&args[0])
-		if err != nil {
-			errorf("invalid quoted string: %v", err)
-			return
-		}
-		f.Directory = append(f.Directory, &Directory{
-			DiskPath: s,
-			Syntax:   line,
-		})
-
-	case "replace":
-		arrow := 2
-		if len(args) >= 2 && args[1] == "=>" {
-			arrow = 1
-		}
-		if len(args) < arrow+2 || len(args) > arrow+3 || args[arrow] != "=>" {
-			errorf("usage: %s module/path [v1.2.3] => other/module v1.4\n\t or %s module/path [v1.2.3] => ../local/directory", verb, verb)
-			return
-		}
-		s, err := parseString(&args[0])
-		if err != nil {
-			errorf("invalid quoted string: %v", err)
-			return
-		}
-		pathMajor, err := modulePathMajor(s)
-		if err != nil {
-			wrapModPathError(s, err)
-			return
-		}
-		var v string
-		if arrow == 2 {
-			v, err = parseVersion(verb, s, &args[1], fix)
-			if err != nil {
-				wrapError(err)
-				return
-			}
-			if err := module.CheckPathMajor(v, pathMajor); err != nil {
-				wrapModPathError(s, err)
-				return
-			}
-		}
-		ns, err := parseString(&args[arrow+1])
-		if err != nil {
-			errorf("invalid quoted string: %v", err)
-			return
-		}
-		nv := ""
-		if len(args) == arrow+2 {
-			if !IsDirectoryPath(ns) {
-				errorf("replacement module without version must be directory path (rooted or starting with ./ or ../)")
-				return
-			}
-			if filepath.Separator == '/' && strings.Contains(ns, `\`) {
-				errorf("replacement directory appears to be Windows path (on a non-windows system)")
-				return
-			}
-		}
-		if len(args) == arrow+3 {
-			nv, err = parseVersion(verb, ns, &args[arrow+2], fix)
-			if err != nil {
-				wrapError(err)
-				return
-			}
-			if IsDirectoryPath(ns) {
-				errorf("replacement module directory path %q cannot have version", ns)
-				return
-			}
-		}
-		f.Replace = append(f.Replace, &modfile.Replace{
-			Old:    module.Version{Path: s, Version: v},
-			New:    module.Version{Path: ns, Version: nv},
-			Syntax: line,
-		})
-	}
-}
-
-// IsDirectoryPath reports whether the given path should be interpreted
-// as a directory path. Just like on the go command line, relative paths
-// and rooted paths are directory paths; the rest are module paths.
-func IsDirectoryPath(ns string) bool {
-	// Because go.mod files can move from one system to another,
-	// we check all known path syntaxes, both Unix and Windows.
-	return strings.HasPrefix(ns, "./") || strings.HasPrefix(ns, "../") || strings.HasPrefix(ns, "/") ||
-		strings.HasPrefix(ns, `.\`) || strings.HasPrefix(ns, `..\`) || strings.HasPrefix(ns, `\`) ||
-		len(ns) >= 2 && ('A' <= ns[0] && ns[0] <= 'Z' || 'a' <= ns[0] && ns[0] <= 'z') && ns[1] == ':'
-}
-
-// MustQuote reports whether s must be quoted in order to appear as
-// a single token in a go.mod line.
-func MustQuote(s string) bool {
-	for _, r := range s {
-		switch r {
-		case ' ', '"', '\'', '`':
-			return true
-
-		case '(', ')', '[', ']', '{', '}', ',':
-			if len(s) > 1 {
-				return true
-			}
-
-		default:
-			if !unicode.IsPrint(r) {
-				return true
-			}
-		}
-	}
-	return s == "" || strings.Contains(s, "//") || strings.Contains(s, "/*")
-}
-
-// AutoQuote returns s or, if quoting is required for s to appear in a go.mod,
-// the quotation of s.
-func AutoQuote(s string) string {
-	if MustQuote(s) {
-		return strconv.Quote(s)
-	}
-	return s
-}
-
-func parseString(s *string) (string, error) {
-	t := *s
-	if strings.HasPrefix(t, `"`) {
-		var err error
-		if t, err = strconv.Unquote(t); err != nil {
-			return "", err
-		}
-	} else if strings.ContainsAny(t, "\"'`") {
-		// Other quotes are reserved both for possible future expansion
-		// and to avoid confusion. For example if someone types 'x'
-		// we want that to be a syntax error and not a literal x in literal quotation marks.
-		return "", fmt.Errorf("unquoted string cannot contain quote")
-	}
-	*s = AutoQuote(t)
-	return t, nil
-}
-
-func parseVersion(verb string, path string, s *string, fix modfile.VersionFixer) (string, error) {
-	t, err := parseString(s)
-	if err != nil {
-		return "", &modfile.Error{
-			Verb:    verb,
-			ModPath: path,
-			Err: &module.InvalidVersionError{
-				Version: *s,
-				Err:     err,
-			},
-		}
-	}
-	if fix != nil {
-		fixed, err := fix(path, t)
-		if err != nil {
-			if err, ok := err.(*module.ModuleError); ok {
-				return "", &modfile.Error{
-					Verb:    verb,
-					ModPath: path,
-					Err:     err.Err,
-				}
-			}
-			return "", err
-		}
-		t = fixed
-	} else {
-		cv := module.CanonicalVersion(t)
-		if cv == "" {
-			return "", &modfile.Error{
-				Verb:    verb,
-				ModPath: path,
-				Err: &module.InvalidVersionError{
-					Version: t,
-					Err:     errors.New("must be of the form v1.2.3"),
-				},
-			}
-		}
-		t = cv
-	}
-	*s = t
-	return *s, nil
-}
-
-func modulePathMajor(path string) (string, error) {
-	_, major, ok := module.SplitPathVersion(path)
-	if !ok {
-		return "", fmt.Errorf("invalid module path")
-	}
-	return major, nil
-}
diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go
index 61735dc..b381232 100644
--- a/internal/testenv/testenv.go
+++ b/internal/testenv/testenv.go
@@ -15,6 +15,7 @@
 	"runtime"
 	"strings"
 	"sync"
+	"time"
 
 	exec "golang.org/x/sys/execabs"
 )
@@ -258,6 +259,16 @@
 		// and there is only one of each. We shouldn't waste those scarce resources
 		// running very slow tests.
 		fmt.Fprintf(os.Stderr, "skipping test: %s builder is very slow\n", b)
+	case "dragonfly-amd64":
+		// As of 2021-11-02, this builder is running with GO_TEST_TIMEOUT_SCALE=2,
+		// and seems to have unusually slow disk performance.
+		fmt.Fprintln(os.Stderr, "skipping test: dragonfly-amd64 has slow disk (https://golang.org/issue/45216)")
+	case "linux-riscv64-unmatched":
+		// As of 2021-11-03, this builder is empirically not fast enough to run
+		// gopls tests. Ideally we should make the tests faster in short mode
+		// and/or fix them to not assume arbitrary deadlines.
+		// For now, we'll skip them instead.
+		fmt.Fprintf(os.Stderr, "skipping test: %s builder is too slow (https://golang.org/issue/49321)\n", b)
 	default:
 		return
 	}
@@ -297,3 +308,15 @@
 		t.Skipf("running Go version %q is version 1.%d, newer than maximum 1.%d", runtime.Version(), Go1Point(), x)
 	}
 }
+
+// Deadline returns the deadline of t, if known,
+// using the Deadline method added in Go 1.15.
+func Deadline(t Testing) (time.Time, bool) {
+	td, ok := t.(interface {
+		Deadline() (time.Time, bool)
+	})
+	if !ok {
+		return time.Time{}, false
+	}
+	return td.Deadline()
+}
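
The Deadline helper above uses an optional-interface assertion so it compiles against test types that may or may not provide a Deadline method. A small illustration of the pattern, with a hypothetical fakeT standing in for *testing.T:

package main

import (
	"fmt"
	"time"
)

// Testing is a stand-in for the package's minimal test interface.
type Testing interface{}

// deadline reports t's deadline if t's concrete type happens to provide
// a Deadline method; otherwise it reports no deadline.
func deadline(t Testing) (time.Time, bool) {
	td, ok := t.(interface {
		Deadline() (time.Time, bool)
	})
	if !ok {
		return time.Time{}, false
	}
	return td.Deadline()
}

// fakeT is a hypothetical test handle that does expose Deadline.
type fakeT struct{ d time.Time }

func (f fakeT) Deadline() (time.Time, bool) { return f.d, true }

func main() {
	d, ok := deadline(fakeT{d: time.Now().Add(time.Minute)})
	fmt.Println(d, ok) // the fake deadline, true
	_, ok = deadline(struct{}{})
	fmt.Println(ok) // false: no Deadline method
}
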
diff --git a/internal/tool/tool.go b/internal/tool/tool.go
index 41ecd4e..526b6b7 100644
--- a/internal/tool/tool.go
+++ b/internal/tool/tool.go
@@ -15,6 +15,7 @@
 	"runtime"
 	"runtime/pprof"
 	"runtime/trace"
+	"strings"
 	"time"
 )
 
@@ -28,8 +29,9 @@
 //       (&Application{}).Main("myapp", "non-flag-command-line-arg-help", os.Args[1:])
 //     }
 // It recursively scans the application object for fields with a tag containing
-//     `flag:"flagname" help:"short help text"``
-// uses all those fields to build command line flags.
+//     `flag:"flagnames" help:"short help text"`
+// uses all those fields to build command line flags. It will split flagnames on
+// commas and add a flag per name.
 // It expects the Application type to have a method
 //     Run(context.Context, args...string) error
 // which it invokes only after all command line flag processing has been finished.
@@ -64,6 +66,10 @@
 	Run(ctx context.Context, args ...string) error
 }
 
+type SubCommand interface {
+	Parent() string
+}
+
 // This is the type returned by CommandLineErrorf, which causes the outer main
 // to trigger printing of the command line help.
 type commandLineError string
@@ -83,13 +89,7 @@
 // application exits with an exit code of 2.
 func Main(ctx context.Context, app Application, args []string) {
 	s := flag.NewFlagSet(app.Name(), flag.ExitOnError)
-	s.Usage = func() {
-		fmt.Fprint(s.Output(), app.ShortHelp())
-		fmt.Fprintf(s.Output(), "\n\nUsage: %v [flags] %v\n", app.Name(), app.Usage())
-		app.DetailedHelp(s)
-	}
-	addFlags(s, reflect.StructField{}, reflect.ValueOf(app))
-	if err := Run(ctx, app, args); err != nil {
+	if err := Run(ctx, s, app, args); err != nil {
 		fmt.Fprintf(s.Output(), "%s: %v\n", app.Name(), err)
 		if _, printHelp := err.(commandLineError); printHelp {
 			s.Usage()
@@ -101,15 +101,26 @@
 // Run is the inner loop for Main; invoked by Main, recursively by
 // Run, and by various tests.  It runs the application and returns an
 // error.
-func Run(ctx context.Context, app Application, args []string) error {
-	s := flag.NewFlagSet(app.Name(), flag.ExitOnError)
+func Run(ctx context.Context, s *flag.FlagSet, app Application, args []string) error {
 	s.Usage = func() {
-		fmt.Fprint(s.Output(), app.ShortHelp())
-		fmt.Fprintf(s.Output(), "\n\nUsage: %v [flags] %v\n", app.Name(), app.Usage())
+		if app.ShortHelp() != "" {
+			fmt.Fprintf(s.Output(), "%s\n\nUsage:\n  ", app.ShortHelp())
+			if sub, ok := app.(SubCommand); ok && sub.Parent() != "" {
+				fmt.Fprintf(s.Output(), "%s [flags] %s", sub.Parent(), app.Name())
+			} else {
+				fmt.Fprintf(s.Output(), "%s [flags]", app.Name())
+			}
+			if usage := app.Usage(); usage != "" {
+				fmt.Fprintf(s.Output(), " %s", usage)
+			}
+			fmt.Fprint(s.Output(), "\n")
+		}
 		app.DetailedHelp(s)
 	}
 	p := addFlags(s, reflect.StructField{}, reflect.ValueOf(app))
-	s.Parse(args)
+	if err := s.Parse(args); err != nil {
+		return err
+	}
 
 	if p != nil && p.CPU != "" {
 		f, err := os.Create(p.CPU)
@@ -161,30 +172,44 @@
 		return nil
 	}
 	// now see if is actually a flag
-	flagName, isFlag := field.Tag.Lookup("flag")
+	flagNames, isFlag := field.Tag.Lookup("flag")
 	help := field.Tag.Get("help")
-	if !isFlag {
-		// not a flag, but it might be a struct with flags in it
-		if value.Elem().Kind() != reflect.Struct {
-			return nil
-		}
-		p, _ := value.Interface().(*Profile)
-		// go through all the fields of the struct
-		sv := value.Elem()
-		for i := 0; i < sv.Type().NumField(); i++ {
-			child := sv.Type().Field(i)
-			v := sv.Field(i)
-			// make sure we have a pointer
-			if v.Kind() != reflect.Ptr {
-				v = v.Addr()
-			}
-			// check if that field is a flag or contains flags
-			if fp := addFlags(f, child, v); fp != nil {
-				p = fp
+	if isFlag {
+		nameList := strings.Split(flagNames, ",")
+		// add the main flag
+		addFlag(f, value, nameList[0], help)
+		if len(nameList) > 1 {
+			// and now add any aliases using the same flag value
+			fv := f.Lookup(nameList[0]).Value
+			for _, flagName := range nameList[1:] {
+				f.Var(fv, flagName, help)
 			}
 		}
-		return p
+		return nil
 	}
+	// not a flag, but it might be a struct with flags in it
+	value = resolve(value.Elem())
+	if value.Kind() != reflect.Struct {
+		return nil
+	}
+	p, _ := value.Addr().Interface().(*Profile)
+	// go through all the fields of the struct
+	for i := 0; i < value.Type().NumField(); i++ {
+		child := value.Type().Field(i)
+		v := value.Field(i)
+		// make sure we have a pointer
+		if v.Kind() != reflect.Ptr {
+			v = v.Addr()
+		}
+		// check if that field is a flag or contains flags
+		if fp := addFlags(f, child, v); fp != nil {
+			p = fp
+		}
+	}
+	return p
+}
+
+func addFlag(f *flag.FlagSet, value reflect.Value, flagName string, help string) {
 	switch v := value.Interface().(type) {
 	case flag.Value:
 		f.Var(v, flagName, help)
@@ -207,5 +232,15 @@
 	default:
 		log.Fatalf("Cannot understand flag of type %T", v)
 	}
-	return nil
+}
+
+func resolve(v reflect.Value) reflect.Value {
+	for {
+		switch v.Kind() {
+		case reflect.Interface, reflect.Ptr:
+			v = v.Elem()
+		default:
+			return v
+		}
+	}
 }
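
The comma-separated flag names handled above come down to registering extra names that share one flag.Value. A standalone sketch of that trick with the standard flag package (the demo flag set and names are hypothetical):

package main

import (
	"flag"
	"fmt"
	"strings"
)

// addAliases registers extra names for an already-defined flag by reusing
// its flag.Value, so every name reads and writes the same variable.
func addAliases(fs *flag.FlagSet, names, usage string) {
	list := strings.Split(names, ",")
	primary := fs.Lookup(list[0])
	if primary == nil {
		panic("flag not defined: " + list[0])
	}
	for _, alias := range list[1:] {
		fs.Var(primary.Value, alias, usage)
	}
}

func main() {
	fs := flag.NewFlagSet("demo", flag.ExitOnError)
	verbose := fs.Bool("verbose", false, "verbose output")
	addAliases(fs, "verbose,v", "verbose output")

	if err := fs.Parse([]string{"-v"}); err != nil {
		panic(err)
	}
	fmt.Println(*verbose) // true: -v set the shared value
}
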
diff --git a/internal/typeparams/common.go b/internal/typeparams/common.go
index 9fc6b4b..ab6b30b 100644
--- a/internal/typeparams/common.go
+++ b/internal/typeparams/common.go
@@ -2,24 +2,179 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// Package typeparams provides functions to work indirectly with type parameter
-// data stored in go/ast and go/types objects, while these API are guarded by a
-// build constraint.
+// Package typeparams contains common utilities for writing tools that interact
+// with generic Go code, as introduced with Go 1.18.
 //
-// This package exists to make it easier for tools to work with generic code,
-// while also compiling against older Go versions.
+// Many of the types and functions in this package are proxies for the new APIs
+// introduced in the standard library with Go 1.18. For example, the
+// typeparams.Union type is an alias for go/types.Union, and the ForTypeSpec
+// function returns the value of the go/ast.TypeSpec.TypeParams field. At Go
+// versions older than 1.18 these helpers are implemented as stubs, allowing
+// users of this package to write code that handles generic constructs inline,
+// even if the Go version being used to compile does not support generics.
+//
+// Additionally, this package contains common utilities for working with the
+// new generic constructs, to supplement the standard library APIs. Notably,
+// the StructuralTerms API computes a minimal representation of the structural
+// restrictions on a type parameter. In the future, this API may be available
+// from go/types.
+//
+// See the example/README.md for a more detailed guide on how to update tools
+// to support generics.
 package typeparams
 
 import (
 	"go/ast"
 	"go/token"
+	"go/types"
 )
 
-// A IndexExprData holds data from both ast.IndexExpr and the new
-// ast.MultiIndexExpr, which was introduced in Go 1.18.
-type IndexExprData struct {
-	X       ast.Expr   // expression
-	Lbrack  token.Pos  // position of "["
-	Indices []ast.Expr // index expressions
-	Rbrack  token.Pos  // position of "]"
+// UnpackIndexExpr extracts data from AST nodes that represent index
+// expressions.
+//
+// For an ast.IndexExpr, the resulting indices slice will contain exactly one
+// index expression. For an ast.IndexListExpr (go1.18+), it may have a variable
+// number of index expressions.
+//
+// For nodes that don't represent index expressions, the first return value of
+// UnpackIndexExpr will be nil.
+func UnpackIndexExpr(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) {
+	switch e := n.(type) {
+	case *ast.IndexExpr:
+		return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack
+	case *IndexListExpr:
+		return e.X, e.Lbrack, e.Indices, e.Rbrack
+	}
+	return nil, token.NoPos, nil, token.NoPos
+}
+
+// PackIndexExpr returns an *ast.IndexExpr or *ast.IndexListExpr, depending on
+// the cardinality of indices. Calling PackIndexExpr with len(indices) == 0
+// will panic.
+func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) ast.Expr {
+	switch len(indices) {
+	case 0:
+		panic("empty indices")
+	case 1:
+		return &ast.IndexExpr{
+			X:      x,
+			Lbrack: lbrack,
+			Index:  indices[0],
+			Rbrack: rbrack,
+		}
+	default:
+		return &IndexListExpr{
+			X:       x,
+			Lbrack:  lbrack,
+			Indices: indices,
+			Rbrack:  rbrack,
+		}
+	}
+}
+
+// IsTypeParam reports whether t is a type parameter.
+func IsTypeParam(t types.Type) bool {
+	_, ok := t.(*TypeParam)
+	return ok
+}
+
+// OriginMethod returns the origin method associated with the method fn.
+// For methods on a non-generic receiver base type, this is just
+// fn. However, for methods with a generic receiver, OriginMethod returns the
+// corresponding method in the method set of the origin type.
+//
+// As a special case, if fn is not a method (has no receiver), OriginMethod
+// returns fn.
+func OriginMethod(fn *types.Func) *types.Func {
+	recv := fn.Type().(*types.Signature).Recv()
+	if recv == nil {
+
+		return fn
+	}
+	base := recv.Type()
+	p, isPtr := base.(*types.Pointer)
+	if isPtr {
+		base = p.Elem()
+	}
+	named, isNamed := base.(*types.Named)
+	if !isNamed {
+		// Receiver is a *types.Interface.
+		return fn
+	}
+	if ForNamed(named).Len() == 0 {
+		// Receiver base has no type parameters, so we can avoid the lookup below.
+		return fn
+	}
+	orig := NamedTypeOrigin(named)
+	gfn, _, _ := types.LookupFieldOrMethod(orig, true, fn.Pkg(), fn.Name())
+	return gfn.(*types.Func)
+}
+
+// GenericAssignableTo is a generalization of types.AssignableTo that
+// implements the following rule for uninstantiated generic types:
+//
+// If V and T are generic named types, then V is considered assignable to T if,
+// for every possible instantiation of V[A_1, ..., A_N], the instantiation
+// T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N].
+//
+// If T has structural constraints, they must be satisfied by V.
+//
+// For example, consider the following type declarations:
+//
+//  type Interface[T any] interface {
+//  	Accept(T)
+//  }
+//
+//  type Container[T any] struct {
+//  	Element T
+//  }
+//
+//  func (c Container[T]) Accept(t T) { c.Element = t }
+//
+// In this case, GenericAssignableTo reports that instantiations of Container
+// are assignable to the corresponding instantiation of Interface.
+func GenericAssignableTo(ctxt *Context, V, T types.Type) bool {
+	// If V and T are not both named, or do not have matching non-empty type
+	// parameter lists, fall back on types.AssignableTo.
+
+	VN, Vnamed := V.(*types.Named)
+	TN, Tnamed := T.(*types.Named)
+	if !Vnamed || !Tnamed {
+		return types.AssignableTo(V, T)
+	}
+
+	vtparams := ForNamed(VN)
+	ttparams := ForNamed(TN)
+	if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || NamedTypeArgs(VN).Len() != 0 || NamedTypeArgs(TN).Len() != 0 {
+		return types.AssignableTo(V, T)
+	}
+
+	// V and T have the same (non-zero) number of type params. Instantiate both
+	// with the type parameters of V. This must always succeed for V, and will
+	// succeed for T if and only if the type set of each type parameter of V is a
+	// subset of the type set of the corresponding type parameter of T, meaning
+	// that every instantiation of V corresponds to a valid instantiation of T.
+
+	// Minor optimization: ensure we share a context across the two
+	// instantiations below.
+	if ctxt == nil {
+		ctxt = NewContext()
+	}
+
+	var targs []types.Type
+	for i := 0; i < vtparams.Len(); i++ {
+		targs = append(targs, vtparams.At(i))
+	}
+
+	vinst, err := Instantiate(ctxt, V, targs, true)
+	if err != nil {
+		panic("type parameters should satisfy their own constraints")
+	}
+
+	tinst, err := Instantiate(ctxt, T, targs, true)
+	if err != nil {
+		return false
+	}
+
+	return types.AssignableTo(vinst, tinst)
 }
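
UnpackIndexExpr above normalizes both *ast.IndexExpr and the go1.18 IndexListExpr into one shape. A single-index sketch of the same normalization using only the standard library (unpackIndex is an illustrative stand-in here, not the package's function):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// unpackIndex extracts the operand and index expressions from an
// *ast.IndexExpr; UnpackIndexExpr above additionally handles the
// multi-index IndexListExpr form.
func unpackIndex(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) {
	if e, ok := n.(*ast.IndexExpr); ok {
		return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack
	}
	return nil, token.NoPos, nil, token.NoPos
}

func main() {
	// "List[int]" parses as an index expression whether or not List is generic.
	expr, err := parser.ParseExpr("List[int]")
	if err != nil {
		panic(err)
	}
	x, _, indices, _ := unpackIndex(expr)
	fmt.Printf("operand %s, %d index expression(s)\n", x.(*ast.Ident).Name, len(indices))
}
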
diff --git a/internal/typeparams/common_test.go b/internal/typeparams/common_test.go
index e15c297..68ef6c6 100644
--- a/internal/typeparams/common_test.go
+++ b/internal/typeparams/common_test.go
@@ -6,31 +6,227 @@
 
 import (
 	"go/ast"
+	"go/parser"
+	"go/token"
+	"go/types"
 	"testing"
 
-	"golang.org/x/tools/internal/typeparams"
+	"golang.org/x/tools/internal/testenv"
+	. "golang.org/x/tools/internal/typeparams"
 )
 
 func TestGetIndexExprData(t *testing.T) {
 	x := &ast.Ident{}
 	i := &ast.Ident{}
 
+	want := &IndexListExpr{X: x, Lbrack: 1, Indices: []ast.Expr{i}, Rbrack: 2}
 	tests := map[ast.Node]bool{
 		&ast.IndexExpr{X: x, Lbrack: 1, Index: i, Rbrack: 2}: true,
+		want:         true,
 		&ast.Ident{}: false,
 	}
-	want := &typeparams.IndexExprData{X: x, Lbrack: 1, Indices: []ast.Expr{i}, Rbrack: 2}
 
 	for n, isIndexExpr := range tests {
-		ix := typeparams.GetIndexExprData(n)
-		if got := ix != nil; got != isIndexExpr {
-			t.Errorf("GetIndexExprData(%+v) = %+v, want nil: %t", n, ix, !isIndexExpr)
+		X, lbrack, indices, rbrack := UnpackIndexExpr(n)
+		if got := X != nil; got != isIndexExpr {
+			t.Errorf("UnpackIndexExpr(%v) = %v, _, _, _; want nil: %t", n, x, !isIndexExpr)
 		}
-		if ix == nil {
+		if X == nil {
 			continue
 		}
-		if ix.X != x || ix.Lbrack != 1 || ix.Indices[0] != i || ix.Rbrack != 2 {
-			t.Errorf("GetIndexExprData(%+v) = %+v, want %+v", n, ix, want)
+		if X != x || lbrack != 1 || indices[0] != i || rbrack != 2 {
+			t.Errorf("UnpackIndexExprData(%v) = %v, %v, %v, %v; want %+v", n, x, lbrack, indices, rbrack, want)
+		}
+	}
+}
+
+func TestOriginMethodRecursive(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+	src := `package p
+
+type N[A any] int
+
+func (r N[B]) m() { r.m(); r.n() }
+
+func (r *N[C]) n() { }
+`
+	fset := token.NewFileSet()
+	f, err := parser.ParseFile(fset, "p.go", src, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+	info := types.Info{
+		Defs: make(map[*ast.Ident]types.Object),
+		Uses: make(map[*ast.Ident]types.Object),
+	}
+	var conf types.Config
+	if _, err := conf.Check("p", fset, []*ast.File{f}, &info); err != nil {
+		t.Fatal(err)
+	}
+
+	// Collect objects from types.Info.
+	var m, n *types.Func   // the 'origin' methods in Info.Defs
+	var mm, mn *types.Func // the methods used in the body of m
+
+	for _, decl := range f.Decls {
+		fdecl, ok := decl.(*ast.FuncDecl)
+		if !ok {
+			continue
+		}
+		def := info.Defs[fdecl.Name].(*types.Func)
+		switch fdecl.Name.Name {
+		case "m":
+			m = def
+			ast.Inspect(fdecl.Body, func(n ast.Node) bool {
+				if call, ok := n.(*ast.CallExpr); ok {
+					sel := call.Fun.(*ast.SelectorExpr)
+					use := info.Uses[sel.Sel].(*types.Func)
+					switch sel.Sel.Name {
+					case "m":
+						mm = use
+					case "n":
+						mn = use
+					}
+				}
+				return true
+			})
+		case "n":
+			n = def
+		}
+	}
+
+	tests := []struct {
+		name        string
+		input, want *types.Func
+	}{
+		{"declared m", m, m},
+		{"declared n", n, n},
+		{"used m", mm, m},
+		{"used n", mn, n},
+	}
+
+	for _, test := range tests {
+		if got := OriginMethod(test.input); got != test.want {
+			t.Errorf("OriginMethod(%q) = %v, want %v", test.name, test.input, test.want)
+		}
+	}
+}
+
+func TestOriginMethodUses(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+
+	tests := []string{
+		`type T interface { m() }; func _(t T) { t.m() }`,
+		`type T[P any] interface { m() P }; func _[A any](t T[A]) { t.m() }`,
+		`type T[P any] interface { m() P }; func _(t T[int]) { t.m() }`,
+		`type T[P any] int; func (r T[A]) m() { r.m() }`,
+		`type T[P any] int; func (r *T[A]) m() { r.m() }`,
+		`type T[P any] int; func (r *T[A]) m() {}; func _(t T[int]) { t.m() }`,
+		`type T[P any] int; func (r *T[A]) m() {}; func _[A any](t T[A]) { t.m() }`,
+	}
+
+	for _, src := range tests {
+		fset := token.NewFileSet()
+		f, err := parser.ParseFile(fset, "p.go", "package p; "+src, 0)
+		if err != nil {
+			t.Fatal(err)
+		}
+		info := types.Info{
+			Uses: make(map[*ast.Ident]types.Object),
+		}
+		var conf types.Config
+		pkg, err := conf.Check("p", fset, []*ast.File{f}, &info)
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		T := pkg.Scope().Lookup("T").Type()
+		obj, _, _ := types.LookupFieldOrMethod(T, true, pkg, "m")
+		m := obj.(*types.Func)
+
+		ast.Inspect(f, func(n ast.Node) bool {
+			if call, ok := n.(*ast.CallExpr); ok {
+				sel := call.Fun.(*ast.SelectorExpr)
+				use := info.Uses[sel.Sel].(*types.Func)
+				orig := OriginMethod(use)
+				if orig != m {
+					t.Errorf("%s:\nUses[%v] = %v, want %v", src, types.ExprString(sel), use, m)
+				}
+			}
+			return true
+		})
+	}
+}
+
+func TestGenericAssignableTo(t *testing.T) {
+	testenv.NeedsGo1Point(t, 18)
+
+	tests := []struct {
+		src  string
+		want bool
+	}{
+		// The inciting issue: golang/go#50887.
+		{`
+			type T[P any] interface {
+			        Accept(P)
+			}
+
+			type V[Q any] struct {
+			        Element Q
+			}
+
+			func (c V[Q]) Accept(q Q) { c.Element = q }
+			`, true},
+
+		// Various permutations on constraints and signatures.
+		{`type T[P ~int] interface{ A(P) }; type V[Q int] int; func (V[Q]) A(Q) {}`, true},
+		{`type T[P int] interface{ A(P) }; type V[Q ~int] int; func (V[Q]) A(Q) {}`, false},
+		{`type T[P int|string] interface{ A(P) }; type V[Q int] int; func (V[Q]) A(Q) {}`, true},
+		{`type T[P any] interface{ A(P) }; type V[Q any] int; func (V[Q]) A(Q, Q) {}`, false},
+		{`type T[P any] interface{ int; A(P) }; type V[Q any] int; func (V[Q]) A(Q) {}`, false},
+
+		// Various structural restrictions on T.
+		{`type T[P any] interface{ ~int; A(P) }; type V[Q any] int; func (V[Q]) A(Q) {}`, true},
+		{`type T[P any] interface{ ~int|string; A(P) }; type V[Q any] int; func (V[Q]) A(Q) {}`, true},
+		{`type T[P any] interface{ int; A(P) }; type V[Q int] int; func (V[Q]) A(Q) {}`, false},
+
+		// Various recursive constraints.
+		{`type T[P ~struct{ f *P }] interface{ A(P) }; type V[Q ~struct{ f *Q }] int; func (V[Q]) A(Q) {}`, true},
+		{`type T[P ~struct{ f *P }] interface{ A(P) }; type V[Q ~struct{ g *Q }] int; func (V[Q]) A(Q) {}`, false},
+		{`type T[P ~*X, X any] interface{ A(P) X }; type V[Q ~*Y, Y any] int; func (V[Q, Y]) A(Q) (y Y) { return }`, true},
+		{`type T[P ~*X, X any] interface{ A(P) X }; type V[Q ~**Y, Y any] int; func (V[Q, Y]) A(Q) (y Y) { return }`, false},
+		{`type T[P, X any] interface{ A(P) X }; type V[Q ~*Y, Y any] int; func (V[Q, Y]) A(Q) (y Y) { return }`, true},
+		{`type T[P ~*X, X any] interface{ A(P) X }; type V[Q, Y any] int; func (V[Q, Y]) A(Q) (y Y) { return }`, false},
+		{`type T[P, X any] interface{ A(P) X }; type V[Q, Y any] int; func (V[Q, Y]) A(Q) (y Y) { return }`, true},
+
+		// In this test case, we reverse the type parameters in the signature of V.A
+		{`type T[P, X any] interface{ A(P) X }; type V[Q, Y any] int; func (V[Q, Y]) A(Y) (y Q) { return }`, false},
+		// It would be nice to return true here: V can only be instantiated with
+		// [int, int], so the identity of the type parameters should not matter.
+		{`type T[P, X any] interface{ A(P) X }; type V[Q, Y int] int; func (V[Q, Y]) A(Y) (y Q) { return }`, false},
+	}
+
+	for _, test := range tests {
+		fset := token.NewFileSet()
+		f, err := parser.ParseFile(fset, "p.go", "package p; "+test.src, 0)
+		if err != nil {
+			t.Fatalf("%s:\n%v", test.src, err)
+		}
+		var conf types.Config
+		pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
+		if err != nil {
+			t.Fatalf("%s:\n%v", test.src, err)
+		}
+
+		V := pkg.Scope().Lookup("V").Type()
+		T := pkg.Scope().Lookup("T").Type()
+
+		if types.AssignableTo(V, T) {
+			t.Fatal("AssignableTo")
+		}
+
+		if got := GenericAssignableTo(nil, V, T); got != test.want {
+			t.Fatalf("%s:\nGenericAssignableTo(%v, %v) = %v, want %v", test.src, V, T, got, test.want)
 		}
 	}
 }
diff --git a/internal/typeparams/copytermlist.go b/internal/typeparams/copytermlist.go
new file mode 100644
index 0000000..b8f458a
--- /dev/null
+++ b/internal/typeparams/copytermlist.go
@@ -0,0 +1,98 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build ignore
+// +build ignore
+
+// copytermlist.go copies the term list algorithm from GOROOT/src/go/types.
+
+package main
+
+import (
+	"bytes"
+	"fmt"
+	"go/ast"
+	"go/format"
+	"go/parser"
+	"go/token"
+	"os"
+	"path/filepath"
+	"reflect"
+	"runtime"
+	"strings"
+
+	"golang.org/x/tools/go/ast/astutil"
+)
+
+func main() {
+	if err := doCopy(); err != nil {
+		fmt.Fprintf(os.Stderr, "error copying from go/types: %v", err)
+		os.Exit(1)
+	}
+}
+
+func doCopy() error {
+	dir := filepath.Join(runtime.GOROOT(), "src", "go", "types")
+	for _, name := range []string{"typeterm.go", "termlist.go"} {
+		path := filepath.Join(dir, name)
+		fset := token.NewFileSet()
+		file, err := parser.ParseFile(fset, path, nil, parser.ParseComments)
+		if err != nil {
+			return err
+		}
+		file.Name.Name = "typeparams"
+		file.Doc = &ast.CommentGroup{List: []*ast.Comment{&ast.Comment{Text: "DO NOT MODIFY"}}}
+		var needImport bool
+		selectorType := reflect.TypeOf((*ast.SelectorExpr)(nil))
+		astutil.Apply(file, func(c *astutil.Cursor) bool {
+			if id, _ := c.Node().(*ast.Ident); id != nil {
+				// Check if this ident should be qualified with types. For simplicity,
+				// assume the copied files do not themselves contain any exported
+				// symbols.
+
+				// As a simple heuristic, just verify that the ident may be replaced by
+				// a selector.
+				if !token.IsExported(id.Name) {
+					return false
+				}
+				v := reflect.TypeOf(c.Parent()).Elem() // ast nodes are all pointers
+				field, ok := v.FieldByName(c.Name())
+				if !ok {
+					panic("missing field")
+				}
+				t := field.Type
+				if c.Index() >= 0 { // => t is a slice
+					t = t.Elem()
+				}
+				if !selectorType.AssignableTo(t) {
+					return false
+				}
+				needImport = true
+				c.Replace(&ast.SelectorExpr{
+					X:   ast.NewIdent("types"),
+					Sel: ast.NewIdent(id.Name),
+				})
+			}
+			return true
+		}, nil)
+		if needImport {
+			astutil.AddImport(fset, file, "go/types")
+		}
+
+		var b bytes.Buffer
+		if err := format.Node(&b, fset, file); err != nil {
+			return err
+		}
+
+		// Hack in the 'generated' byline.
+		content := b.String()
+		header := "// Code generated by copytermlist.go DO NOT EDIT.\n\npackage typeparams"
+		content = strings.Replace(content, "package typeparams", header, 1)
+
+		if err := os.WriteFile(name, []byte(content), 0644); err != nil {
+			return err
+		}
+	}
+	return nil
+}
diff --git a/internal/typeparams/enabled_go117.go b/internal/typeparams/enabled_go117.go
new file mode 100644
index 0000000..1821239
--- /dev/null
+++ b/internal/typeparams/enabled_go117.go
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package typeparams
+
+// Enabled reports whether type parameters are enabled in the current build
+// environment.
+const Enabled = false
diff --git a/internal/typeparams/enabled_go118.go b/internal/typeparams/enabled_go118.go
new file mode 100644
index 0000000..d671488
--- /dev/null
+++ b/internal/typeparams/enabled_go118.go
@@ -0,0 +1,15 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typeparams
+
+// Note: this constant is in a separate file as this is the only acceptable
+// diff between the <1.18 API of this package and the 1.18 API.
+
+// Enabled reports whether type parameters are enabled in the current build
+// environment.
+const Enabled = true
diff --git a/internal/typeparams/example/README.md b/internal/typeparams/example/README.md
new file mode 100644
index 0000000..9877735
--- /dev/null
+++ b/internal/typeparams/example/README.md
@@ -0,0 +1,328 @@
+<!-- Autogenerated by weave; DO NOT EDIT -->
+<!-- To regenerate the readme, run: -->
+<!-- go run golang.org/x/example/gotypes@latest generic-go-types.md -->
+
+# Updating tools to support type parameters.
+
+This guide is maintained by Rob Findley (`rfindley@google.com`).
+
+**status**: this document is currently a work-in-progress. See
+[golang/go#50447](https://go.dev/issues/50447) for more details.
+
+1. [Introduction](#introduction)
+1. [Summary of new language features and their APIs](#summary-of-new-language-features-and-their-apis)
+1. [Examples](#examples)
+	1. [Generic types](#generic-types)
+	1. [Constraint Interfaces](#constraint-interfaces)
+	1. [Instantiation](#instantiation)
+1. [Updating tools while building at older Go versions](#updating-tools-while-building-at-older-go-versions)
+1. [Further help](#further-help)
+
+# Introduction
+
+With Go 1.18, Go now supports generic programming via type parameters. This
+document is intended to serve as a guide for tool authors that want to update
+their tools to support the new language constructs introduced for generic Go.
+
+This guide assumes some knowledge of the language changes to support generics.
+See the following references for more information:
+
+- The [original proposal](https://go.dev/issue/43651) for type parameters.
+- The [addendum for type sets](https://go.dev/issue/45346).
+- The [latest language specification](https://tip.golang.org/ref/spec) (still in progress as of 2021-01-11).
+- The proposals for new APIs in
+  [go/token and go/ast](https://go.dev/issue/47781), and in
+  [go/types](https://go.dev/issue/47916).
+
+It also assumes existing knowledge of `go/ast` and `go/types`. If you're just
+getting started,
+[x/example/gotypes](https://github.com/golang/example/tree/master/gotypes) is
+a great introduction (and was the inspiration for this guide).
+
+# Summary of new language features and their APIs
+
+While generic Go programming is a large change to the language, at a high level
+it introduces only a few new concepts. Specifically, we can break down our
+discussion into the following three broad categories. In each category, the
+relevant new APIs are listed (some constructors and getters/setters may be
+elided where they are trivial).
+
+**Generic types**. Types and functions may be _generic_, meaning their
+declaration has a non-empty _type parameter list_: as in `type List[T any]
+...` or `func f[T1, T2 any]() { ... }`. Type parameter lists define placeholder
+types (_type parameters_), scoped to the declaration, which may be substituted
+by any type satisfying their corresponding _constraint interface_ to
+_instantiate_ a new type or function.
+
+Generic types may have methods, which declare _receiver type parameters_ via
+their receiver type expression: `func (r T[P1, ..., PN]) method(...) (...)
+{...}`.
+
+_New APIs_:
+ - The field `ast.TypeSpec.TypeParams` holds the type parameter list syntax for
+   type declarations.
+ - The field `ast.FuncType.TypeParams` holds the type parameter list syntax for
+   function declarations.
+ - The type `types.TypeParam` is a `types.Type` representing a type parameter.
+   On this type, the `Constraint` and `SetConstraint` methods allow
+   getting/setting the constraint, the `Index` method returns the index of the
+   type parameter in the type parameter list that declares it, and the `Obj`
+   method returns the object declared in the declaration scope for the type
+   parameter (a `types.TypeName`).
+ - The type `types.TypeParamList` holds a list of type parameters.
+ - The method `types.Named.TypeParams` returns the type parameters for a type
+   declaration.
+ - The method `types.Named.SetTypeParams` sets type parameters on a defined
+   type.
+ - The function `types.NewSignatureType` creates a new (possibly generic)
+   signature type.
+ - The method `types.Signature.RecvTypeParams` returns the receiver type
+   parameters for a method.
+ - The method `types.Signature.TypeParams` returns the type parameters for
+   a function.
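+
+The `Methods on generic types` example below is still TODO, so the following is
+only a sketch of reading receiver type parameters. It assumes `fn` is a
+`*types.Func` taken from `types.Info.Defs` for a method declaration and that
+`fmt` and `go/types` are imported; the helper name `describeMethod` is made up
+for illustration.
+
+```
+func describeMethod(fn *types.Func) {
+	sig := fn.Type().(*types.Signature)
+	// RecvTypeParams is empty for methods on non-generic types.
+	rtparams := sig.RecvTypeParams()
+	fmt.Printf("%s has %d receiver type parameters\n", fn.Name(), rtparams.Len())
+	for i := 0; i < rtparams.Len(); i++ {
+		tparam := rtparams.At(i)
+		fmt.Printf("  %s has constraint %s\n", tparam.Obj().Name(), tparam.Constraint())
+	}
+}
+```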
+
+**Constraint Interfaces**: type parameter constraints are interfaces, expressed
+via an interface type expression. Interfaces that are only used in constraint
+position are permitted new embedded elements composed of tilde expressions
+(`~T`) and unions (`A | B | ~C`). The new builtin interface type `comparable`
+is implemented by types for which `==` and `!=` are valid. As a special case,
+the `interface` keyword may be omitted from constraint expressions if it may be
+implied (in which case we say the interface is _implicit_).
+
+_New APIs_:
+ - The constant `token.TILDE` is used to represent tilde expressions as an
+   `ast.UnaryExpr`.
+ - Union expressions are represented as an `ast.BinaryExpr` using `|`. This
+   means that `ast.BinaryExpr` may now be both a type and value expression.
+ - The method `types.Interface.IsImplicit` reports whether the `interface`
+   keyword was elided from this interface.
+ - The method `types.Interface.MarkImplicit` marks an interface as being
+   implicit.
+ - The method `types.Interface.IsComparable` reports whether every type in an
+   interface's type set is comparable.
+ - The method `types.Interface.IsMethodSet` reports whether an interface is
+   defined entirely by its methods (has no _specific types_).
+ - The type `types.Union` is a type that represents an embedded union
+   expression in an interface. May only appear as an embedded element in
+   interfaces.
+ - The type `types.Term` represents a (possibly tilde) term of a union.
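+
+The constraint-related example sections below are still TODO, so here is a
+hedged sketch of inspecting a constraint interface. It assumes `iface` is a
+`*types.Interface` (for example a type parameter's `Constraint().Underlying()`)
+and that `fmt` and `go/types` are imported; `describeConstraint` is a made-up
+helper name.
+
+```
+func describeConstraint(iface *types.Interface) {
+	fmt.Printf("implicit: %t, method set only: %t, comparable: %t\n",
+		iface.IsImplicit(), iface.IsMethodSet(), iface.IsComparable())
+	for i := 0; i < iface.NumEmbeddeds(); i++ {
+		switch e := iface.EmbeddedType(i).(type) {
+		case *types.Union:
+			// Each term of a union is a (possibly tilde) type.
+			for j := 0; j < e.Len(); j++ {
+				term := e.Term(j)
+				fmt.Printf("  term %d: tilde=%t type=%s\n", j, term.Tilde(), term.Type())
+			}
+		default:
+			fmt.Printf("  embedded type: %s\n", e)
+		}
+	}
+}
+```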
+
+**Instantiation**: generic types and functions may be _instantiated_ to create
+non-generic types and functions by providing _type arguments_ (`var x T[int]`).
+Function type arguments may be _inferred_ via function arguments, or via
+type parameter constraints.
+
+_New APIs_:
+ - The type `ast.IndexListExpr` holds index expressions with multiple indices,
+   as occurs in instantiation expressions with multiple type arguments, or in
+   receivers with multiple type parameters.
+ - The function `types.Instantiate` instantiates a generic type with type arguments.
+ - The type `types.Context` is an opaque instantiation context that may be
+   shared to reduce duplicate instances.
+ - The field `types.Config.Context` holds a shared `Context` to use for
+   instantiation while type-checking.
+ - The type `types.TypeList` holds a list of types.
+ - The type `types.ArgumentError` holds an error associated with a specific
+   argument index. Used to represent instantiation errors.
+ - The field `types.Info.Instances` maps instantiated identifiers to information
+   about the resulting type instance.
+ - The type `types.Instance` holds information about a type or function
+   instance.
+ - The method `types.Named.TypeArgs` reports the type arguments used to
+   instantiate a named type.
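+
+The instantiation example sections below are also TODO. As a sketch, the
+following shows both directions: instantiating a generic type manually with
+`types.Instantiate` and a shared `types.Context`, and reading back the
+instances recorded by the type checker in `types.Info.Instances`. It assumes
+`orig` is an uninstantiated generic `*types.Named` with two type parameters
+(such as `Pair` below), that `info.Instances` was non-nil before type checking,
+and that `fmt`, `go/token`, and `go/types` are imported; the helper names are
+made up.
+
+```
+func instantiateWithInts(ctxt *types.Context, orig *types.Named) (types.Type, error) {
+	targs := []types.Type{types.Typ[types.Int], types.Typ[types.Int]}
+	// The final argument asks Instantiate to verify that the type
+	// arguments satisfy their constraints.
+	return types.Instantiate(ctxt, orig, targs, true)
+}
+
+func reportInstances(fset *token.FileSet, info *types.Info) {
+	for id, inst := range info.Instances {
+		fmt.Printf("%s: %s instantiated as %s (%d type arguments)\n",
+			fset.Position(id.Pos()), id.Name, inst.Type, inst.TypeArgs.Len())
+	}
+}
+```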
+
+# Examples
+
+The following examples demonstrate the new APIs above, and discuss their
+properties. All examples are runnable, contained in subdirectories of the
+directory holding this README.
+
+## Generic types
+
+### Type parameter lists
+
+Suppose we want to understand the generic library below, which defines a generic
+`Pair`, a constraint interface `Constraint`, and a generic function `MakePair`.
+
+```
+package main
+
+type Constraint interface {
+	Value() interface{}
+}
+
+type Pair[L, R any] struct {
+	left  L
+	right R
+}
+
+func MakePair[L, R Constraint](l L, r R) Pair[L, R] {
+	return Pair[L, R]{l, r}
+}
+```
+
+We can use the new `TypeParams` fields in `ast.TypeSpec` and `ast.FuncType` to
+access the syntax of the type parameter list. From there, we can access type
+parameter types in at least three ways:
+ - by looking up type parameter definitions in `types.Info`
+ - by calling `TypeParams()` on `types.Named` or `types.Signature`
+ - by looking up type parameter objects in the declaration scope. Note that
+   there now may be a scope associated with an `ast.TypeSpec` node.
+
+```
+func PrintTypeParams(fset *token.FileSet, file *ast.File) error {
+	conf := types.Config{Importer: importer.Default()}
+	info := &types.Info{
+		Scopes: make(map[ast.Node]*types.Scope),
+		Defs:   make(map[*ast.Ident]types.Object),
+	}
+	_, err := conf.Check("hello", fset, []*ast.File{file}, info)
+	if err != nil {
+		return err
+	}
+
+	// For convenience, we can use ast.Inspect to find the nodes we want to
+	// investigate.
+	ast.Inspect(file, func(n ast.Node) bool {
+		var name *ast.Ident                  // the name of the generic object, or nil
+		var tparamSyntax *ast.FieldList      // the list of type parameter fields
+		var tparamTypes *types.TypeParamList // the list of type parameter types
+		var scopeNode ast.Node               // the node associated with the declaration scope
+
+		switch n := n.(type) {
+		case *ast.TypeSpec:
+			name = n.Name
+			tparamSyntax = n.TypeParams
+			tparamTypes = info.Defs[name].Type().(*types.Named).TypeParams()
+			scopeNode = n
+		case *ast.FuncDecl:
+			name = n.Name
+			tparamSyntax = n.Type.TypeParams
+			tparamTypes = info.Defs[name].Type().(*types.Signature).TypeParams()
+			scopeNode = n.Type
+		}
+
+		if name == nil {
+			return true // not a generic object
+		}
+
+		// Option 1: find type parameters by looking at their declaring field list.
+		if tparamSyntax != nil {
+			fmt.Printf("%s has a type parameter field list with %d fields\n", name.Name, tparamSyntax.NumFields())
+			for _, field := range tparamSyntax.List {
+				for _, name := range field.Names {
+					tparam := info.Defs[name]
+					fmt.Printf("  field %s defines an object %q\n", name.Name, tparam)
+				}
+			}
+		} else {
+			fmt.Printf("%s does not have a type parameter list\n", name.Name)
+		}
+
+		// Option 2: find type parameters via the TypeParams() method on the
+		// generic type.
+		fmt.Printf("%s has %d type parameters:\n", name.Name, tparamTypes.Len())
+		for i := 0; i < tparamTypes.Len(); i++ {
+			tparam := tparamTypes.At(i)
+			fmt.Printf("  %s has constraint %s\n", tparam, tparam.Constraint())
+		}
+
+		// Option 3: find type parameters by looking in the declaration scope.
+		scope, ok := info.Scopes[scopeNode]
+		if ok {
+			fmt.Printf("%s has a scope with %d objects:\n", name.Name, scope.Len())
+			for _, name := range scope.Names() {
+				fmt.Printf("  %s is a %T\n", name, scope.Lookup(name))
+			}
+		} else {
+			fmt.Printf("%s does not have a scope\n", name.Name)
+		}
+
+		return true
+	})
+	return nil
+}
+```
+
+This program produces the following output. Note that not every type spec has
+a scope.
+
+```
+> go run golang.org/x/tools/internal/typeparams/example/findtypeparams
+Constraint does not have a type parameter list
+Constraint has 0 type parameters:
+Constraint does not have a scope
+Pair has a type parameter field list with 2 fields
+  field L defines an object "type parameter L any"
+  field R defines an object "type parameter R any"
+Pair has 2 type parameters:
+  L has constraint any
+  R has constraint any
+Pair has a scope with 2 objects:
+  L is a *types.TypeName
+  R is a *types.TypeName
+MakePair has a type parameter field list with 2 fields
+  field L defines an object "type parameter L hello.Constraint"
+  field R defines an object "type parameter R hello.Constraint"
+MakePair has 2 type parameters:
+  L has constraint hello.Constraint
+  R has constraint hello.Constraint
+MakePair has a scope with 4 objects:
+  L is a *types.TypeName
+  R is a *types.TypeName
+  l is a *types.Var
+  r is a *types.Var
+```
+
+### Methods on generic types
+
+**TODO**
+
+## Constraint Interfaces
+
+### New interface elements
+
+**TODO**
+
+### Implicit interfaces
+
+**TODO**
+
+### Type sets
+
+**TODO**
+
+## Instantiation
+
+### Finding instantiated types
+
+**TODO**
+
+### Creating new instantiated types
+
+**TODO**
+
+### Using a shared context
+
+**TODO**
+
+# Updating tools while building at older Go versions
+
+In the examples above, we can see how a lot of the new APIs integrate with
+existing usage of `go/ast` or `go/types`. However, most tools still need to
+build at older Go versions, and handling the new language constructs in-line
+will break builds at older Go versions.
+
+For this purpose, the planned `x/exp/typeparams` package will provide functions
+and types that proxy the new APIs (with stub implementations at older Go
+versions). **NOTE**: this package does not yet exist -- see
+[golang/go#50447](https://go.dev/issues/50447) for more information.
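+
+Until that package lands, the pattern used by this repository's internal
+`typeparams` package (compare `enabled_go117.go` and `enabled_go118.go` in this
+change) can be copied directly into a tool: keep two files guarded by build
+constraints, and reference the new APIs only from the `go1.18` file. A minimal
+sketch, with made-up file, package, and identifier names:
+
+```
+// generics_go117.go
+
+//go:build !go1.18
+// +build !go1.18
+
+package mytool
+
+import "go/ast"
+
+const typeParamsEnabled = false
+
+// typeParamFields reports no type parameters at older Go versions.
+func typeParamFields(*ast.TypeSpec) *ast.FieldList { return nil }
+
+// generics_go118.go
+
+//go:build go1.18
+// +build go1.18
+
+package mytool
+
+import "go/ast"
+
+const typeParamsEnabled = true
+
+// typeParamFields returns the type parameter list of a type declaration.
+func typeParamFields(ts *ast.TypeSpec) *ast.FieldList { return ts.TypeParams }
+```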
+
+# Further help
+
+If you're working on updating a tool to support generics and need help, please
+feel free to reach out in any of the following ways:
+ - Via the [golang-tools](https://groups.google.com/g/golang-tools) mailing list.
+ - Directly to me via email (`rfindley@google.com`).
+ - For bugs, you can [file an issue](https://github.com/golang/go/issues/new/choose).
diff --git a/internal/typeparams/example/findtypeparams/main.go b/internal/typeparams/example/findtypeparams/main.go
new file mode 100644
index 0000000..0fe8011
--- /dev/null
+++ b/internal/typeparams/example/findtypeparams/main.go
@@ -0,0 +1,155 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package main
+
+import (
+	"fmt"
+	"go/ast"
+	"go/importer"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"log"
+)
+
+const hello = `
+//!+input
+package main
+
+type Constraint interface {
+	Value() interface{}
+}
+
+type Pair[L, R any] struct {
+	left  L
+	right R
+}
+
+func MakePair[L, R Constraint](l L, r R) Pair[L, R] {
+	return Pair[L, R]{l, r}
+}
+//!-input
+`
+
+//!+print
+func PrintTypeParams(fset *token.FileSet, file *ast.File) error {
+	conf := types.Config{Importer: importer.Default()}
+	info := &types.Info{
+		Scopes: make(map[ast.Node]*types.Scope),
+		Defs:   make(map[*ast.Ident]types.Object),
+	}
+	_, err := conf.Check("hello", fset, []*ast.File{file}, info)
+	if err != nil {
+		return err
+	}
+
+	// For convenience, we can use ast.Inspect to find the nodes we want to
+	// investigate.
+	ast.Inspect(file, func(n ast.Node) bool {
+		var name *ast.Ident                  // the name of the generic object, or nil
+		var tparamSyntax *ast.FieldList      // the list of type parameter fields
+		var tparamTypes *types.TypeParamList // the list of type parameter types
+		var scopeNode ast.Node               // the node associated with the declaration scope
+
+		switch n := n.(type) {
+		case *ast.TypeSpec:
+			name = n.Name
+			tparamSyntax = n.TypeParams
+			tparamTypes = info.Defs[name].Type().(*types.Named).TypeParams()
+			scopeNode = n
+		case *ast.FuncDecl:
+			name = n.Name
+			tparamSyntax = n.Type.TypeParams
+			tparamTypes = info.Defs[name].Type().(*types.Signature).TypeParams()
+			scopeNode = n.Type
+		}
+
+		if name == nil {
+			return true // not a generic object
+		}
+
+		// Option 1: find type parameters by looking at their declaring field list.
+		if tparamSyntax != nil {
+			fmt.Printf("%s has a type parameter field list with %d fields\n", name.Name, tparamSyntax.NumFields())
+			for _, field := range tparamSyntax.List {
+				for _, name := range field.Names {
+					tparam := info.Defs[name]
+					fmt.Printf("  field %s defines an object %q\n", name.Name, tparam)
+				}
+			}
+		} else {
+			fmt.Printf("%s does not have a type parameter list\n", name.Name)
+		}
+
+		// Option 2: find type parameters via the TypeParams() method on the
+		// generic type.
+		fmt.Printf("%s has %d type parameters:\n", name.Name, tparamTypes.Len())
+		for i := 0; i < tparamTypes.Len(); i++ {
+			tparam := tparamTypes.At(i)
+			fmt.Printf("  %s has constraint %s\n", tparam, tparam.Constraint())
+		}
+
+		// Option 3: find type parameters by looking in the declaration scope.
+		scope, ok := info.Scopes[scopeNode]
+		if ok {
+			fmt.Printf("%s has a scope with %d objects:\n", name.Name, scope.Len())
+			for _, name := range scope.Names() {
+				fmt.Printf("  %s is a %T\n", name, scope.Lookup(name))
+			}
+		} else {
+			fmt.Printf("%s does not have a scope\n", name.Name)
+		}
+
+		return true
+	})
+	return nil
+}
+
+//!-print
+
+/*
+//!+output
+> go run golang.org/x/tools/internal/typeparams/example/findtypeparams
+Constraint does not have a type parameter list
+Constraint has 0 type parameters:
+Constraint does not have a scope
+Pair has a type parameter field list with 2 fields
+  field L defines an object "type parameter L any"
+  field R defines an object "type parameter R any"
+Pair has 2 type parameters:
+  L has constraint any
+  R has constraint any
+Pair has a scope with 2 objects:
+  L is a *types.TypeName
+  R is a *types.TypeName
+MakePair has a type parameter field list with 2 fields
+  field L defines an object "type parameter L hello.Constraint"
+  field R defines an object "type parameter R hello.Constraint"
+MakePair has 2 type parameters:
+  L has constraint hello.Constraint
+  R has constraint hello.Constraint
+MakePair has a scope with 4 objects:
+  L is a *types.TypeName
+  R is a *types.TypeName
+  l is a *types.Var
+  r is a *types.Var
+//!-output
+*/
+
+func main() {
+	// Parse one file.
+	fset := token.NewFileSet()
+	f, err := parser.ParseFile(fset, "hello.go", hello, 0)
+	if err != nil {
+		log.Fatal(err) // parse error
+	}
+	if err := PrintTypeParams(fset, f); err != nil {
+		log.Fatal(err) // type error
+	}
+}
diff --git a/internal/typeparams/example/generic-go-types.md b/internal/typeparams/example/generic-go-types.md
new file mode 100644
index 0000000..8d2f6ff
--- /dev/null
+++ b/internal/typeparams/example/generic-go-types.md
@@ -0,0 +1,206 @@
+<!-- To regenerate the readme, run: -->
+<!-- go run golang.org/x/example/gotypes@latest generic-go-types.md -->
+
+# Updating tools to support type parameters.
+
+This guide is maintained by Rob Findley (`rfindley@google.com`).
+
+**status**: this document is currently a work-in-progress. See
+[golang/go#50447](https://go.dev/issues/50447) for more details.
+
+%toc
+
+# Introduction
+
+With Go 1.18, Go now supports generic programming via type parameters. This
+document is intended to serve as a guide for tool authors that want to update
+their tools to support the new language constructs introduced for generic Go.
+
+This guide assumes some knowledge of the language changes to support generics.
+See the following references for more information:
+
+- The [original proposal](https://go.dev/issue/43651) for type parameters.
+- The [addendum for type sets](https://go.dev/issue/45346).
+- The [latest language specification](https://tip.golang.org/ref/spec) (still in progress as of 2021-01-11).
+- The proposals for new APIs in
+  [go/token and go/ast](https://go.dev/issue/47781), and in
+  [go/types](https://go.dev/issue/47916).
+
+It also assumes existing knowledge of `go/ast` and `go/types`. If you're just
+getting started,
+[x/example/gotypes](https://github.com/golang/example/tree/master/gotypes) is
+a great introduction (and was the inspiration for this guide).
+
+# Summary of new language features and their APIs
+
+While generic Go programming is a large change to the language, at a high level
+it introduces only a few new concepts. Specifically, we can break down our
+discussion into the following three broad categories. In each category, the
+relevant new APIs are listed (some constructors and getters/setters may be
+elided where they are trivial).
+
+**Generic types**. Types and functions may be _generic_, meaning their
+declaration has a non-empty _type parameter list_: as in `type List[T any]
+...` or `func f[T1, T2 any]() { ... }`. Type parameter lists define placeholder
+types (_type parameters_), scoped to the declaration, which may be substituted
+by any type satisfying their corresponding _constraint interface_ to
+_instantiate_ a new type or function.
+
+Generic types may have methods, which declare _receiver type parameters_ via
+their receiver type expression: `func (r T[P1, ..., PN]) method(...) (...)
+{...}`.
+
+_New APIs_:
+ - The field `ast.TypeSpec.TypeParams` holds the type parameter list syntax for
+   type declarations.
+ - The field `ast.FuncType.TypeParams` holds the type parameter list syntax for
+   function declarations.
+ - The type `types.TypeParam` is a `types.Type` representing a type parameter.
+   On this type, the `Constraint` and `SetConstraint` methods allow
+   getting/setting the constraint, the `Index` method returns the index of the
+   type parameter in the type parameter list that declares it, and the `Obj`
+   method returns the object declared in the declaration scope for the type
+   parameter (a `types.TypeName`).
+ - The type `types.TypeParamList` holds a list of type parameters.
+ - The method `types.Named.TypeParams` returns the type parameters for a type
+   declaration.
+ - The method `types.Named.SetTypeParams` sets type parameters on a defined
+   type.
+ - The function `types.NewSignatureType` creates a new (possibly generic)
+   signature type.
+ - The method `types.Signature.RecvTypeParams` returns the receiver type
+   parameters for a method.
+ - The method `types.Signature.TypeParams` returns the type parameters for
+   a function.
+
+**Constraint Interfaces**: type parameter constraints are interfaces, expressed
+via an interface type expression. Interfaces that are only used in constraint
+position are permitted new embedded elements composed of tilde expressions
+(`~T`) and unions (`A | B | ~C`). The new builtin interface type `comparable`
+is implemented by types for which `==` and `!=` are valid. As a special case,
+the `interface` keyword may be omitted from constraint expressions if it may be
+implied (in which case we say the interface is _implicit_).
+
+_New APIs_:
+ - The constant `token.TILDE` is used to represent tilde expressions as an
+   `ast.UnaryExpr`.
+ - Union expressions are represented as an `ast.BinaryExpr` using `|`. This
+   means that `ast.BinaryExpr` may now be both a type and value expression.
+ - The method `types.Interface.IsImplicit` reports whether the `interface`
+   keyword was elided from this interface.
+ - The method `types.Interface.MarkImplicit` marks an interface as being
+   implicit.
+ - The method `types.Interface.IsComparable` reports whether every type in an
+   interface's type set is comparable.
+ - The method `types.Interface.IsMethodSet` reports whether an interface is
+   defined entirely by its methods (has no _specific types_).
+ - The type `types.Union` is a type that represents an embedded union
+   expression in an interface. May only appear as an embedded element in
+   interfaces.
+ - The type `types.Term` represents a (possibly tilde) term of a union.
+
+**Instantiation**: generic types and functions may be _instantiated_ to create
+non-generic types and functions by providing _type arguments_ (`var x T[int]`).
+Function type arguments may be _inferred_ via function arguments, or via
+type parameter constraints.
+
+_New APIs_:
+ - The type `ast.IndexListExpr` holds index expressions with multiple indices,
+   as occurs in instantiation expressions with multiple type arguments, or in
+   receivers with multiple type parameters.
+ - The function `types.Instantiate` instantiates a generic type with type arguments.
+ - The type `types.Context` is an opaque instantiation context that may be
+   shared to reduce duplicate instances.
+ - The field `types.Config.Context` holds a shared `Context` to use for
+   instantiation while type-checking.
+ - The type `types.TypeList` holds a list of types.
+ - The type `types.ArgumentError` holds an error associated with a specific
+   argument index. Used to represent instantiation errors.
+ - The field `types.Info.Instances` maps instantiated identifiers to information
+   about the resulting type instance.
+ - The type `types.Instance` holds information about a type or function
+   instance.
+ - The method `types.Named.TypeArgs` reports the type arguments used to
+   instantiate a named type.
+
+# Examples
+
+The following examples demonstrate the new APIs above, and discuss their
+properties. All examples are runnable, contained in subdirectories of the
+directory holding this README.
+
+## Generic types
+
+### Type parameter lists
+
+Suppose we want to understand the generic library below, which defines a generic
+`Pair`, a constraint interface `Constraint`, and a generic function `MakePair`.
+
+%include findtypeparams/main.go input -
+
+We can use the new `TypeParams` fields in `ast.TypeSpec` and `ast.FuncType` to
+access the syntax of the type parameter list. From there, we can access type
+parameter types in at least three ways:
+ - by looking up type parameter definitions in `types.Info`
+ - by calling `TypeParams()` on `types.Named` or `types.Signature`
+ - by looking up type parameter objects in the declaration scope. Note that
+   there now may be a scope associated with an `ast.TypeSpec` node.
+
+%include findtypeparams/main.go print -
+
+This program produces the following output. Note that not every type spec has
+a scope.
+
+%include findtypeparams/main.go output -
+
+### Methods on generic types
+
+**TODO**
+
+## Constraint Interfaces
+
+### New interface elements
+
+**TODO**
+
+### Implicit interfaces
+
+**TODO**
+
+### Type sets
+
+**TODO**
+
+## Instantiation
+
+### Finding instantiated types
+
+**TODO**
+
+### Creating new instantiated types
+
+**TODO**
+
+### Using a shared context
+
+**TODO**
+
+# Updating tools while building at older Go versions
+
+In the examples above, we can see how a lot of the new APIs integrate with
+existing usage of `go/ast` or `go/types`. However, most tools still need to
+build at older Go versions, and handling the new language constructs in-line
+will break builds at older Go versions.
+
+For this purpose, the planned `x/exp/typeparams` package will provide functions
+and types that proxy the new APIs (with stub implementations at older Go
+versions). **NOTE**: this package does not yet exist -- see
+[golang/go#50447](https://go.dev/issues/50447) for more information.
+
+# Further help
+
+If you're working on updating a tool to support generics and need help, please
+feel free to reach out in any of the following ways:
+ - Via the [golang-tools](https://groups.google.com/g/golang-tools) mailing list.
+ - Directly to me via email (`rfindley@google.com`).
+ - For bugs, you can [file an issue](https://github.com/golang/go/issues/new/choose).
diff --git a/internal/typeparams/genericfeatures/features.go b/internal/typeparams/genericfeatures/features.go
new file mode 100644
index 0000000..8ceef86
--- /dev/null
+++ b/internal/typeparams/genericfeatures/features.go
@@ -0,0 +1,105 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package genericfeatures provides utilities for detecting usage of
+// generic programming in Go packages.
+package genericfeatures
+
+import (
+	"go/ast"
+	"go/types"
+	"strings"
+
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
+)
+
+// Features is a set of flags reporting which features of generic Go code a
+// package uses, or 0.
+type Features int
+
+const (
+	// GenericTypeDecls indicates whether the package declares types with type
+	// parameters.
+	GenericTypeDecls Features = 1 << iota
+
+	// GenericFuncDecls indicates whether the package declares functions with
+	// type parameters.
+	GenericFuncDecls
+
+	// EmbeddedTypeSets indicates whether the package declares interfaces that
+	// contain structural type restrictions, i.e. are not fully described by
+	// their method sets.
+	EmbeddedTypeSets
+
+	// TypeInstantiation indicates whether the package instantiates any generic
+	// types.
+	TypeInstantiation
+
+	// FuncInstantiation indicates whether the package instantiates any generic
+	// functions.
+	FuncInstantiation
+)
+
+func (f Features) String() string {
+	var feats []string
+	if f&GenericTypeDecls != 0 {
+		feats = append(feats, "typeDecl")
+	}
+	if f&GenericFuncDecls != 0 {
+		feats = append(feats, "funcDecl")
+	}
+	if f&EmbeddedTypeSets != 0 {
+		feats = append(feats, "typeSet")
+	}
+	if f&TypeInstantiation != 0 {
+		feats = append(feats, "typeInstance")
+	}
+	if f&FuncInstantiation != 0 {
+		feats = append(feats, "funcInstance")
+	}
+	return "features{" + strings.Join(feats, ",") + "}"
+}
+
+// ForPackage computes which generic features are used directly by the
+// package being analyzed.
+func ForPackage(inspect *inspector.Inspector, info *types.Info) Features {
+	nodeFilter := []ast.Node{
+		(*ast.FuncType)(nil),
+		(*ast.InterfaceType)(nil),
+		(*ast.ImportSpec)(nil),
+		(*ast.TypeSpec)(nil),
+	}
+
+	var direct Features
+
+	inspect.Preorder(nodeFilter, func(node ast.Node) {
+		switch n := node.(type) {
+		case *ast.FuncType:
+			if tparams := typeparams.ForFuncType(n); tparams != nil {
+				direct |= GenericFuncDecls
+			}
+		case *ast.InterfaceType:
+			tv := info.Types[n]
+			if iface, _ := tv.Type.(*types.Interface); iface != nil && !typeparams.IsMethodSet(iface) {
+				direct |= EmbeddedTypeSets
+			}
+		case *ast.TypeSpec:
+			if tparams := typeparams.ForTypeSpec(n); tparams != nil {
+				direct |= GenericTypeDecls
+			}
+		}
+	})
+
+	instances := typeparams.GetInstances(info)
+	for _, inst := range instances {
+		switch inst.Type.(type) {
+		case *types.Named:
+			direct |= TypeInstantiation
+		case *types.Signature:
+			direct |= FuncInstantiation
+		}
+	}
+	return direct
+}
diff --git a/internal/typeparams/normalize.go b/internal/typeparams/normalize.go
new file mode 100644
index 0000000..090f142
--- /dev/null
+++ b/internal/typeparams/normalize.go
@@ -0,0 +1,216 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+	"errors"
+	"fmt"
+	"go/types"
+	"os"
+	"strings"
+)
+
+//go:generate go run copytermlist.go
+
+const debug = false
+
+var ErrEmptyTypeSet = errors.New("empty type set")
+
+// StructuralTerms returns a slice of terms representing the normalized
+// structural type restrictions of a type parameter, if any.
+//
+// Structural type restrictions of a type parameter are created via
+// non-interface types embedded in its constraint interface (directly, or via a
+// chain of interface embeddings). For example, in the declaration
+//  type T[P interface{~int; m()}] int
+// the structural restriction of the type parameter P is ~int.
+//
+// With interface embedding and unions, the specification of structural type
+// restrictions may be arbitrarily complex. For example, consider the
+// following:
+//
+//  type A interface{ ~string|~[]byte }
+//
+//  type B interface{ int|string }
+//
+//  type C interface { ~string|~int }
+//
+//  type T[P interface{ A|B; C }] int
+//
+// In this example, the structural type restriction of P is ~string|int: A|B
+// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
+// which when intersected with C (~string|~int) yields ~string|int.
+//
+// StructuralTerms computes these expansions and reductions, producing a
+// "normalized" form of the embeddings. A structural restriction is normalized
+// if it is a single union containing no interface terms, and is minimal in the
+// sense that removing any term changes the set of types satisfying the
+// constraint. It is left as a proof for the reader that, modulo sorting, there
+// is exactly one such normalized form.
+//
+// Because the minimal representation always takes this form, StructuralTerms
+// returns a slice of tilde terms corresponding to the terms of the union in
+// the normalized structural restriction. An error is returned if the
+// constraint interface is invalid, exceeds complexity bounds, or has an empty
+// type set. In the latter case, StructuralTerms returns ErrEmptyTypeSet.
+//
+// StructuralTerms makes no guarantees about the order of terms, except that it
+// is deterministic.
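+//
+// A minimal call-site sketch (tparam is assumed to come from a type parameter
+// list, e.g. ForNamed(named).At(0)):
+//
+//	terms, err := StructuralTerms(tparam)
+//	if errors.Is(err, ErrEmptyTypeSet) {
+//		// The constraint can never be satisfied.
+//	}
+//	for _, term := range terms {
+//		fmt.Println(term.Tilde(), term.Type())
+//	}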
+func StructuralTerms(tparam *TypeParam) ([]*Term, error) {
+	constraint := tparam.Constraint()
+	if constraint == nil {
+		return nil, fmt.Errorf("%s has nil constraint", tparam)
+	}
+	iface, _ := constraint.Underlying().(*types.Interface)
+	if iface == nil {
+		return nil, fmt.Errorf("constraint is %T, not *types.Interface", constraint.Underlying())
+	}
+	return InterfaceTermSet(iface)
+}
+
+// InterfaceTermSet computes the normalized terms for a constraint interface,
+// returning an error if the term set cannot be computed or is empty. In the
+// latter case, the error will be ErrEmptyTypeSet.
+//
+// See the documentation of StructuralTerms for more information on
+// normalization.
+func InterfaceTermSet(iface *types.Interface) ([]*Term, error) {
+	return computeTermSet(iface)
+}
+
+// UnionTermSet computes the normalized terms for a union, returning an error
+// if the term set cannot be computed or is empty. In the latter case, the
+// error will be ErrEmptyTypeSet.
+//
+// See the documentation of StructuralTerms for more information on
+// normalization.
+func UnionTermSet(union *Union) ([]*Term, error) {
+	return computeTermSet(union)
+}
+
+func computeTermSet(typ types.Type) ([]*Term, error) {
+	tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0)
+	if err != nil {
+		return nil, err
+	}
+	if tset.terms.isEmpty() {
+		return nil, ErrEmptyTypeSet
+	}
+	if tset.terms.isAll() {
+		return nil, nil
+	}
+	var terms []*Term
+	for _, term := range tset.terms {
+		terms = append(terms, NewTerm(term.tilde, term.typ))
+	}
+	return terms, nil
+}
+
+// A termSet holds the normalized set of terms for a given type.
+//
+// The name termSet is intentionally distinct from 'type set': a type set is
+// all types that implement a type (and includes method restrictions), whereas
+// a term set just represents the structural restrictions on a type.
+type termSet struct {
+	complete bool
+	terms    termlist
+}
+
+func indentf(depth int, format string, args ...interface{}) {
+	fmt.Fprintf(os.Stderr, strings.Repeat(".", depth)+format+"\n", args...)
+}
+
+func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth int) (res *termSet, err error) {
+	if t == nil {
+		panic("nil type")
+	}
+
+	if debug {
+		indentf(depth, "%s", t.String())
+		defer func() {
+			if err != nil {
+				indentf(depth, "=> %s", err)
+			} else {
+				indentf(depth, "=> %s", res.terms.String())
+			}
+		}()
+	}
+
+	const maxTermCount = 100
+	if tset, ok := seen[t]; ok {
+		if !tset.complete {
+			return nil, fmt.Errorf("cycle detected in the declaration of %s", t)
+		}
+		return tset, nil
+	}
+
+	// Mark the current type as seen to avoid infinite recursion.
+	tset := new(termSet)
+	defer func() {
+		tset.complete = true
+	}()
+	seen[t] = tset
+
+	switch u := t.Underlying().(type) {
+	case *types.Interface:
+		// The term set of an interface is the intersection of the term sets of its
+		// embedded types.
+		tset.terms = allTermlist
+		for i := 0; i < u.NumEmbeddeds(); i++ {
+			embedded := u.EmbeddedType(i)
+			if _, ok := embedded.Underlying().(*TypeParam); ok {
+				return nil, fmt.Errorf("invalid embedded type %T", embedded)
+			}
+			tset2, err := computeTermSetInternal(embedded, seen, depth+1)
+			if err != nil {
+				return nil, err
+			}
+			tset.terms = tset.terms.intersect(tset2.terms)
+		}
+	case *Union:
+		// The term set of a union is the union of term sets of its terms.
+		tset.terms = nil
+		for i := 0; i < u.Len(); i++ {
+			t := u.Term(i)
+			var terms termlist
+			switch t.Type().Underlying().(type) {
+			case *types.Interface:
+				tset2, err := computeTermSetInternal(t.Type(), seen, depth+1)
+				if err != nil {
+					return nil, err
+				}
+				terms = tset2.terms
+			case *TypeParam, *Union:
+				// A stand-alone type parameter or union is not permitted as union
+				// term.
+				return nil, fmt.Errorf("invalid union term %T", t)
+			default:
+				if t.Type() == types.Typ[types.Invalid] {
+					continue
+				}
+				terms = termlist{{t.Tilde(), t.Type()}}
+			}
+			tset.terms = tset.terms.union(terms)
+			if len(tset.terms) > maxTermCount {
+				return nil, fmt.Errorf("exceeded max term count %d", maxTermCount)
+			}
+		}
+	case *TypeParam:
+		panic("unreachable")
+	default:
+		// For all other types, the term set is just a single non-tilde term
+		// holding the type itself.
+		if u != types.Typ[types.Invalid] {
+			tset.terms = termlist{{false, t}}
+		}
+	}
+	return tset, nil
+}
+
+// under is a facade for the go/types internal function of the same name. It is
+// used by typeterm.go.
+func under(t types.Type) types.Type {
+	return t.Underlying()
+}
diff --git a/internal/typeparams/normalize_test.go b/internal/typeparams/normalize_test.go
new file mode 100644
index 0000000..5969eee
--- /dev/null
+++ b/internal/typeparams/normalize_test.go
@@ -0,0 +1,104 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams_test
+
+import (
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/internal/typeparams"
+	. "golang.org/x/tools/internal/typeparams"
+)
+
+func TestStructuralTerms(t *testing.T) {
+	if !Enabled {
+		t.Skip("typeparams are not enabled")
+	}
+
+	// In the following tests, src must define a type T with (at least) one type
+	// parameter. We will compute the structural terms of the first type
+	// parameter.
+	tests := []struct {
+		src       string
+		want      string
+		wantError string
+	}{
+		{"package emptyinterface0; type T[P interface{}] int", "all", ""},
+		{"package emptyinterface1; type T[P interface{ int | interface{} }] int", "all", ""},
+		{"package singleton; type T[P interface{ int }] int", "int", ""},
+		{"package under; type T[P interface{~int}] int", "~int", ""},
+		{"package superset; type T[P interface{ ~int | int }] int", "~int", ""},
+		{"package overlap; type T[P interface{ ~int; int }] int", "int", ""},
+		{"package emptyintersection; type T[P interface{ ~int; string }] int", "", "empty type set"},
+
+		{"package embedded0; type T[P interface{ I }] int; type I interface { int }", "int", ""},
+		{"package embedded1; type T[P interface{ I | string }] int; type I interface{ int | ~string }", "int|~string", ""},
+		{"package embedded2; type T[P interface{ I; string }] int; type I interface{ int | ~string }", "string", ""},
+
+		{"package named; type T[P C] int; type C interface{ ~int|int }", "~int", ""},
+		{`// package example is taken from the docstring for StructuralTerms
+package example
+
+type A interface{ ~string|~[]byte }
+
+type B interface{ int|string }
+
+type C interface { ~string|~int }
+
+type T[P interface{ A|B; C }] int
+`, "~string|int", ""},
+	}
+
+	for _, test := range tests {
+		fset := token.NewFileSet()
+		f, err := parser.ParseFile(fset, "p.go", test.src, 0)
+		if err != nil {
+			t.Fatal(err)
+		}
+		t.Run(f.Name.Name, func(t *testing.T) {
+			conf := types.Config{
+				Error: func(error) {}, // keep going on errors
+			}
+			pkg, err := conf.Check("", fset, []*ast.File{f}, nil)
+			if err != nil {
+				t.Logf("types.Config.Check: %v", err)
+				// keep going on type checker errors: we want to assert on behavior of
+				// invalid code as well.
+			}
+			obj := pkg.Scope().Lookup("T")
+			if obj == nil {
+				t.Fatal("type T not found")
+			}
+			T := typeparams.ForNamed(obj.Type().(*types.Named)).At(0)
+			terms, err := StructuralTerms(T)
+			if test.wantError != "" {
+				if err == nil {
+					t.Fatalf("StructuralTerms(%s): nil error, want %q", T, test.wantError)
+				}
+				if !strings.Contains(err.Error(), test.wantError) {
+					t.Errorf("StructuralTerms(%s): err = %q, want %q", T, err, test.wantError)
+				}
+				return
+			}
+			if err != nil {
+				t.Fatal(err)
+			}
+			var got string
+			if len(terms) == 0 {
+				got = "all"
+			} else {
+				qf := types.RelativeTo(pkg)
+				got = types.TypeString(NewUnion(terms), qf)
+			}
+			if got != test.want {
+				t.Errorf("StructuralTerms(%s) = %q, want %q", T, got, test.want)
+			}
+		})
+	}
+}
diff --git a/internal/typeparams/notypeparams.go b/internal/typeparams/notypeparams.go
deleted file mode 100644
index e975e47..0000000
--- a/internal/typeparams/notypeparams.go
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build !typeparams || !go1.18
-// +build !typeparams !go1.18
-
-package typeparams
-
-import (
-	"go/ast"
-	"go/types"
-)
-
-// NOTE: doc comments must be kept in sync with typeparams.go.
-
-// Enabled reports whether type parameters are enabled in the current build
-// environment.
-const Enabled = false
-
-// GetIndexExprData extracts data from AST nodes that represent index
-// expressions.
-//
-// For an ast.IndexExpr, the resulting IndexExprData will have exactly one
-// index expression. For an ast.MultiIndexExpr (go1.18+), it may have a
-// variable number of index expressions.
-//
-// For nodes that don't represent index expressions, GetIndexExprData returns
-// nil.
-func GetIndexExprData(n ast.Node) *IndexExprData {
-	if e, _ := n.(*ast.IndexExpr); e != nil {
-		return &IndexExprData{
-			X:       e.X,
-			Lbrack:  e.Lbrack,
-			Indices: []ast.Expr{e.Index},
-			Rbrack:  e.Rbrack,
-		}
-	}
-	return nil
-}
-
-// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
-func ForTypeDecl(*ast.TypeSpec) *ast.FieldList {
-	return nil
-}
-
-// ForFuncDecl extracts the (possibly nil) type parameter node list from n.
-func ForFuncDecl(*ast.FuncDecl) *ast.FieldList {
-	return nil
-}
-
-// ForSignature extracts the (possibly empty) type parameter object list from
-// sig.
-func ForSignature(*types.Signature) []*types.TypeName {
-	return nil
-}
-
-// IsComparable reports if iface is the comparable interface.
-func IsComparable(*types.Interface) bool {
-	return false
-}
-
-// IsConstraint reports whether iface may only be used as a type parameter
-// constraint (i.e. has a type set or is the comparable interface).
-func IsConstraint(*types.Interface) bool {
-	return false
-}
-
-// ForNamed extracts the (possibly empty) type parameter object list from
-// named.
-func ForNamed(*types.Named) []*types.TypeName {
-	return nil
-}
-
-// NamedTArgs extracts the (possibly empty) type argument list from named.
-func NamedTArgs(*types.Named) []types.Type {
-	return nil
-}
-
-// InitInferred initializes info to record inferred type information.
-func InitInferred(*types.Info) {
-}
-
-// GetInferred extracts inferred type information from info for e.
-//
-// The expression e may have an inferred type if it is an *ast.IndexExpr
-// representing partial instantiation of a generic function type for which type
-// arguments have been inferred using constraint type inference, or if it is an
-// *ast.CallExpr for which type type arguments have be inferred using both
-// constraint type inference and function argument inference.
-func GetInferred(*types.Info, ast.Expr) ([]types.Type, *types.Signature) {
-	return nil, nil
-}
diff --git a/internal/typeparams/termlist.go b/internal/typeparams/termlist.go
new file mode 100644
index 0000000..10857d5
--- /dev/null
+++ b/internal/typeparams/termlist.go
@@ -0,0 +1,172 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by copytermlist.go DO NOT EDIT.
+
+package typeparams
+
+import (
+	"bytes"
+	"go/types"
+)
+
+// A termlist represents the type set represented by the union
+// t1 ∪ t2 ∪ ... tn of the type sets of the terms t1 to tn.
+// A termlist is in normal form if all terms are disjoint.
+// termlist operations don't require the operands to be in
+// normal form.
+type termlist []*term
+
+// allTermlist represents the set of all types.
+// It is in normal form.
+var allTermlist = termlist{new(term)}
+
+// String prints the termlist exactly (without normalization).
+func (xl termlist) String() string {
+	if len(xl) == 0 {
+		return "∅"
+	}
+	var buf bytes.Buffer
+	for i, x := range xl {
+		if i > 0 {
+			buf.WriteString(" ∪ ")
+		}
+		buf.WriteString(x.String())
+	}
+	return buf.String()
+}
+
+// isEmpty reports whether the termlist xl represents the empty set of types.
+func (xl termlist) isEmpty() bool {
+	// If there's a non-nil term, the entire list is not empty.
+	// If the termlist is in normal form, this requires at most
+	// one iteration.
+	for _, x := range xl {
+		if x != nil {
+			return false
+		}
+	}
+	return true
+}
+
+// isAll reports whether the termlist xl represents the set of all types.
+func (xl termlist) isAll() bool {
+	// If there's a 𝓤 term, the entire list is 𝓤.
+	// If the termlist is in normal form, this requires at most
+	// one iteration.
+	for _, x := range xl {
+		if x != nil && x.typ == nil {
+			return true
+		}
+	}
+	return false
+}
+
+// norm returns the normal form of xl.
+func (xl termlist) norm() termlist {
+	// Quadratic algorithm, but good enough for now.
+	// TODO(gri) fix asymptotic performance
+	used := make([]bool, len(xl))
+	var rl termlist
+	for i, xi := range xl {
+		if xi == nil || used[i] {
+			continue
+		}
+		for j := i + 1; j < len(xl); j++ {
+			xj := xl[j]
+			if xj == nil || used[j] {
+				continue
+			}
+			if u1, u2 := xi.union(xj); u2 == nil {
+				// If we encounter a 𝓤 term, the entire list is 𝓤.
+				// Exit early.
+				// (Note that this is not just an optimization;
+				// if we continue, we may end up with a 𝓤 term
+				// and other terms and the result would not be
+				// in normal form.)
+				if u1.typ == nil {
+					return allTermlist
+				}
+				xi = u1
+				used[j] = true // xj is now unioned into xi - ignore it in future iterations
+			}
+		}
+		rl = append(rl, xi)
+	}
+	return rl
+}
+
+// If the type set represented by xl is specified by a single (non-𝓤) term,
+// structuralType returns that type. Otherwise it returns nil.
+func (xl termlist) structuralType() types.Type {
+	if nl := xl.norm(); len(nl) == 1 {
+		return nl[0].typ // if nl.isAll() then typ is nil, which is ok
+	}
+	return nil
+}
+
+// union returns the union xl ∪ yl.
+func (xl termlist) union(yl termlist) termlist {
+	return append(xl, yl...).norm()
+}
+
+// intersect returns the intersection xl ∩ yl.
+func (xl termlist) intersect(yl termlist) termlist {
+	if xl.isEmpty() || yl.isEmpty() {
+		return nil
+	}
+
+	// Quadratic algorithm, but good enough for now.
+	// TODO(gri) fix asymptotic performance
+	var rl termlist
+	for _, x := range xl {
+		for _, y := range yl {
+			if r := x.intersect(y); r != nil {
+				rl = append(rl, r)
+			}
+		}
+	}
+	return rl.norm()
+}
+
+// equal reports whether xl and yl represent the same type set.
+func (xl termlist) equal(yl termlist) bool {
+	// TODO(gri) this should be more efficient
+	return xl.subsetOf(yl) && yl.subsetOf(xl)
+}
+
+// includes reports whether t ∈ xl.
+func (xl termlist) includes(t types.Type) bool {
+	for _, x := range xl {
+		if x.includes(t) {
+			return true
+		}
+	}
+	return false
+}
+
+// supersetOf reports whether y ⊆ xl.
+func (xl termlist) supersetOf(y *term) bool {
+	for _, x := range xl {
+		if y.subsetOf(x) {
+			return true
+		}
+	}
+	return false
+}
+
+// subsetOf reports whether xl ⊆ yl.
+func (xl termlist) subsetOf(yl termlist) bool {
+	if yl.isEmpty() {
+		return xl.isEmpty()
+	}
+
+	// each term x of xl must be a subset of yl
+	for _, x := range xl {
+		if !yl.supersetOf(x) {
+			return false // x is not a subset of yl
+		}
+	}
+	return true
+}
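For context, the termlist operations added above form a small set algebra over type terms. The following is a minimal in-package sketch (not part of the diff): the function name exampleTermlist is hypothetical, and it assumes it is compiled as part of package typeparams, since termlist, allTermlist, and the term type (added by typeterm.go later in this change) are unexported.

package typeparams

import (
	"fmt"
	"go/types"
)

func exampleTermlist() {
	intT := types.Typ[types.Int]
	strT := types.Typ[types.String]

	// {int} ∪ {~int} normalizes to {~int}: ~int already contains int.
	xl := termlist{{false, intT}, {true, intT}}
	fmt.Println(xl.norm()) // ~int

	// ({int} ∪ {string}) ∩ {string} == {string}
	yl := termlist{{false, intT}, {false, strT}}
	fmt.Println(yl.intersect(termlist{{false, strT}})) // string

	// The universal termlist absorbs everything under union.
	fmt.Println(allTermlist.union(yl).isAll()) // true
}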
diff --git a/internal/typeparams/typeparams.go b/internal/typeparams/typeparams.go
deleted file mode 100644
index d459b32..0000000
--- a/internal/typeparams/typeparams.go
+++ /dev/null
@@ -1,134 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build typeparams && go1.18
-// +build typeparams,go1.18
-
-package typeparams
-
-import (
-	"go/ast"
-	"go/types"
-)
-
-// NOTE: doc comments must be kept in sync with notypeparams.go.
-
-// Enabled reports whether type parameters are enabled in the current build
-// environment.
-const Enabled = true
-
-// GetIndexExprData extracts data from AST nodes that represent index
-// expressions.
-//
-// For an ast.IndexExpr, the resulting IndexExprData will have exactly one
-// index expression. For an ast.MultiIndexExpr (go1.18+), it may have a
-// variable number of index expressions.
-//
-// For nodes that don't represent index expressions, GetIndexExprData returns
-// nil.
-func GetIndexExprData(n ast.Node) *IndexExprData {
-	switch e := n.(type) {
-	case *ast.IndexExpr:
-		return &IndexExprData{
-			X:       e.X,
-			Lbrack:  e.Lbrack,
-			Indices: []ast.Expr{e.Index},
-			Rbrack:  e.Rbrack,
-		}
-	case *ast.MultiIndexExpr:
-		return (*IndexExprData)(e)
-	}
-	return nil
-}
-
-// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
-func ForTypeDecl(n *ast.TypeSpec) *ast.FieldList {
-	return n.TParams
-}
-
-// ForFuncDecl extracts the (possibly nil) type parameter node list from n.
-func ForFuncDecl(n *ast.FuncDecl) *ast.FieldList {
-	if n.Type != nil {
-		return n.Type.TParams
-	}
-	return nil
-}
-
-// ForSignature extracts the (possibly empty) type parameter object list from
-// sig.
-func ForSignature(sig *types.Signature) []*types.TypeName {
-	return tparamsSlice(sig.TParams())
-}
-
-// IsComparable reports if iface is the comparable interface.
-func IsComparable(iface *types.Interface) bool {
-	return iface.IsComparable()
-}
-
-// IsConstraint reports whether iface may only be used as a type parameter
-// constraint (i.e. has a type set or is the comparable interface).
-func IsConstraint(iface *types.Interface) bool {
-	return iface.IsConstraint()
-}
-
-// ForNamed extracts the (possibly empty) type parameter object list from
-// named.
-func ForNamed(named *types.Named) []*types.TypeName {
-	return tparamsSlice(named.TParams())
-}
-
-func tparamsSlice(tparams *types.TParamList) []*types.TypeName {
-	length := tparams.Len()
-	if length == 0 {
-		return nil
-	}
-
-	result := make([]*types.TypeName, length)
-	for i := 0; i < length; i++ {
-		result[i] = tparams.At(i).Obj()
-	}
-
-	return result
-}
-
-// NamedTArgs extracts the (possibly empty) type argument list from named.
-func NamedTArgs(named *types.Named) []types.Type {
-	targs := named.TArgs()
-	numArgs := targs.Len()
-
-	typs := make([]types.Type, numArgs)
-	for i := 0; i < numArgs; i++ {
-		typs[i] = targs.At(i)
-	}
-
-	return typs
-}
-
-// InitInferred initializes info to record inferred type information.
-func InitInferred(info *types.Info) {
-	info.Inferred = make(map[ast.Expr]types.Inferred)
-}
-
-// GetInferred extracts inferred type information from info for e.
-//
-// The expression e may have an inferred type if it is an *ast.IndexExpr
-// representing partial instantiation of a generic function type for which type
-// arguments have been inferred using constraint type inference, or if it is an
-// *ast.CallExpr for which type type arguments have be inferred using both
-// constraint type inference and function argument inference.
-func GetInferred(info *types.Info, e ast.Expr) ([]types.Type, *types.Signature) {
-	if info.Inferred == nil {
-		return nil, nil
-	}
-	inf := info.Inferred[e]
-
-	length := inf.TArgs.Len()
-
-	typs := make([]types.Type, length)
-	for i := 0; i < length; i++ {
-		typs[i] = inf.TArgs.At(i)
-	}
-
-	return typs, inf.Sig
-}
diff --git a/internal/typeparams/typeparams_go117.go b/internal/typeparams/typeparams_go117.go
new file mode 100644
index 0000000..b478897
--- /dev/null
+++ b/internal/typeparams/typeparams_go117.go
@@ -0,0 +1,197 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package typeparams
+
+import (
+	"go/ast"
+	"go/token"
+	"go/types"
+)
+
+func unsupported() {
+	panic("type parameters are unsupported at this go version")
+}
+
+// IndexListExpr is a placeholder type, as type parameters are not supported at
+// this Go version. Its methods panic on use.
+type IndexListExpr struct {
+	ast.Expr
+	X       ast.Expr   // expression
+	Lbrack  token.Pos  // position of "["
+	Indices []ast.Expr // index expressions
+	Rbrack  token.Pos  // position of "]"
+}
+
+// ForTypeSpec returns an empty field list, as type parameters are not supported
+// at this Go version.
+func ForTypeSpec(*ast.TypeSpec) *ast.FieldList {
+	return nil
+}
+
+// ForFuncType returns an empty field list, as type parameters are not
+// supported at this Go version.
+func ForFuncType(*ast.FuncType) *ast.FieldList {
+	return nil
+}
+
+// TypeParam is a placeholder type, as type parameters are not supported at
+// this Go version. Its methods panic on use.
+type TypeParam struct{ types.Type }
+
+func (*TypeParam) Index() int             { unsupported(); return 0 }
+func (*TypeParam) Constraint() types.Type { unsupported(); return nil }
+func (*TypeParam) Obj() *types.TypeName   { unsupported(); return nil }
+
+// TypeParamList is a placeholder for an empty type parameter list.
+type TypeParamList struct{}
+
+func (*TypeParamList) Len() int          { return 0 }
+func (*TypeParamList) At(int) *TypeParam { unsupported(); return nil }
+
+// TypeList is a placeholder for an empty type list.
+type TypeList struct{}
+
+func (*TypeList) Len() int          { return 0 }
+func (*TypeList) At(int) types.Type { unsupported(); return nil }
+
+// NewTypeParam is unsupported at this Go version, and panics.
+func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam {
+	unsupported()
+	return nil
+}
+
+// SetTypeParamConstraint is unsupported at this Go version, and panics.
+func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) {
+	unsupported()
+}
+
+// NewSignatureType calls types.NewSignature, panicking if recvTypeParams or
+// typeParams is non-empty.
+func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature {
+	if len(recvTypeParams) != 0 || len(typeParams) != 0 {
+		panic("signatures cannot have type parameters at this Go version")
+	}
+	return types.NewSignature(recv, params, results, variadic)
+}
+
+// ForSignature returns an empty type parameter list.
+func ForSignature(*types.Signature) *TypeParamList {
+	return nil
+}
+
+// RecvTypeParams returns an empty type parameter list.
+func RecvTypeParams(sig *types.Signature) *TypeParamList {
+	return nil
+}
+
+// IsComparable returns false, as no interfaces are type-restricted at this Go
+// version.
+func IsComparable(*types.Interface) bool {
+	return false
+}
+
+// IsMethodSet returns true, as no interfaces are type-restricted at this Go
+// version.
+func IsMethodSet(*types.Interface) bool {
+	return true
+}
+
+// IsImplicit returns false, as no interfaces are implicit at this Go version.
+func IsImplicit(*types.Interface) bool {
+	return false
+}
+
+// MarkImplicit does nothing, because this Go version does not have implicit
+// interfaces.
+func MarkImplicit(*types.Interface) {}
+
+// ForNamed returns an empty type parameter list, as type parameters are not
+// supported at this Go version.
+func ForNamed(*types.Named) *TypeParamList {
+	return nil
+}
+
+// SetForNamed panics if tparams is non-empty.
+func SetForNamed(_ *types.Named, tparams []*TypeParam) {
+	if len(tparams) > 0 {
+		unsupported()
+	}
+}
+
+// NamedTypeArgs returns nil.
+func NamedTypeArgs(*types.Named) *TypeList {
+	return nil
+}
+
+// NamedTypeOrigin is the identity method at this Go version.
+func NamedTypeOrigin(named *types.Named) types.Type {
+	return named
+}
+
+// Term holds information about a structural type restriction.
+type Term struct {
+	tilde bool
+	typ   types.Type
+}
+
+func (m *Term) Tilde() bool      { return m.tilde }
+func (m *Term) Type() types.Type { return m.typ }
+func (m *Term) String() string {
+	pre := ""
+	if m.tilde {
+		pre = "~"
+	}
+	return pre + m.typ.String()
+}
+
+// NewTerm returns a placeholder Term with the given tilde and type.
+func NewTerm(tilde bool, typ types.Type) *Term {
+	return &Term{tilde, typ}
+}
+
+// Union is a placeholder type, as type parameters are not supported at this Go
+// version. Its methods panic on use.
+type Union struct{ types.Type }
+
+func (*Union) Len() int         { return 0 }
+func (*Union) Term(i int) *Term { unsupported(); return nil }
+
+// NewUnion is unsupported at this Go version, and panics.
+func NewUnion(terms []*Term) *Union {
+	unsupported()
+	return nil
+}
+
+// InitInstanceInfo is a noop at this Go version.
+func InitInstanceInfo(*types.Info) {}
+
+// Instance is a placeholder type, as type parameters are not supported at this
+// Go version.
+type Instance struct {
+	TypeArgs *TypeList
+	Type     types.Type
+}
+
+// GetInstances returns a nil map, as type parameters are not supported at this
+// Go version.
+func GetInstances(info *types.Info) map[*ast.Ident]Instance { return nil }
+
+// Context is a placeholder type, as type parameters are not supported at
+// this Go version.
+type Context struct{}
+
+// NewContext returns a placeholder Context instance.
+func NewContext() *Context {
+	return &Context{}
+}
+
+// Instantiate is unsupported on this Go version, and panics.
+func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) {
+	unsupported()
+	return nil, nil
+}
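Because this fallback defines placeholders under the same names that the go1.18 file below aliases to the real go/types API, client code inside x/tools can be written once and compiled at any supported Go version. A hedged sketch (not part of the diff; describeNamed is a hypothetical helper, and the internal import path is only resolvable from within the x/tools module):

package example

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/internal/typeparams"
)

// describeNamed reports how many type parameters a named type has.
// On go1.17 and earlier, the shim's ForNamed returns a placeholder list
// whose Len is always 0, so the same code builds and runs everywhere.
func describeNamed(named *types.Named) string {
	tparams := typeparams.ForNamed(named)
	return fmt.Sprintf("%s has %d type parameter(s)", named.Obj().Name(), tparams.Len())
}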
diff --git a/internal/typeparams/typeparams_go118.go b/internal/typeparams/typeparams_go118.go
new file mode 100644
index 0000000..114a36b
--- /dev/null
+++ b/internal/typeparams/typeparams_go118.go
@@ -0,0 +1,151 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typeparams
+
+import (
+	"go/ast"
+	"go/types"
+)
+
+// IndexListExpr is an alias for ast.IndexListExpr.
+type IndexListExpr = ast.IndexListExpr
+
+// ForTypeSpec returns n.TypeParams.
+func ForTypeSpec(n *ast.TypeSpec) *ast.FieldList {
+	if n == nil {
+		return nil
+	}
+	return n.TypeParams
+}
+
+// ForFuncType returns n.TypeParams.
+func ForFuncType(n *ast.FuncType) *ast.FieldList {
+	if n == nil {
+		return nil
+	}
+	return n.TypeParams
+}
+
+// TypeParam is an alias for types.TypeParam
+type TypeParam = types.TypeParam
+
+// TypeParamList is an alias for types.TypeParamList
+type TypeParamList = types.TypeParamList
+
+// TypeList is an alias for types.TypeList
+type TypeList = types.TypeList
+
+// NewTypeParam calls types.NewTypeParam.
+func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam {
+	return types.NewTypeParam(name, constraint)
+}
+
+// SetTypeParamConstraint calls tparam.SetConstraint(constraint).
+func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) {
+	tparam.SetConstraint(constraint)
+}
+
+// NewSignatureType calls types.NewSignatureType.
+func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature {
+	return types.NewSignatureType(recv, recvTypeParams, typeParams, params, results, variadic)
+}
+
+// ForSignature returns sig.TypeParams()
+func ForSignature(sig *types.Signature) *TypeParamList {
+	return sig.TypeParams()
+}
+
+// RecvTypeParams returns sig.RecvTypeParams().
+func RecvTypeParams(sig *types.Signature) *TypeParamList {
+	return sig.RecvTypeParams()
+}
+
+// IsComparable calls iface.IsComparable().
+func IsComparable(iface *types.Interface) bool {
+	return iface.IsComparable()
+}
+
+// IsMethodSet calls iface.IsMethodSet().
+func IsMethodSet(iface *types.Interface) bool {
+	return iface.IsMethodSet()
+}
+
+// IsImplicit calls iface.IsImplicit().
+func IsImplicit(iface *types.Interface) bool {
+	return iface.IsImplicit()
+}
+
+// MarkImplicit calls iface.MarkImplicit().
+func MarkImplicit(iface *types.Interface) {
+	iface.MarkImplicit()
+}
+
+// ForNamed extracts the (possibly empty) type parameter object list from
+// named.
+func ForNamed(named *types.Named) *TypeParamList {
+	return named.TypeParams()
+}
+
+// SetForNamed sets the type params tparams on n. Each tparam must be of
+// dynamic type *types.TypeParam.
+func SetForNamed(n *types.Named, tparams []*TypeParam) {
+	n.SetTypeParams(tparams)
+}
+
+// NamedTypeArgs returns named.TypeArgs().
+func NamedTypeArgs(named *types.Named) *TypeList {
+	return named.TypeArgs()
+}
+
+// NamedTypeOrigin returns named.Origin().
+func NamedTypeOrigin(named *types.Named) types.Type {
+	return named.Origin()
+}
+
+// Term is an alias for types.Term.
+type Term = types.Term
+
+// NewTerm calls types.NewTerm.
+func NewTerm(tilde bool, typ types.Type) *Term {
+	return types.NewTerm(tilde, typ)
+}
+
+// Union is an alias for types.Union
+type Union = types.Union
+
+// NewUnion calls types.NewUnion.
+func NewUnion(terms []*Term) *Union {
+	return types.NewUnion(terms)
+}
+
+// InitInstanceInfo initializes info to record information about type and
+// function instances.
+func InitInstanceInfo(info *types.Info) {
+	info.Instances = make(map[*ast.Ident]types.Instance)
+}
+
+// Instance is an alias for types.Instance.
+type Instance = types.Instance
+
+// GetInstances returns info.Instances.
+func GetInstances(info *types.Info) map[*ast.Ident]Instance {
+	return info.Instances
+}
+
+// Context is an alias for types.Context.
+type Context = types.Context
+
+// NewContext calls types.NewContext.
+func NewContext() *Context {
+	return types.NewContext()
+}
+
+// Instantiate calls types.Instantiate.
+func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) {
+	return types.Instantiate(ctxt, typ, targs, validate)
+}
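The InitInstanceInfo/GetInstances pair above is how tools ask go/types to record generic instantiations. A hedged sketch of the intended use (go1.18+ only, not part of the diff; exampleInstances is a hypothetical name, and as an internal package this can only be imported from within x/tools):

package example

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/typeparams"
)

func exampleInstances() error {
	const src = `package p
func id[T any](x T) T { return x }
var _ = id(42)
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		return err
	}
	info := &types.Info{}
	typeparams.InitInstanceInfo(info) // allocates info.Instances on go1.18+
	conf := types.Config{Importer: importer.Default()}
	if _, err := conf.Check("p", fset, []*ast.File{f}, info); err != nil {
		return err
	}
	// id(42) is an implicit instantiation: its type argument (int) is inferred.
	for ident, inst := range typeparams.GetInstances(info) {
		fmt.Printf("%s instantiated with %d type argument(s)\n", ident.Name, inst.TypeArgs.Len())
	}
	return nil
}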
diff --git a/internal/typeparams/typeparams_test.go b/internal/typeparams/typeparams_test.go
new file mode 100644
index 0000000..cdbcae9
--- /dev/null
+++ b/internal/typeparams/typeparams_test.go
@@ -0,0 +1,68 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typeparams_test
+
+import (
+	"go/ast"
+	"go/importer"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/internal/apidiff"
+	"golang.org/x/tools/internal/testenv"
+)
+
+func TestAPIConsistency(t *testing.T) {
+	testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code.
+
+	// The packages below exclude enabled_*.go, as typeparams.Enabled is
+	// permitted to change between versions.
+	old := typeCheck(t, []string{"common.go", "typeparams_go117.go"})
+	new := typeCheck(t, []string{"common.go", "typeparams_go118.go"})
+
+	report := apidiff.Changes(old, new)
+
+	// Temporarily ignore API diff related to Environment, so that we can use a
+	// transient alias in go/types to allow renaming this type without ever
+	// breaking the x/tools builder.
+	// TODO(rfindley): remove this
+	var filteredChanges []apidiff.Change
+	for _, change := range report.Changes {
+		if strings.Contains(change.Message, "Environment") {
+			continue
+		}
+		filteredChanges = append(filteredChanges, change)
+	}
+	report.Changes = filteredChanges
+	if len(report.Changes) > 0 {
+		t.Errorf("API diff:\n%s", report)
+	}
+}
+
+func typeCheck(t *testing.T, filenames []string) *types.Package {
+	fset := token.NewFileSet()
+	var files []*ast.File
+	for _, name := range filenames {
+		f, err := parser.ParseFile(fset, name, nil, 0)
+		if err != nil {
+			t.Fatal(err)
+		}
+		files = append(files, f)
+	}
+	conf := types.Config{
+		Importer: importer.Default(),
+	}
+	pkg, err := conf.Check("", fset, files, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+	return pkg
+}
diff --git a/internal/typeparams/typeterm.go b/internal/typeparams/typeterm.go
new file mode 100644
index 0000000..7ddee28
--- /dev/null
+++ b/internal/typeparams/typeterm.go
@@ -0,0 +1,170 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by copytermlist.go DO NOT EDIT.
+
+package typeparams
+
+import "go/types"
+
+// A term describes elementary type sets:
+//
+//   ∅:  (*term)(nil)     == ∅                      // set of no types (empty set)
+//   𝓤:  &term{}          == 𝓤                      // set of all types (𝓤niverse)
+//   T:  &term{false, T}  == {T}                    // set of type T
+//  ~t:  &term{true, t}   == {t' | under(t') == t}  // set of types with underlying type t
+//
+type term struct {
+	tilde bool // valid if typ != nil
+	typ   types.Type
+}
+
+func (x *term) String() string {
+	switch {
+	case x == nil:
+		return "∅"
+	case x.typ == nil:
+		return "𝓤"
+	case x.tilde:
+		return "~" + x.typ.String()
+	default:
+		return x.typ.String()
+	}
+}
+
+// equal reports whether x and y represent the same type set.
+func (x *term) equal(y *term) bool {
+	// easy cases
+	switch {
+	case x == nil || y == nil:
+		return x == y
+	case x.typ == nil || y.typ == nil:
+		return x.typ == y.typ
+	}
+	// ∅ ⊂ x, y ⊂ 𝓤
+
+	return x.tilde == y.tilde && types.Identical(x.typ, y.typ)
+}
+
+// union returns the union x ∪ y: zero, one, or two non-nil terms.
+func (x *term) union(y *term) (_, _ *term) {
+	// easy cases
+	switch {
+	case x == nil && y == nil:
+		return nil, nil // ∅ ∪ ∅ == ∅
+	case x == nil:
+		return y, nil // ∅ ∪ y == y
+	case y == nil:
+		return x, nil // x ∪ ∅ == x
+	case x.typ == nil:
+		return x, nil // 𝓤 ∪ y == 𝓤
+	case y.typ == nil:
+		return y, nil // x ∪ 𝓤 == 𝓤
+	}
+	// ∅ ⊂ x, y ⊂ 𝓤
+
+	if x.disjoint(y) {
+		return x, y // x ∪ y == (x, y) if x ∩ y == ∅
+	}
+	// x.typ == y.typ
+
+	// ~t ∪ ~t == ~t
+	// ~t ∪  T == ~t
+	//  T ∪ ~t == ~t
+	//  T ∪  T ==  T
+	if x.tilde || !y.tilde {
+		return x, nil
+	}
+	return y, nil
+}
+
+// intersect returns the intersection x ∩ y.
+func (x *term) intersect(y *term) *term {
+	// easy cases
+	switch {
+	case x == nil || y == nil:
+		return nil // ∅ ∩ y == ∅ and x ∩ ∅ == ∅
+	case x.typ == nil:
+		return y // 𝓤 ∩ y == y
+	case y.typ == nil:
+		return x // x ∩ 𝓤 == x
+	}
+	// ∅ ⊂ x, y ⊂ 𝓤
+
+	if x.disjoint(y) {
+		return nil // x ∩ y == ∅ if x ∩ y == ∅
+	}
+	// x.typ == y.typ
+
+	// ~t ∩ ~t == ~t
+	// ~t ∩  T ==  T
+	//  T ∩ ~t ==  T
+	//  T ∩  T ==  T
+	if !x.tilde || y.tilde {
+		return x
+	}
+	return y
+}
+
+// includes reports whether t ∈ x.
+func (x *term) includes(t types.Type) bool {
+	// easy cases
+	switch {
+	case x == nil:
+		return false // t ∈ ∅ == false
+	case x.typ == nil:
+		return true // t ∈ 𝓤 == true
+	}
+	// ∅ ⊂ x ⊂ 𝓤
+
+	u := t
+	if x.tilde {
+		u = under(u)
+	}
+	return types.Identical(x.typ, u)
+}
+
+// subsetOf reports whether x ⊆ y.
+func (x *term) subsetOf(y *term) bool {
+	// easy cases
+	switch {
+	case x == nil:
+		return true // ∅ ⊆ y == true
+	case y == nil:
+		return false // x ⊆ ∅ == false since x != ∅
+	case y.typ == nil:
+		return true // x ⊆ 𝓤 == true
+	case x.typ == nil:
+		return false // 𝓤 ⊆ y == false since y != 𝓤
+	}
+	// ∅ ⊂ x, y ⊂ 𝓤
+
+	if x.disjoint(y) {
+		return false // x ⊆ y == false if x ∩ y == ∅
+	}
+	// x.typ == y.typ
+
+	// ~t ⊆ ~t == true
+	// ~t ⊆ T == false
+	//  T ⊆ ~t == true
+	//  T ⊆  T == true
+	return !x.tilde || y.tilde
+}
+
+// disjoint reports whether x ∩ y == ∅.
+// x.typ and y.typ must not be nil.
+func (x *term) disjoint(y *term) bool {
+	if debug && (x.typ == nil || y.typ == nil) {
+		panic("invalid argument(s)")
+	}
+	ux := x.typ
+	if y.tilde {
+		ux = under(ux)
+	}
+	uy := y.typ
+	if x.tilde {
+		uy = under(uy)
+	}
+	return !types.Identical(ux, uy)
+}
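The single-term algebra above underpins the termlist operations earlier in this change. A hypothetical in-package sketch (not part of the diff; exampleTerm is an invented name, and it must live in package typeparams because term and its methods are unexported):

package typeparams

import (
	"fmt"
	"go/types"
)

func exampleTerm() {
	intT := types.Typ[types.Int]

	exact := &term{false, intT} // {int}
	tilde := &term{true, intT}  // ~int: all types whose underlying type is int

	fmt.Println(exact.subsetOf(tilde)) // true:  {int} ⊆ ~int
	fmt.Println(tilde.subsetOf(exact)) // false: ~int ⊄ {int}
	fmt.Println(tilde.includes(intT))  // true:  int ∈ ~int
}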
diff --git a/internal/typesinternal/errorcode.go b/internal/typesinternal/errorcode.go
index fa2834e..d38ee3c 100644
--- a/internal/typesinternal/errorcode.go
+++ b/internal/typesinternal/errorcode.go
@@ -1365,4 +1365,162 @@
 	//  	return i
 	//  }
 	InvalidGo
+
+	// All codes below were added in Go 1.17.
+
+	/* decl */
+
+	// BadDecl occurs when a declaration has invalid syntax.
+	BadDecl
+
+	// RepeatedDecl occurs when an identifier occurs more than once on the left
+	// hand side of a short variable declaration.
+	//
+	// Example:
+	//  func _() {
+	//  	x, y, y := 1, 2, 3
+	//  }
+	RepeatedDecl
+
+	/* unsafe */
+
+	// InvalidUnsafeAdd occurs when unsafe.Add is called with a
+	// length argument that is not of integer type.
+	//
+	// Example:
+	//  import "unsafe"
+	//
+	//  var p unsafe.Pointer
+	//  var _ = unsafe.Add(p, float64(1))
+	InvalidUnsafeAdd
+
+	// InvalidUnsafeSlice occurs when unsafe.Slice is called with a
+	// pointer argument that is not of pointer type or a length argument
+	// that is not of integer type, negative, or out of bounds.
+	//
+	// Example:
+	//  import "unsafe"
+	//
+	//  var x int
+	//  var _ = unsafe.Slice(x, 1)
+	//
+	// Example:
+	//  import "unsafe"
+	//
+	//  var x int
+	//  var _ = unsafe.Slice(&x, float64(1))
+	//
+	// Example:
+	//  import "unsafe"
+	//
+	//  var x int
+	//  var _ = unsafe.Slice(&x, -1)
+	//
+	// Example:
+	//  import "unsafe"
+	//
+	//  var x int
+	//  var _ = unsafe.Slice(&x, uint64(1) << 63)
+	InvalidUnsafeSlice
+
+	// All codes below were added in Go 1.18.
+
+	/* features */
+
+	// UnsupportedFeature occurs when a language feature is used that is not
+	// supported at this Go version.
+	UnsupportedFeature
+
+	/* type params */
+
+	// NotAGenericType occurs when a non-generic type is used where a generic
+	// type is expected: in type or function instantiation.
+	//
+	// Example:
+	//  type T int
+	//
+	//  var _ T[int]
+	NotAGenericType
+
+	// WrongTypeArgCount occurs when a type or function is instantiated with an
+	// incorrect number of type arguments, including when a generic type or
+	// function is used without instantiation.
+	//
+	// Errors involving failed type inference are assigned other error codes.
+	//
+	// Example:
+	//  type T[p any] int
+	//
+	//  var _ T[int, string]
+	//
+	// Example:
+	//  func f[T any]() {}
+	//
+	//  var x = f
+	WrongTypeArgCount
+
+	// CannotInferTypeArgs occurs when type or function type argument inference
+	// fails to infer all type arguments.
+	//
+	// Example:
+	//  func f[T any]() {}
+	//
+	//  func _() {
+	//  	f()
+	//  }
+	//
+	// Example:
+	//   type N[P, Q any] struct{}
+	//
+	//   var _ N[int]
+	CannotInferTypeArgs
+
+	// InvalidTypeArg occurs when a type argument does not satisfy its
+	// corresponding type parameter constraints.
+	//
+	// Example:
+	//  type T[P ~int] struct{}
+	//
+	//  var _ T[string]
+	InvalidTypeArg // arguments? InferenceFailed
+
+	// InvalidInstanceCycle occurs when an invalid cycle is detected
+	// within the instantiation graph.
+	//
+	// Example:
+	//  func f[T any]() { f[*T]() }
+	InvalidInstanceCycle
+
+	// InvalidUnion occurs when an embedded union or approximation element is
+	// not valid.
+	//
+	// Example:
+	//  type _ interface {
+	//   	~int | interface{ m() }
+	//  }
+	InvalidUnion
+
+	// MisplacedConstraintIface occurs when a constraint-type interface is used
+	// outside of constraint position.
+	//
+	// Example:
+	//   type I interface { ~int }
+	//
+	//   var _ I
+	MisplacedConstraintIface
+
+	// InvalidMethodTypeParams occurs when methods have type parameters.
+	//
+	// It cannot be encountered with an AST parsed using go/parser.
+	InvalidMethodTypeParams
+
+	// MisplacedTypeParam occurs when a type parameter is used in a place where
+	// it is not permitted.
+	//
+	// Example:
+	//  type T[P any] P
+	//
+	// Example:
+	//  type T[P any] struct{ *P }
+	MisplacedTypeParam
 )
diff --git a/internal/typesinternal/errorcode_string.go b/internal/typesinternal/errorcode_string.go
index 3e5842a..de90e95 100644
--- a/internal/typesinternal/errorcode_string.go
+++ b/internal/typesinternal/errorcode_string.go
@@ -138,11 +138,25 @@
 	_ = x[UnusedResults-128]
 	_ = x[InvalidDefer-129]
 	_ = x[InvalidGo-130]
+	_ = x[BadDecl-131]
+	_ = x[RepeatedDecl-132]
+	_ = x[InvalidUnsafeAdd-133]
+	_ = x[InvalidUnsafeSlice-134]
+	_ = x[UnsupportedFeature-135]
+	_ = x[NotAGenericType-136]
+	_ = x[WrongTypeArgCount-137]
+	_ = x[CannotInferTypeArgs-138]
+	_ = x[InvalidTypeArg-139]
+	_ = x[InvalidInstanceCycle-140]
+	_ = x[InvalidUnion-141]
+	_ = x[MisplacedConstraintIface-142]
+	_ = x[InvalidMethodTypeParams-143]
+	_ = x[MisplacedTypeParam-144]
 }
 
-const _ErrorCode_name = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGo"
+const _ErrorCode_name = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGoBadDeclRepeatedDeclInvalidUnsafeAddInvalidUnsafeSliceUnsupportedFeatureNotAGenericTypeWrongTypeArgCountCannotInferTypeArgsInvalidTypeArgInvalidInstanceCycleInvalidUnionMisplacedConstraintIfaceInvalidMethodTypeParamsMisplacedTypeParam"
 
-var _ErrorCode_index = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 215, 231, 250, 258, 274, 292, 309, 327, 351, 359, 374, 390, 408, 425, 440, 447, 458, 481, 496, 508, 519, 534, 548, 563, 578, 591, 600, 614, 629, 640, 655, 664, 680, 700, 718, 737, 749, 768, 787, 803, 820, 839, 853, 864, 879, 892, 907, 923, 937, 953, 968, 985, 1003, 1018, 1028, 1038, 1055, 1077, 1091, 1105, 1125, 1143, 1163, 1181, 1204, 1220, 1235, 1248, 1258, 1270, 1281, 1295, 1308, 1319, 1329, 1344, 1355, 1366, 1379, 1395, 1412, 1436, 1453, 1468, 1478, 1487, 1500, 1516, 1532, 1543, 1558, 1574, 1588, 1604, 1618, 1635, 1655, 1668, 1684, 1698, 1715, 1732, 1749, 1764, 1778, 1792, 1803, 1815, 1828, 1845, 1858, 1869, 1882, 1894, 1903}
+var _ErrorCode_index = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 215, 231, 250, 258, 274, 292, 309, 327, 351, 359, 374, 390, 408, 425, 440, 447, 458, 481, 496, 508, 519, 534, 548, 563, 578, 591, 600, 614, 629, 640, 655, 664, 680, 700, 718, 737, 749, 768, 787, 803, 820, 839, 853, 864, 879, 892, 907, 923, 937, 953, 968, 985, 1003, 1018, 1028, 1038, 1055, 1077, 1091, 1105, 1125, 1143, 1163, 1181, 1204, 1220, 1235, 1248, 1258, 1270, 1281, 1295, 1308, 1319, 1329, 1344, 1355, 1366, 1379, 1395, 1412, 1436, 1453, 1468, 1478, 1487, 1500, 1516, 1532, 1543, 1558, 1574, 1588, 1604, 1618, 1635, 1655, 1668, 1684, 1698, 1715, 1732, 1749, 1764, 1778, 1792, 1803, 1815, 1828, 1845, 1858, 1869, 1882, 1894, 1903, 1910, 1922, 1938, 1956, 1974, 1989, 2006, 2025, 2039, 2059, 2071, 2095, 2118, 2136}
 
 func (i ErrorCode) String() string {
 	i -= 1
diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go
index c3e1a39..ce7d435 100644
--- a/internal/typesinternal/types.go
+++ b/internal/typesinternal/types.go
@@ -30,10 +30,15 @@
 	return true
 }
 
-func ReadGo116ErrorData(terr types.Error) (ErrorCode, token.Pos, token.Pos, bool) {
+// ReadGo116ErrorData extracts additional information from types.Error values
+// generated by Go version 1.16 and later: the error code, start position, and
+// end position. If all positions are valid, start <= err.Pos <= end.
+//
+// If the data could not be read, the final result parameter will be false.
+func ReadGo116ErrorData(err types.Error) (code ErrorCode, start, end token.Pos, ok bool) {
 	var data [3]int
 	// By coincidence all of these fields are ints, which simplifies things.
-	v := reflect.ValueOf(terr)
+	v := reflect.ValueOf(err)
 	for i, name := range []string{"go116code", "go116start", "go116end"} {
 		f := v.FieldByName(name)
 		if !f.IsValid() {
@@ -43,3 +48,5 @@
 	}
 	return ErrorCode(data[0]), token.Pos(data[1]), token.Pos(data[2]), true
 }
+
+var SetGoVersion = func(conf *types.Config, version string) bool { return false }
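ReadGo116ErrorData and the new SetGoVersion hook (wired up for go1.18 in types_118.go below) are meant to be used from a type-checking driver. A hedged sketch, not part of the diff; newConfig and its error handling are illustrative only:

package example

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/internal/typesinternal"
)

func newConfig(goVersion string) *types.Config {
	conf := &types.Config{
		Error: func(err error) {
			if terr, ok := err.(types.Error); ok {
				if code, start, end, ok := typesinternal.ReadGo116ErrorData(terr); ok {
					fmt.Printf("error %v spans [%v, %v]: %v\n", code, start, end, terr.Msg)
				}
			}
		},
	}
	// On go1.18+ the init hook in types_118.go makes this set conf.GoVersion;
	// on older toolchains it reports false and the version is ignored.
	if !typesinternal.SetGoVersion(conf, goVersion) {
		fmt.Println("GoVersion not supported by this toolchain; ignoring", goVersion)
	}
	return conf
}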
diff --git a/internal/typesinternal/types_118.go b/internal/typesinternal/types_118.go
new file mode 100644
index 0000000..a42b072
--- /dev/null
+++ b/internal/typesinternal/types_118.go
@@ -0,0 +1,19 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typesinternal
+
+import (
+	"go/types"
+)
+
+func init() {
+	SetGoVersion = func(conf *types.Config, version string) bool {
+		conf.GoVersion = version
+		return true
+	}
+}
diff --git a/refactor/eg/eg_test.go b/refactor/eg/eg_test.go
index a788361..438e6b7 100644
--- a/refactor/eg/eg_test.go
+++ b/refactor/eg/eg_test.go
@@ -115,7 +115,7 @@
 			if err != nil {
 				if shouldFail == nil {
 					t.Errorf("NewTransformer(%s): %s", filename, err)
-				} else if want := constant.StringVal(shouldFail.Val()); !strings.Contains(err.Error(), want) {
+				} else if want := constant.StringVal(shouldFail.Val()); !strings.Contains(normalizeAny(err.Error()), want) {
 					t.Errorf("NewTransformer(%s): got error %q, want error %q", filename, err, want)
 				}
 			} else if shouldFail != nil {
@@ -173,3 +173,9 @@
 		}
 	}
 }
+
+// normalizeAny replaces occurrences of interface{} with any, for consistent
+// output.
+func normalizeAny(s string) string {
+	return strings.ReplaceAll(s, "interface{}", "any")
+}
diff --git a/refactor/eg/testdata/expr_type_mismatch.template b/refactor/eg/testdata/expr_type_mismatch.template
index 2c5c3f0..00e00b1 100644
--- a/refactor/eg/testdata/expr_type_mismatch.template
+++ b/refactor/eg/testdata/expr_type_mismatch.template
@@ -9,7 +9,7 @@
 // rules this replacement should be ok, but types.Assignable doesn't work
 // in the expected way (elementwise assignability) for tuples.
 // Perhaps that's even a type-checker bug?
-const shouldFail = "(n int, err error) is not a safe replacement for (key interface{}, err error)"
+const shouldFail = "(n int, err error) is not a safe replacement for (key any, err error)"
 
 func before() (interface{}, error) { return x509.ParsePKCS8PrivateKey(nil) }
 func after() (interface{}, error)  { return fmt.Print() }
diff --git a/refactor/importgraph/graph_test.go b/refactor/importgraph/graph_test.go
index 2ab54e2..7526383 100644
--- a/refactor/importgraph/graph_test.go
+++ b/refactor/importgraph/graph_test.go
@@ -10,7 +10,9 @@
 package importgraph_test
 
 import (
+	"fmt"
 	"go/build"
+	"os"
 	"sort"
 	"strings"
 	"testing"
@@ -30,10 +32,40 @@
 
 	var gopath string
 	for _, env := range exported.Config.Env {
-		if !strings.HasPrefix(env, "GOPATH=") {
+		eq := strings.Index(env, "=")
+		if eq == 0 {
+			// We sometimes see keys with a single leading "=" in the environment on Windows.
+			// TODO(#49886): What is the correct way to parse them in general?
+			eq = strings.Index(env[1:], "=") + 1
+		}
+		if eq < 0 {
+			t.Fatalf("invalid variable in exported.Config.Env: %q", env)
+		}
+		k := env[:eq]
+		v := env[eq+1:]
+		if k == "GOPATH" {
+			gopath = v
+		}
+
+		if os.Getenv(k) == v {
 			continue
 		}
-		gopath = strings.TrimPrefix(env, "GOPATH=")
+		defer func(prev string, prevOK bool) {
+			if !prevOK {
+				if err := os.Unsetenv(k); err != nil {
+					t.Fatal(err)
+				}
+			} else {
+				if err := os.Setenv(k, prev); err != nil {
+					t.Fatal(err)
+				}
+			}
+		}(os.LookupEnv(k))
+
+		if err := os.Setenv(k, v); err != nil {
+			t.Fatal(err)
+		}
+		t.Logf("%s=%s", k, v)
 	}
 	if gopath == "" {
 		t.Fatal("Failed to fish GOPATH out of env: ", exported.Config.Env)
@@ -41,45 +73,97 @@
 
 	var buildContext = build.Default
 	buildContext.GOPATH = gopath
+	buildContext.Dir = exported.Config.Dir
 
-	forward, reverse, errors := importgraph.Build(&buildContext)
+	forward, reverse, errs := importgraph.Build(&buildContext)
+	for path, err := range errs {
+		t.Errorf("%s: %s", path, err)
+	}
+	if t.Failed() {
+		return
+	}
+
+	// Log the complete graph before the errors, so that the errors are near the
+	// end of the log (where we expect them to be).
+	nodePrinted := map[string]bool{}
+	printNode := func(direction string, from string) {
+		key := fmt.Sprintf("%s[%q]", direction, from)
+		if nodePrinted[key] {
+			return
+		}
+		nodePrinted[key] = true
+
+		var g importgraph.Graph
+		switch direction {
+		case "forward":
+			g = forward
+		case "reverse":
+			g = reverse
+		default:
+			t.Helper()
+			t.Fatalf("bad direction: %q", direction)
+		}
+
+		t.Log(key)
+		var pkgs []string
+		for pkg := range g[from] {
+			pkgs = append(pkgs, pkg)
+		}
+		sort.Strings(pkgs)
+		for _, pkg := range pkgs {
+			t.Logf("\t%s", pkg)
+		}
+	}
+
+	if testing.Verbose() {
+		printNode("forward", this)
+		printNode("reverse", this)
+	}
 
 	// Test direct edges.
 	// We throw in crypto/hmac to prove that external test files
 	// (such as this one) are inspected.
 	for _, p := range []string{"go/build", "testing", "crypto/hmac"} {
 		if !forward[this][p] {
-			t.Errorf("forward[importgraph][%s] not found", p)
+			printNode("forward", this)
+			t.Errorf("forward[%q][%q] not found", this, p)
 		}
 		if !reverse[p][this] {
-			t.Errorf("reverse[%s][importgraph] not found", p)
+			printNode("reverse", p)
+			t.Errorf("reverse[%q][%q] not found", p, this)
 		}
 	}
 
 	// Test non-existent direct edges
 	for _, p := range []string{"errors", "reflect"} {
 		if forward[this][p] {
-			t.Errorf("unexpected: forward[importgraph][%s] found", p)
+			printNode("forward", this)
+			t.Errorf("unexpected: forward[%q][%q] found", this, p)
 		}
 		if reverse[p][this] {
-			t.Errorf("unexpected: reverse[%s][importgraph] found", p)
+			printNode("reverse", p)
+			t.Errorf("unexpected: reverse[%q][%q] found", p, this)
 		}
 	}
 
 	// Test Search is reflexive.
 	if !forward.Search(this)[this] {
+		printNode("forward", this)
 		t.Errorf("irreflexive: forward.Search(importgraph)[importgraph] not found")
 	}
 	if !reverse.Search(this)[this] {
+		printNode("reverse", this)
 		t.Errorf("irreflexive: reverse.Search(importgraph)[importgraph] not found")
 	}
 
 	// Test Search is transitive.  (There is no direct edge to these packages.)
 	for _, p := range []string{"errors", "reflect", "unsafe"} {
 		if !forward.Search(this)[p] {
+			printNode("forward", this)
 			t.Errorf("intransitive: forward.Search(importgraph)[%s] not found", p)
 		}
 		if !reverse.Search(p)[this] {
+			printNode("reverse", p)
 			t.Errorf("intransitive: reverse.Search(%s)[importgraph] not found", p)
 		}
 	}
@@ -95,26 +179,10 @@
 		!forward.Search("io")["fmt"] ||
 		!reverse.Search("fmt")["io"] ||
 		!reverse.Search("io")["fmt"] {
+		printNode("forward", "fmt")
+		printNode("forward", "io")
+		printNode("reverse", "fmt")
+		printNode("reverse", "io")
 		t.Errorf("fmt and io are not mutually reachable despite being in the same SCC")
 	}
-
-	// debugging
-	if false {
-		for path, err := range errors {
-			t.Logf("%s: %s", path, err)
-		}
-		printSorted := func(direction string, g importgraph.Graph, start string) {
-			t.Log(direction)
-			var pkgs []string
-			for pkg := range g.Search(start) {
-				pkgs = append(pkgs, pkg)
-			}
-			sort.Strings(pkgs)
-			for _, pkg := range pkgs {
-				t.Logf("\t%s", pkg)
-			}
-		}
-		printSorted("forward", forward, this)
-		printSorted("reverse", reverse, this)
-	}
 }
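The rewritten loop above parses each exported.Config.Env entry by hand so that the odd Windows entries whose keys begin with "=" (e.g. "=C:") do not derail the GOPATH lookup. A standalone sketch of that parsing, not part of the diff; splitEnvEntry is a hypothetical helper that mirrors the test's logic:

package main

import (
	"fmt"
	"strings"
)

// splitEnvEntry splits a KEY=VALUE environment entry, tolerating keys that
// themselves begin with "=" as sometimes seen on Windows.
func splitEnvEntry(env string) (key, value string, ok bool) {
	eq := strings.Index(env, "=")
	if eq == 0 {
		// Keys such as "=C:" appear on Windows; look for the next "=".
		eq = strings.Index(env[1:], "=") + 1
	}
	if eq < 0 {
		return "", "", false
	}
	return env[:eq], env[eq+1:], true
}

func main() {
	for _, e := range []string{"GOPATH=/tmp/gopath", "=C:=C:\\Windows"} {
		k, v, _ := splitEnvEntry(e)
		fmt.Printf("%q -> key=%q value=%q\n", e, k, v)
	}
}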