Commits

Sebastien Binet  committed aa878b2

adjust layout

  • Participants
  • Parent commits 526f320
  • Tags go.go1

Comments (0)

Files changed (20)

File pkg/check.go

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements the Check function, which typechecks a package.
-
-package types
-
-import (
-	"fmt"
-	"go/ast"
-	"go/scanner"
-	"go/token"
-	"strconv"
-)
-
-const debug = false
-
-type checker struct {
-	fset   *token.FileSet
-	errors scanner.ErrorList
-	types  map[ast.Expr]Type
-}
-
-func (c *checker) errorf(pos token.Pos, format string, args ...interface{}) string {
-	msg := fmt.Sprintf(format, args...)
-	c.errors.Add(c.fset.Position(pos), msg)
-	return msg
-}
-
-// collectFields collects struct fields (tok = token.STRUCT), interface methods
-// (tok = token.INTERFACE), and function arguments/results (tok = token.FUNC).
-func (c *checker) collectFields(tok token.Token, list *ast.FieldList, cycleOk bool) (fields ObjList, tags []string, isVariadic bool) {
-	if list != nil {
-		for _, field := range list.List {
-			ftype := field.Type
-			if t, ok := ftype.(*ast.Ellipsis); ok {
-				ftype = t.Elt
-				isVariadic = true
-			}
-			typ := c.makeType(ftype, cycleOk)
-			tag := ""
-			if field.Tag != nil {
-				assert(field.Tag.Kind == token.STRING)
-				tag, _ = strconv.Unquote(field.Tag.Value)
-			}
-			if len(field.Names) > 0 {
-				// named fields
-				for _, name := range field.Names {
-					obj := name.Obj
-					obj.Type = typ
-					fields = append(fields, obj)
-					if tok == token.STRUCT {
-						tags = append(tags, tag)
-					}
-				}
-			} else {
-				// anonymous field
-				switch tok {
-				case token.STRUCT:
-					tags = append(tags, tag)
-					fallthrough
-				case token.FUNC:
-					obj := ast.NewObj(ast.Var, "")
-					obj.Type = typ
-					fields = append(fields, obj)
-				case token.INTERFACE:
-					utyp := Underlying(typ)
-					if typ, ok := utyp.(*Interface); ok {
-						// TODO(gri) This is not good enough. Check for double declarations!
-						fields = append(fields, typ.Methods...)
-					} else if _, ok := utyp.(*Bad); !ok {
-						// if utyp is Bad, don't complain (the root cause was reported before)
-						c.errorf(ftype.Pos(), "interface contains embedded non-interface type")
-					}
-				default:
-					panic("unreachable")
-				}
-			}
-		}
-	}
-	return
-}
-
-// makeType makes a new type for an AST type specification x or returns
-// the type referred to by a type name x. If cycleOk is set, a type may
-// refer to itself directly or indirectly; otherwise cycles are errors.
-//
-func (c *checker) makeType(x ast.Expr, cycleOk bool) (typ Type) {
-	if debug {
-		fmt.Printf("makeType (cycleOk = %v)\n", cycleOk)
-		ast.Print(c.fset, x)
-		defer func() {
-			fmt.Printf("-> %T %v\n\n", typ, typ)
-		}()
-	}
-
-	switch t := x.(type) {
-	case *ast.BadExpr:
-		return &Bad{}
-
-	case *ast.Ident:
-		// type name
-		obj := t.Obj
-		if obj == nil {
-			// unresolved identifier (error has been reported before)
-			return &Bad{Msg: "unresolved identifier"}
-		}
-		if obj.Kind != ast.Typ {
-			msg := c.errorf(t.Pos(), "%s is not a type", t.Name)
-			return &Bad{Msg: msg}
-		}
-		c.checkObj(obj, cycleOk)
-		if !cycleOk && obj.Type.(*Name).Underlying == nil {
-			// TODO(gri) Enable this message again once its position
-			// is independent of the underlying map implementation.
-			// msg := c.errorf(obj.Pos(), "illegal cycle in declaration of %s", obj.Name)
-			msg := "illegal cycle"
-			return &Bad{Msg: msg}
-		}
-		return obj.Type.(Type)
-
-	case *ast.ParenExpr:
-		return c.makeType(t.X, cycleOk)
-
-	case *ast.SelectorExpr:
-		// qualified identifier
-		// TODO (gri) eventually, this code belongs to expression
-		//            type checking - here for the time being
-		if ident, ok := t.X.(*ast.Ident); ok {
-			if obj := ident.Obj; obj != nil {
-				if obj.Kind != ast.Pkg {
-					msg := c.errorf(ident.Pos(), "%s is not a package", obj.Name)
-					return &Bad{Msg: msg}
-				}
-				// TODO(gri) we have a package name but don't
-				// have the mapping from package name to package
-				// scope anymore (created in ast.NewPackage).
-				return &Bad{} // for now
-			}
-		}
-		// TODO(gri) can this really happen (the parser should have excluded this)?
-		msg := c.errorf(t.Pos(), "expected qualified identifier")
-		return &Bad{Msg: msg}
-
-	case *ast.StarExpr:
-		return &Pointer{Base: c.makeType(t.X, true)}
-
-	case *ast.ArrayType:
-		if t.Len != nil {
-			// TODO(gri) compute length
-			return &Array{Elt: c.makeType(t.Elt, cycleOk)}
-		}
-		return &Slice{Elt: c.makeType(t.Elt, true)}
-
-	case *ast.StructType:
-		fields, tags, _ := c.collectFields(token.STRUCT, t.Fields, cycleOk)
-		return &Struct{Fields: fields, Tags: tags}
-
-	case *ast.FuncType:
-	params, _, isVariadic := c.collectFields(token.FUNC, t.Params, true)
-	results, _, _ := c.collectFields(token.FUNC, t.Results, true)
-		return &Func{Recv: nil, Params: params, Results: results, IsVariadic: isVariadic}
-
-	case *ast.InterfaceType:
-		methods, _, _ := c.collectFields(token.INTERFACE, t.Methods, cycleOk)
-		methods.Sort()
-		return &Interface{Methods: methods}
-
-	case *ast.MapType:
-		return &Map{Key: c.makeType(t.Key, true), Elt: c.makeType(t.Value, true)}
-
-	case *ast.ChanType:
-		return &Chan{Dir: t.Dir, Elt: c.makeType(t.Value, true)}
-	}
-
-	panic(fmt.Sprintf("unreachable (%T)", x))
-}
-
-// checkObj type checks an object.
-func (c *checker) checkObj(obj *ast.Object, ref bool) {
-	if obj.Type != nil {
-		// object has already been type checked
-		return
-	}
-
-	switch obj.Kind {
-	case ast.Bad:
-		// ignore
-
-	case ast.Con:
-		// TODO(gri) complete this
-
-	case ast.Typ:
-		typ := &Name{Obj: obj}
-		obj.Type = typ // "mark" object so recursion terminates
-		typ.Underlying = Underlying(c.makeType(obj.Decl.(*ast.TypeSpec).Type, ref))
-
-	case ast.Var:
-		// TODO(gri) complete this
-
-	case ast.Fun:
-		// TODO(gri) complete this
-
-	default:
-		panic("unreachable")
-	}
-}
-
-// Check typechecks a package.
-// It augments the AST by assigning types to all ast.Objects and returns a map
-// of types for all expression nodes in statements, and a scanner.ErrorList if
-// there are errors.
-//
-func Check(fset *token.FileSet, pkg *ast.Package) (types map[ast.Expr]Type, err error) {
-	var c checker
-	c.fset = fset
-	c.types = make(map[ast.Expr]Type)
-
-	for _, obj := range pkg.Scope.Objects {
-		c.checkObj(obj, false)
-	}
-
-	c.errors.RemoveMultiples()
-	return c.types, c.errors.Err()
-}
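
Check above is the package's entry point: it typechecks an already-resolved *ast.Package and returns a map from expression nodes to their types. A minimal usage sketch, wired up the same way check_test.go below does it (the import path and the source snippet are hypothetical, not part of this commit):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"example.org/go/types" // hypothetical import path for the pkg/ package in this commit
)

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", "package p\nvar x = 1 + 2\n", parser.DeclarationErrors)
	if err != nil {
		panic(err)
	}
	// Resolve identifiers across files, then typecheck the package.
	pkg, err := ast.NewPackage(fset, map[string]*ast.File{"p.go": file}, types.GcImport, types.Universe)
	if err != nil {
		fmt.Println("resolve:", err)
	}
	exprTypes, err := types.Check(fset, pkg)
	if err != nil {
		fmt.Println("check:", err)
	}
	fmt.Printf("typed %d expressions\n", len(exprTypes))
}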

File pkg/check_test.go

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements a typechecker test harness. The packages specified
-// in tests are typechecked. Error messages reported by the typechecker are
-// compared against the error messages expected in the test files.
-//
-// Expected errors are indicated in the test files by putting a comment
-// of the form /* ERROR "rx" */ immediately following an offending token.
-// The harness will verify that an error matching the regular expression
-// rx is reported at that source position. Consecutive comments may be
-// used to indicate multiple errors for the same token position.
-//
-// For instance, the following test file indicates that a "not declared"
-// error should be reported for the undeclared variable x:
-//
-//	package p
-//	func f() {
-//		_ = x /* ERROR "not declared" */ + 1
-//	}
-
-package types
-
-import (
-	"fmt"
-	"go/ast"
-	"go/parser"
-	"go/scanner"
-	"go/token"
-	"io/ioutil"
-	"os"
-	"regexp"
-	"testing"
-)
-
-// The test filenames do not end in .go so that they are invisible
-// to gofmt since they contain comments that must not change their
-// positions relative to surrounding tokens.
-
-var tests = []struct {
-	name  string
-	files []string
-}{
-	{"test0", []string{"testdata/test0.src"}},
-}
-
-var fset = token.NewFileSet()
-
-func getFile(filename string) (file *token.File) {
-	fset.Iterate(func(f *token.File) bool {
-		if f.Name() == filename {
-			file = f
-			return false // end iteration
-		}
-		return true
-	})
-	return file
-}
-
-func getPos(filename string, offset int) token.Pos {
-	if f := getFile(filename); f != nil {
-		return f.Pos(offset)
-	}
-	return token.NoPos
-}
-
-func parseFiles(t *testing.T, testname string, filenames []string) (map[string]*ast.File, error) {
-	files := make(map[string]*ast.File)
-	var errors scanner.ErrorList
-	for _, filename := range filenames {
-		if _, exists := files[filename]; exists {
-			t.Fatalf("%s: duplicate file %s", testname, filename)
-		}
-		file, err := parser.ParseFile(fset, filename, nil, parser.DeclarationErrors)
-		if file == nil {
-			t.Fatalf("%s: could not parse file %s", testname, filename)
-		}
-		files[filename] = file
-		if err != nil {
-			// if the parser returns a non-scanner.ErrorList error
-			// the file couldn't be read in the first place and
-			// file == nil; in that case we shouldn't reach here
-			errors = append(errors, err.(scanner.ErrorList)...)
-		}
-
-	}
-	return files, errors
-}
-
-// ERROR comments must be of the form /* ERROR "rx" */ and rx is
-// a regular expression that matches the expected error message.
-//
-var errRx = regexp.MustCompile(`^/\* *ERROR *"([^"]*)" *\*/$`)
-
-// expectedErrors collects the regular expressions of ERROR comments found
-// in files and returns them as a map of error positions to error messages.
-//
-func expectedErrors(t *testing.T, testname string, files map[string]*ast.File) map[token.Pos]string {
-	errors := make(map[token.Pos]string)
-	for filename := range files {
-		src, err := ioutil.ReadFile(filename)
-		if err != nil {
-			t.Fatalf("%s: could not read %s", testname, filename)
-		}
-
-		var s scanner.Scanner
-		// file was parsed already - do not add it again to the file
-		// set otherwise the position information returned here will
-		// not match the position information collected by the parser
-		s.Init(getFile(filename), src, nil, scanner.ScanComments)
-		var prev token.Pos // position of last non-comment, non-semicolon token
-
-	scanFile:
-		for {
-			pos, tok, lit := s.Scan()
-			switch tok {
-			case token.EOF:
-				break scanFile
-			case token.COMMENT:
-				s := errRx.FindStringSubmatch(lit)
-				if len(s) == 2 {
-					errors[prev] = string(s[1])
-				}
-			case token.SEMICOLON:
-				// ignore automatically inserted semicolon
-				if lit == "\n" {
-					break
-				}
-				fallthrough
-			default:
-				prev = pos
-			}
-		}
-	}
-	return errors
-}
-
-func eliminate(t *testing.T, expected map[token.Pos]string, errors error) {
-	if errors == nil {
-		return
-	}
-	for _, error := range errors.(scanner.ErrorList) {
-		// error.Pos is a token.Position, but we want
-		// a token.Pos so we can do a map lookup
-		pos := getPos(error.Pos.Filename, error.Pos.Offset)
-		if msg, found := expected[pos]; found {
-			// we expect a message at pos; check if it matches
-			rx, err := regexp.Compile(msg)
-			if err != nil {
-				t.Errorf("%s: %v", error.Pos, err)
-				continue
-			}
-			if match := rx.MatchString(error.Msg); !match {
-				t.Errorf("%s: %q does not match %q", error.Pos, error.Msg, msg)
-				continue
-			}
-			// we have a match - eliminate this error
-			delete(expected, pos)
-		} else {
-			// To keep in mind when analyzing failed test output:
-			// If the same error position occurs multiple times in errors,
-			// this message will be triggered (because the first error at
-			// the position removes this position from the expected errors).
-			t.Errorf("%s: no (multiple?) error expected, but found: %s", error.Pos, error.Msg)
-		}
-	}
-}
-
-func check(t *testing.T, testname string, testfiles []string) {
-	// TODO(gri) Eventually all these different phases should be
-	//           subsumed into a single function call that takes
-	//           a set of files and creates a fully resolved and
-	//           type-checked AST.
-
-	files, err := parseFiles(t, testname, testfiles)
-
-	// we are expecting the following errors
-	// (collect these after parsing the files so that
-	// they are found in the file set)
-	errors := expectedErrors(t, testname, files)
-
-	// verify errors returned by the parser
-	eliminate(t, errors, err)
-
-	// verify errors returned after resolving identifiers
-	pkg, err := ast.NewPackage(fset, files, GcImport, Universe)
-	eliminate(t, errors, err)
-
-	// verify errors returned by the typechecker
-	_, err = Check(fset, pkg)
-	eliminate(t, errors, err)
-
-	// there should be no expected errors left
-	if len(errors) > 0 {
-		t.Errorf("%s: %d errors not reported:", testname, len(errors))
-		for pos, msg := range errors {
-			t.Errorf("%s: %s\n", fset.Position(pos), msg)
-		}
-	}
-}
-
-func TestCheck(t *testing.T) {
-	// For easy debugging w/o changing the testing code,
-	// if there is a local test file, only test that file.
-	const testfile = "test.go"
-	if fi, err := os.Stat(testfile); err == nil && !fi.IsDir() {
-		fmt.Printf("WARNING: Testing only %s (remove it to run all tests)\n", testfile)
-		check(t, testfile, []string{testfile})
-		return
-	}
-
-	// Otherwise, run all the tests.
-	for _, test := range tests {
-		check(t, test.name, test.files)
-	}
-}
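
The /* ERROR "rx" */ convention documented at the top of this harness can be exercised on its own. A standalone sketch using the same errRx expression (hypothetical example, not part of this commit):

package main

import (
	"fmt"
	"regexp"
)

// Same expression as errRx in the harness above.
var errRx = regexp.MustCompile(`^/\* *ERROR *"([^"]*)" *\*/$`)

func main() {
	lit := `/* ERROR "not declared" */`
	if m := errRx.FindStringSubmatch(lit); len(m) == 2 {
		fmt.Println("expected error pattern:", m[1]) // prints: not declared
	}
}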

File pkg/const.go

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements operations on ideal constants.
-
-package types
-
-import (
-	"go/token"
-	"math/big"
-	"strconv"
-)
-
-// TODO(gri) Consider changing the API so Const is an interface
-//           and operations on consts don't have to type switch.
-
-// A Const implements an ideal constant Value.
-// The zero value z for a Const is not a valid constant value.
-type Const struct {
-	// representation of constant values:
-	// ideal bool     ->  bool
-	// ideal int      ->  *big.Int
-	// ideal float    ->  *big.Rat
-	// ideal complex  ->  cmplx
-	// ideal string   ->  string
-	val interface{}
-}
-
-// Representation of complex values.
-type cmplx struct {
-	re, im *big.Rat
-}
-
-func assert(cond bool) {
-	if !cond {
-		panic("go/types internal error: assertion failed")
-	}
-}
-
-// MakeConst makes an ideal constant from a literal
-// token and the corresponding literal string.
-func MakeConst(tok token.Token, lit string) Const {
-	switch tok {
-	case token.INT:
-		var x big.Int
-		_, ok := x.SetString(lit, 0)
-		assert(ok)
-		return Const{&x}
-	case token.FLOAT:
-		var y big.Rat
-		_, ok := y.SetString(lit)
-		assert(ok)
-		return Const{&y}
-	case token.IMAG:
-		assert(lit[len(lit)-1] == 'i')
-		var im big.Rat
-		_, ok := im.SetString(lit[0 : len(lit)-1])
-		assert(ok)
-		return Const{cmplx{big.NewRat(0, 1), &im}}
-	case token.CHAR:
-		assert(lit[0] == '\'' && lit[len(lit)-1] == '\'')
-		code, _, _, err := strconv.UnquoteChar(lit[1:len(lit)-1], '\'')
-		assert(err == nil)
-		return Const{big.NewInt(int64(code))}
-	case token.STRING:
-		s, err := strconv.Unquote(lit)
-		assert(err == nil)
-		return Const{s}
-	}
-	panic("unreachable")
-}
-
-// MakeZero returns the zero constant for the given type.
-func MakeZero(typ *Type) Const {
-	// TODO(gri) fix this
-	return Const{0}
-}
-
-// Match attempts to match the internal constant representations of x and y.
-// If the attempt is successful, the result is the values of x and y,
-// if necessary converted to have the same internal representation; otherwise
-// the results are invalid.
-func (x Const) Match(y Const) (u, v Const) {
-	switch a := x.val.(type) {
-	case bool:
-		if _, ok := y.val.(bool); ok {
-			u, v = x, y
-		}
-	case *big.Int:
-		switch y.val.(type) {
-		case *big.Int:
-			u, v = x, y
-		case *big.Rat:
-			var z big.Rat
-			z.SetInt(a)
-			u, v = Const{&z}, y
-		case cmplx:
-			var z big.Rat
-			z.SetInt(a)
-			u, v = Const{cmplx{&z, big.NewRat(0, 1)}}, y
-		}
-	case *big.Rat:
-		switch y.val.(type) {
-		case *big.Int:
-			v, u = y.Match(x)
-		case *big.Rat:
-			u, v = x, y
-		case cmplx:
-			u, v = Const{cmplx{a, big.NewRat(0, 1)}}, y
-		}
-	case cmplx:
-		switch y.val.(type) {
-		case *big.Int, *big.Rat:
-			v, u = y.Match(x)
-		case cmplx:
-			u, v = x, y
-		}
-	case string:
-		if _, ok := y.val.(string); ok {
-			u, v = x, y
-		}
-	default:
-		panic("unreachable")
-	}
-	return
-}
-
-// Convert attempts to convert the constant x to a given type.
-// If the attempt is successful, the result is the new constant;
-// otherwise the result is invalid.
-func (x Const) Convert(typ *Type) Const {
-	// TODO(gri) implement this
-	switch x.val.(type) {
-	case bool:
-	case *big.Int:
-	case *big.Rat:
-	case cmplx:
-	case string:
-	}
-	return x
-}
-
-func (x Const) String() string {
-	switch x := x.val.(type) {
-	case bool:
-		if x {
-			return "true"
-		}
-		return "false"
-	case *big.Int:
-		return x.String()
-	case *big.Rat:
-		return x.FloatString(10) // 10 digits of precision after decimal point seems fine
-	case cmplx:
-		// TODO(gri) don't print 0 components
-		return x.re.FloatString(10) + " + " + x.im.FloatString(10) + "i"
-	case string:
-		return x
-	}
-	panic("unreachable")
-}
-
-func (x Const) UnaryOp(op token.Token) Const {
-	panic("unimplemented")
-}
-
-func (x Const) BinaryOp(op token.Token, y Const) Const {
-	var z interface{}
-	switch x := x.val.(type) {
-	case bool:
-		z = binaryBoolOp(x, op, y.val.(bool))
-	case *big.Int:
-		z = binaryIntOp(x, op, y.val.(*big.Int))
-	case *big.Rat:
-		z = binaryFloatOp(x, op, y.val.(*big.Rat))
-	case cmplx:
-		z = binaryCmplxOp(x, op, y.val.(cmplx))
-	case string:
-		z = binaryStringOp(x, op, y.val.(string))
-	default:
-		panic("unreachable")
-	}
-	return Const{z}
-}
-
-func binaryBoolOp(x bool, op token.Token, y bool) interface{} {
-	switch op {
-	case token.EQL:
-		return x == y
-	case token.NEQ:
-		return x != y
-	}
-	panic("unreachable")
-}
-
-func binaryIntOp(x *big.Int, op token.Token, y *big.Int) interface{} {
-	var z big.Int
-	switch op {
-	case token.ADD:
-		return z.Add(x, y)
-	case token.SUB:
-		return z.Sub(x, y)
-	case token.MUL:
-		return z.Mul(x, y)
-	case token.QUO:
-		return z.Quo(x, y)
-	case token.REM:
-		return z.Rem(x, y)
-	case token.AND:
-		return z.And(x, y)
-	case token.OR:
-		return z.Or(x, y)
-	case token.XOR:
-		return z.Xor(x, y)
-	case token.AND_NOT:
-		return z.AndNot(x, y)
-	case token.SHL:
-		panic("unimplemented")
-	case token.SHR:
-		panic("unimplemented")
-	case token.EQL:
-		return x.Cmp(y) == 0
-	case token.NEQ:
-		return x.Cmp(y) != 0
-	case token.LSS:
-		return x.Cmp(y) < 0
-	case token.LEQ:
-		return x.Cmp(y) <= 0
-	case token.GTR:
-		return x.Cmp(y) > 0
-	case token.GEQ:
-		return x.Cmp(y) >= 0
-	}
-	panic("unreachable")
-}
-
-func binaryFloatOp(x *big.Rat, op token.Token, y *big.Rat) interface{} {
-	var z big.Rat
-	switch op {
-	case token.ADD:
-		return z.Add(x, y)
-	case token.SUB:
-		return z.Sub(x, y)
-	case token.MUL:
-		return z.Mul(x, y)
-	case token.QUO:
-		return z.Quo(x, y)
-	case token.EQL:
-		return x.Cmp(y) == 0
-	case token.NEQ:
-		return x.Cmp(y) != 0
-	case token.LSS:
-		return x.Cmp(y) < 0
-	case token.LEQ:
-		return x.Cmp(y) <= 0
-	case token.GTR:
-		return x.Cmp(y) > 0
-	case token.GEQ:
-		return x.Cmp(y) >= 0
-	}
-	panic("unreachable")
-}
-
-func binaryCmplxOp(x cmplx, op token.Token, y cmplx) interface{} {
-	a, b := x.re, x.im
-	c, d := y.re, y.im
-	switch op {
-	case token.ADD:
-		// (a+c) + i(b+d)
-		var re, im big.Rat
-		re.Add(a, c)
-		im.Add(b, d)
-		return cmplx{&re, &im}
-	case token.SUB:
-		// (a-c) + i(b-d)
-		var re, im big.Rat
-		re.Sub(a, c)
-		im.Sub(b, d)
-		return cmplx{&re, &im}
-	case token.MUL:
-		// (ac-bd) + i(bc+ad)
-		var ac, bd, bc, ad big.Rat
-		ac.Mul(a, c)
-		bd.Mul(b, d)
-		bc.Mul(b, c)
-		ad.Mul(a, d)
-		var re, im big.Rat
-		re.Sub(&ac, &bd)
-		im.Add(&bc, &ad)
-		return cmplx{&re, &im}
-	case token.QUO:
-		// (ac+bd)/s + i(bc-ad)/s, with s = cc + dd
-		var ac, bd, bc, ad, s big.Rat
-		ac.Mul(a, c)
-		bd.Mul(b, d)
-		bc.Mul(b, c)
-		ad.Mul(a, d)
-		var cc, dd big.Rat
-		s.Add(cc.Mul(c, c), dd.Mul(d, d)) // do not clobber the operands c and d
-		var re, im big.Rat
-		re.Add(&ac, &bd)
-		re.Quo(&re, &s)
-		im.Sub(&bc, &ad)
-		im.Quo(&im, &s)
-		return cmplx{&re, &im}
-	case token.EQL:
-		return a.Cmp(c) == 0 && b.Cmp(d) == 0
-	case token.NEQ:
-		return a.Cmp(c) != 0 || b.Cmp(d) != 0
-	}
-	panic("unreachable")
-}
-
-func binaryStringOp(x string, op token.Token, y string) interface{} {
-	switch op {
-	case token.ADD:
-		return x + y
-	case token.EQL:
-		return x == y
-	case token.NEQ:
-		return x != y
-	case token.LSS:
-		return x < y
-	case token.LEQ:
-		return x <= y
-	case token.GTR:
-		return x > y
-	case token.GEQ:
-		return x >= y
-	}
-	panic("unreachable")
-}
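
Match and BinaryOp above are designed to be used together: Match first brings two ideal constants to a common internal representation, and BinaryOp then dispatches on that representation. A short sketch (the import path is hypothetical, not part of this commit):

package main

import (
	"fmt"
	"go/token"

	"example.org/go/types" // hypothetical import path for the pkg/ package in this commit
)

func main() {
	x := types.MakeConst(token.INT, "3")     // ideal int, backed by *big.Int
	y := types.MakeConst(token.FLOAT, "1.5") // ideal float, backed by *big.Rat
	u, v := x.Match(y)                       // 3 is converted to a *big.Rat to match y
	fmt.Println(u.BinaryOp(token.ADD, v))    // 4.5000000000 (String prints rationals with 10 decimal digits)
}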

File pkg/exportdata.go

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements FindGcExportData.
-
-package types
-
-import (
-	"bufio"
-	"errors"
-	"fmt"
-	"io"
-	"strconv"
-	"strings"
-)
-
-func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
-	// See $GOROOT/include/ar.h.
-	hdr := make([]byte, 16+12+6+6+8+10+2)
-	_, err = io.ReadFull(r, hdr)
-	if err != nil {
-		return
-	}
-	if trace {
-		fmt.Printf("header: %s", hdr)
-	}
-	s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
-	size, err = strconv.Atoi(s)
-	if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
-		err = errors.New("invalid archive header")
-		return
-	}
-	name = strings.TrimSpace(string(hdr[:16]))
-	return
-}
-
-// FindGcExportData positions the reader r at the beginning of the
-// export data section of an underlying GC-created object/archive
-// file by reading from it. The reader must be positioned at the
-// start of the file before calling this function.
-//
-func FindGcExportData(r *bufio.Reader) (err error) {
-	// Read first line to make sure this is an object file.
-	line, err := r.ReadSlice('\n')
-	if err != nil {
-		return
-	}
-	if string(line) == "!<arch>\n" {
-		// Archive file.  Scan to __.PKGDEF, which should
-		// be second archive entry.
-		var name string
-		var size int
-
-		// First entry should be __.SYMDEF.
-		// Read and discard.
-		if name, size, err = readGopackHeader(r); err != nil {
-			return
-		}
-		if name != "__.SYMDEF" {
-			err = errors.New("go archive does not begin with __.SYMDEF")
-			return
-		}
-		const block = 4096
-		tmp := make([]byte, block)
-		for size > 0 {
-			n := size
-			if n > block {
-				n = block
-			}
-			if _, err = io.ReadFull(r, tmp[:n]); err != nil {
-				return
-			}
-			size -= n
-		}
-
-		// Second entry should be __.PKGDEF.
-		if name, size, err = readGopackHeader(r); err != nil {
-			return
-		}
-		if name != "__.PKGDEF" {
-			err = errors.New("go archive is missing __.PKGDEF")
-			return
-		}
-
-		// Read first line of __.PKGDEF data, so that line
-		// is once again the first line of the input.
-		if line, err = r.ReadSlice('\n'); err != nil {
-			return
-		}
-	}
-
-	// Now at __.PKGDEF in archive or still at beginning of file.
-	// Either way, line should begin with "go object ".
-	if !strings.HasPrefix(string(line), "go object ") {
-		err = errors.New("not a go object file")
-		return
-	}
-
-	// Skip over object header to export data.
-	// Begins after first line with $$.
-	for line[0] != '$' {
-		if line, err = r.ReadSlice('\n'); err != nil {
-			return
-		}
-	}
-
-	return
-}
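
FindGcExportData is the low-level entry point here: given a bufio.Reader positioned at the start of a gc-produced object or archive file, it skips the archive and object headers and leaves the reader at the export data. A minimal usage sketch (the archive path and import path are hypothetical, not part of this commit):

package main

import (
	"bufio"
	"fmt"
	"os"

	"example.org/go/types" // hypothetical import path for the pkg/ package in this commit
)

func main() {
	f, err := os.Open("fmt.a") // hypothetical gc-compiled archive
	if err != nil {
		panic(err)
	}
	defer f.Close()

	r := bufio.NewReader(f)
	if err := types.FindGcExportData(r); err != nil {
		panic(err)
	}
	// r now reads from the start of the $$-delimited export data section.
	line, _ := r.ReadString('\n')
	fmt.Print(line)
}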

File pkg/gcimporter.go

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements an ast.Importer for gc-generated object files.
-// TODO(gri) Eventually move this into a separate package outside types.
-
-package types
-
-import (
-	"bufio"
-	"errors"
-	"fmt"
-	"go/ast"
-	"go/build"
-	"go/token"
-	"io"
-	"math/big"
-	"os"
-	"path/filepath"
-	"strconv"
-	"strings"
-	"text/scanner"
-)
-
-const trace = false // set to true for debugging
-
-var pkgExts = [...]string{".a", ".5", ".6", ".8"}
-
-// FindPkg returns the filename and unique package id for an import
-// path based on package information provided by build.Import (using
-// the build.Default build.Context).
-// If no file was found, an empty filename is returned.
-//
-func FindPkg(path, srcDir string) (filename, id string) {
-	if len(path) == 0 {
-		return
-	}
-
-	id = path
-	var noext string
-	switch {
-	default:
-		// "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
-		bp, _ := build.Import(path, srcDir, build.FindOnly)
-		if bp.PkgObj == "" {
-			return
-		}
-		noext = bp.PkgObj
-		if strings.HasSuffix(noext, ".a") {
-			noext = noext[:len(noext)-len(".a")]
-		}
-
-	case build.IsLocalImport(path):
-		// "./x" -> "/this/directory/x.ext", "/this/directory/x"
-		noext = filepath.Join(srcDir, path)
-		id = noext
-
-	case filepath.IsAbs(path):
-		// for completeness only - go/build.Import
-		// does not support absolute imports
-		// "/x" -> "/x.ext", "/x"
-		noext = path
-	}
-
-	// try extensions
-	for _, ext := range pkgExts {
-		filename = noext + ext
-		if f, err := os.Stat(filename); err == nil && !f.IsDir() {
-			return
-		}
-	}
-
-	filename = "" // not found
-	return
-}
-
-// GcImportData imports a package by reading the gc-generated export data,
-// adds the corresponding package object to the imports map indexed by id,
-// and returns the object.
-//
-// The imports map must contain all packages already imported, and must not
-// already have an entry for id. The data reader position must
-// be the beginning of the export data section. The filename is only used
-// in error messages.
-//
-func GcImportData(imports map[string]*ast.Object, filename, id string, data *bufio.Reader) (pkg *ast.Object, err error) {
-	if trace {
-		fmt.Printf("importing %s (%s)\n", id, filename)
-	}
-
-	if imports[id] != nil {
-		panic(fmt.Sprintf("package %s already imported", id))
-	}
-
-	// support for gcParser error handling
-	defer func() {
-		if r := recover(); r != nil {
-			err = r.(importError) // will re-panic if r is not an importError
-		}
-	}()
-
-	var p gcParser
-	p.init(filename, id, data, imports)
-	pkg = p.parseExport()
-
-	return
-}
-
-// GcImport imports a gc-generated package given its import path, adds the
-// corresponding package object to the imports map, and returns the object.
-// Local import paths are interpreted relative to the current working directory.
-// The imports map must contain all packages already imported.
-// GcImport satisfies the ast.Importer signature.
-//
-func GcImport(imports map[string]*ast.Object, path string) (pkg *ast.Object, err error) {
-	if path == "unsafe" {
-		return Unsafe, nil
-	}
-
-	srcDir, err := os.Getwd()
-	if err != nil {
-		return
-	}
-	filename, id := FindPkg(path, srcDir)
-	if filename == "" {
-		err = errors.New("can't find import: " + id)
-		return
-	}
-
-	if pkg = imports[id]; pkg != nil {
-		return // package was imported before
-	}
-
-	// open file
-	f, err := os.Open(filename)
-	if err != nil {
-		return
-	}
-	defer func() {
-		f.Close()
-		if err != nil {
-			// Add file name to error.
-			err = fmt.Errorf("reading export data: %s: %v", filename, err)
-		}
-	}()
-
-	buf := bufio.NewReader(f)
-	if err = FindGcExportData(buf); err != nil {
-		return
-	}
-
-	pkg, err = GcImportData(imports, filename, id, buf)
-
-	return
-}
-
-// ----------------------------------------------------------------------------
-// gcParser
-
-// gcParser parses the exports inside a gc compiler-produced
-// object/archive file and populates its scope with the results.
-type gcParser struct {
-	scanner scanner.Scanner
-	tok     rune                   // current token
-	lit     string                 // literal string; only valid for Ident, Int, String tokens
-	id      string                 // package id of imported package
-	imports map[string]*ast.Object // package id -> package object
-}
-
-func (p *gcParser) init(filename, id string, src io.Reader, imports map[string]*ast.Object) {
-	p.scanner.Init(src)
-	p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
-	p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
-	p.scanner.Whitespace = 1<<'\t' | 1<<' '
-	p.scanner.Filename = filename // for good error messages
-	p.next()
-	p.id = id
-	p.imports = imports
-}
-
-func (p *gcParser) next() {
-	p.tok = p.scanner.Scan()
-	switch p.tok {
-	case scanner.Ident, scanner.Int, scanner.String:
-		p.lit = p.scanner.TokenText()
-	default:
-		p.lit = ""
-	}
-	if trace {
-		fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
-	}
-}
-
-// Declare inserts a named object of the given kind in scope.
-func (p *gcParser) declare(scope *ast.Scope, kind ast.ObjKind, name string) *ast.Object {
-	// the object may have been imported before - if it exists
-	// already in the respective package scope, return that object
-	if obj := scope.Lookup(name); obj != nil {
-		assert(obj.Kind == kind)
-		return obj
-	}
-
-	// otherwise create a new object and insert it into the package scope
-	obj := ast.NewObj(kind, name)
-	if scope.Insert(obj) != nil {
-		p.errorf("already declared: %v %s", kind, obj.Name)
-	}
-
-	// a new type object is a named type and may be referred
-	// to before the underlying type is known - set it up
-	if kind == ast.Typ {
-		obj.Type = &Name{Obj: obj}
-	}
-
-	return obj
-}
-
-// ----------------------------------------------------------------------------
-// Error handling
-
-// Internal errors are boxed as importErrors.
-type importError struct {
-	pos scanner.Position
-	err error
-}
-
-func (e importError) Error() string {
-	return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
-}
-
-func (p *gcParser) error(err interface{}) {
-	if s, ok := err.(string); ok {
-		err = errors.New(s)
-	}
-	// panic with a runtime.Error if err is not an error
-	panic(importError{p.scanner.Pos(), err.(error)})
-}
-
-func (p *gcParser) errorf(format string, args ...interface{}) {
-	p.error(fmt.Sprintf(format, args...))
-}
-
-func (p *gcParser) expect(tok rune) string {
-	lit := p.lit
-	if p.tok != tok {
-		p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
-	}
-	p.next()
-	return lit
-}
-
-func (p *gcParser) expectSpecial(tok string) {
-	sep := 'x' // not white space
-	i := 0
-	for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
-		sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
-		p.next()
-		i++
-	}
-	if i < len(tok) {
-		p.errorf("expected %q, got %q", tok, tok[0:i])
-	}
-}
-
-func (p *gcParser) expectKeyword(keyword string) {
-	lit := p.expect(scanner.Ident)
-	if lit != keyword {
-		p.errorf("expected keyword %s, got %q", keyword, lit)
-	}
-}
-
-// ----------------------------------------------------------------------------
-// Import declarations
-
-// ImportPath = string_lit .
-//
-func (p *gcParser) parsePkgId() *ast.Object {
-	id, err := strconv.Unquote(p.expect(scanner.String))
-	if err != nil {
-		p.error(err)
-	}
-
-	switch id {
-	case "":
-		// id == "" stands for the imported package id
-		// (only known at time of package installation)
-		id = p.id
-	case "unsafe":
-		// package unsafe is not in the imports map - handle explicitly
-		return Unsafe
-	}
-
-	pkg := p.imports[id]
-	if pkg == nil {
-		scope := ast.NewScope(nil)
-		pkg = ast.NewObj(ast.Pkg, "")
-		pkg.Data = scope
-		p.imports[id] = pkg
-	}
-
-	return pkg
-}
-
-// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
-func (p *gcParser) parseDotIdent() string {
-	ident := ""
-	if p.tok != scanner.Int {
-		sep := 'x' // not white space
-		for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
-			ident += p.lit
-			sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
-			p.next()
-		}
-	}
-	if ident == "" {
-		p.expect(scanner.Ident) // use expect() for error handling
-	}
-	return ident
-}
-
-// ExportedName = "@" ImportPath "." dotIdentifier .
-//
-func (p *gcParser) parseExportedName() (*ast.Object, string) {
-	p.expect('@')
-	pkg := p.parsePkgId()
-	p.expect('.')
-	name := p.parseDotIdent()
-	return pkg, name
-}
-
-// ----------------------------------------------------------------------------
-// Types
-
-// BasicType = identifier .
-//
-func (p *gcParser) parseBasicType() Type {
-	id := p.expect(scanner.Ident)
-	obj := Universe.Lookup(id)
-	if obj == nil || obj.Kind != ast.Typ {
-		p.errorf("not a basic type: %s", id)
-	}
-	return obj.Type.(Type)
-}
-
-// ArrayType = "[" int_lit "]" Type .
-//
-func (p *gcParser) parseArrayType() Type {
-	// "[" already consumed and lookahead known not to be "]"
-	lit := p.expect(scanner.Int)
-	p.expect(']')
-	elt := p.parseType()
-	n, err := strconv.ParseUint(lit, 10, 64)
-	if err != nil {
-		p.error(err)
-	}
-	return &Array{Len: n, Elt: elt}
-}
-
-// MapType = "map" "[" Type "]" Type .
-//
-func (p *gcParser) parseMapType() Type {
-	p.expectKeyword("map")
-	p.expect('[')
-	key := p.parseType()
-	p.expect(']')
-	elt := p.parseType()
-	return &Map{Key: key, Elt: elt}
-}
-
-// Name = identifier | "?" | ExportedName  .
-//
-func (p *gcParser) parseName() (name string) {
-	switch p.tok {
-	case scanner.Ident:
-		name = p.lit
-		p.next()
-	case '?':
-		// anonymous
-		p.next()
-	case '@':
-		// exported name prefixed with package path
-		_, name = p.parseExportedName()
-	default:
-		p.error("name expected")
-	}
-	return
-}
-
-// Field = Name Type [ string_lit ] .
-//
-func (p *gcParser) parseField() (fld *ast.Object, tag string) {
-	name := p.parseName()
-	ftyp := p.parseType()
-	if name == "" {
-		// anonymous field - ftyp must be T or *T and T must be a type name
-		if _, ok := Deref(ftyp).(*Name); !ok {
-			p.errorf("anonymous field expected")
-		}
-	}
-	if p.tok == scanner.String {
-		tag = p.expect(scanner.String)
-	}
-	fld = ast.NewObj(ast.Var, name)
-	fld.Type = ftyp
-	return
-}
-
-// StructType = "struct" "{" [ FieldList ] "}" .
-// FieldList  = Field { ";" Field } .
-//
-func (p *gcParser) parseStructType() Type {
-	var fields []*ast.Object
-	var tags []string
-
-	parseField := func() {
-		fld, tag := p.parseField()
-		fields = append(fields, fld)
-		tags = append(tags, tag)
-	}
-
-	p.expectKeyword("struct")
-	p.expect('{')
-	if p.tok != '}' {
-		parseField()
-		for p.tok == ';' {
-			p.next()
-			parseField()
-		}
-	}
-	p.expect('}')
-
-	return &Struct{Fields: fields, Tags: tags}
-}
-
-// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
-//
-func (p *gcParser) parseParameter() (par *ast.Object, isVariadic bool) {
-	name := p.parseName()
-	if name == "" {
-		name = "_" // cannot access unnamed identifiers
-	}
-	if p.tok == '.' {
-		p.expectSpecial("...")
-		isVariadic = true
-	}
-	ptyp := p.parseType()
-	// ignore argument tag
-	if p.tok == scanner.String {
-		p.expect(scanner.String)
-	}
-	par = ast.NewObj(ast.Var, name)
-	par.Type = ptyp
-	return
-}
-
-// Parameters    = "(" [ ParameterList ] ")" .
-// ParameterList = { Parameter "," } Parameter .
-//
-func (p *gcParser) parseParameters() (list []*ast.Object, isVariadic bool) {
-	parseParameter := func() {
-		par, variadic := p.parseParameter()
-		list = append(list, par)
-		if variadic {
-			if isVariadic {
-				p.error("... not on final argument")
-			}
-			isVariadic = true
-		}
-	}
-
-	p.expect('(')
-	if p.tok != ')' {
-		parseParameter()
-		for p.tok == ',' {
-			p.next()
-			parseParameter()
-		}
-	}
-	p.expect(')')
-
-	return
-}
-
-// Signature = Parameters [ Result ] .
-// Result    = Type | Parameters .
-//
-func (p *gcParser) parseSignature() *Func {
-	params, isVariadic := p.parseParameters()
-
-	// optional result type
-	var results []*ast.Object
-	switch p.tok {
-	case scanner.Ident, '[', '*', '<', '@':
-		// single, unnamed result
-		result := ast.NewObj(ast.Var, "_")
-		result.Type = p.parseType()
-		results = []*ast.Object{result}
-	case '(':
-		// named or multiple result(s)
-		var variadic bool
-		results, variadic = p.parseParameters()
-		if variadic {
-			p.error("... not permitted on result type")
-		}
-	}
-
-	return &Func{Params: params, Results: results, IsVariadic: isVariadic}
-}
-
-// MethodOrEmbedSpec = Name [ Signature ] .
-//
-func (p *gcParser) parseMethodOrEmbedSpec() *ast.Object {
-	p.parseName()
-	if p.tok == '(' {
-		p.parseSignature()
-		// TODO(gri) compute method object
-		return ast.NewObj(ast.Fun, "_")
-	}
-	// TODO lookup name and return that type
-	return ast.NewObj(ast.Typ, "_")
-}
-
-// InterfaceType = "interface" "{" [ MethodOrEmbedList ] "}" .
-// MethodOrEmbedList = MethodOrEmbedSpec { ";" MethodOrEmbedSpec } .
-//
-func (p *gcParser) parseInterfaceType() Type {
-	var methods ObjList
-
-	parseMethod := func() {
-		switch m := p.parseMethodOrEmbedSpec(); m.Kind {
-		case ast.Typ:
-			// TODO expand embedded methods
-		case ast.Fun:
-			methods = append(methods, m)
-		}
-	}
-
-	p.expectKeyword("interface")
-	p.expect('{')
-	if p.tok != '}' {
-		parseMethod()
-		for p.tok == ';' {
-			p.next()
-			parseMethod()
-		}
-	}
-	p.expect('}')
-
-	methods.Sort()
-	return &Interface{Methods: methods}
-}
-
-// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
-//
-func (p *gcParser) parseChanType() Type {
-	dir := ast.SEND | ast.RECV
-	if p.tok == scanner.Ident {
-		p.expectKeyword("chan")
-		if p.tok == '<' {
-			p.expectSpecial("<-")
-			dir = ast.SEND
-		}
-	} else {
-		p.expectSpecial("<-")
-		p.expectKeyword("chan")
-		dir = ast.RECV
-	}
-	elt := p.parseType()
-	return &Chan{Dir: dir, Elt: elt}
-}
-
-// Type =
-//	BasicType | TypeName | ArrayType | SliceType | StructType |
-//      PointerType | FuncType | InterfaceType | MapType | ChanType |
-//      "(" Type ")" .
-// BasicType = ident .
-// TypeName = ExportedName .
-// SliceType = "[" "]" Type .
-// PointerType = "*" Type .
-// FuncType = "func" Signature .
-//
-func (p *gcParser) parseType() Type {
-	switch p.tok {
-	case scanner.Ident:
-		switch p.lit {
-		default:
-			return p.parseBasicType()
-		case "struct":
-			return p.parseStructType()
-		case "func":
-			// FuncType
-			p.next()
-			return p.parseSignature()
-		case "interface":
-			return p.parseInterfaceType()
-		case "map":
-			return p.parseMapType()
-		case "chan":
-			return p.parseChanType()
-		}
-	case '@':
-		// TypeName
-		pkg, name := p.parseExportedName()
-		return p.declare(pkg.Data.(*ast.Scope), ast.Typ, name).Type.(Type)
-	case '[':
-		p.next() // look ahead
-		if p.tok == ']' {
-			// SliceType
-			p.next()
-			return &Slice{Elt: p.parseType()}
-		}
-		return p.parseArrayType()
-	case '*':
-		// PointerType
-		p.next()
-		return &Pointer{Base: p.parseType()}
-	case '<':
-		return p.parseChanType()
-	case '(':
-		// "(" Type ")"
-		p.next()
-		typ := p.parseType()
-		p.expect(')')
-		return typ
-	}
-	p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
-	return nil
-}
-
-// ----------------------------------------------------------------------------
-// Declarations
-
-// ImportDecl = "import" identifier string_lit .
-//
-func (p *gcParser) parseImportDecl() {
-	p.expectKeyword("import")
-	// The identifier has no semantic meaning in the import data.
-	// It exists so that error messages can print the real package
-	// name: binary.ByteOrder instead of "encoding/binary".ByteOrder.
-	name := p.expect(scanner.Ident)
-	pkg := p.parsePkgId()
-	assert(pkg.Name == "" || pkg.Name == name)
-	pkg.Name = name
-}
-
-// int_lit = [ "+" | "-" ] { "0" ... "9" } .
-//
-func (p *gcParser) parseInt() (sign, val string) {
-	switch p.tok {
-	case '-':
-		p.next()
-		sign = "-"
-	case '+':
-		p.next()
-	}
-	val = p.expect(scanner.Int)
-	return
-}
-
-// number = int_lit [ "p" int_lit ] .
-//
-func (p *gcParser) parseNumber() Const {
-	// mantissa
-	sign, val := p.parseInt()
-	mant, ok := new(big.Int).SetString(sign+val, 10)
-	assert(ok)
-
-	if p.lit == "p" {
-		// exponent (base 2)
-		p.next()
-		sign, val = p.parseInt()
-		exp64, err := strconv.ParseUint(val, 10, 0)
-		if err != nil {
-			p.error(err)
-		}
-		exp := uint(exp64)
-		if sign == "-" {
-			denom := big.NewInt(1)
-			denom.Lsh(denom, exp)
-			return Const{new(big.Rat).SetFrac(mant, denom)}
-		}
-		if exp > 0 {
-			mant.Lsh(mant, exp)
-		}
-		return Const{new(big.Rat).SetInt(mant)}
-	}
-
-	return Const{mant}
-}
-
-// ConstDecl   = "const" ExportedName [ Type ] "=" Literal .
-// Literal     = bool_lit | int_lit | float_lit | complex_lit | string_lit .
-// bool_lit    = "true" | "false" .
-// complex_lit = "(" float_lit "+" float_lit ")" .
-// rune_lit = "(" int_lit "+" int_lit ")" .
-// string_lit  = `"` { unicode_char } `"` .
-//
-func (p *gcParser) parseConstDecl() {
-	p.expectKeyword("const")
-	pkg, name := p.parseExportedName()
-	obj := p.declare(pkg.Data.(*ast.Scope), ast.Con, name)
-	var x Const
-	var typ Type
-	if p.tok != '=' {
-		obj.Type = p.parseType()
-	}
-	p.expect('=')
-	switch p.tok {
-	case scanner.Ident:
-		// bool_lit
-		if p.lit != "true" && p.lit != "false" {
-			p.error("expected true or false")
-		}
-		x = Const{p.lit == "true"}
-		typ = Bool.Underlying
-		p.next()
-	case '-', scanner.Int:
-		// int_lit
-		x = p.parseNumber()
-		typ = Int.Underlying
-		if _, ok := x.val.(*big.Rat); ok {
-			typ = Float64.Underlying
-		}
-	case '(':
-		// complex_lit or rune_lit
-		p.next()
-		if p.tok == scanner.Char {
-			p.next()
-			p.expect('+')
-			p.parseNumber()
-			p.expect(')')
-			// TODO: x = ...
-			break
-		}
-		re := p.parseNumber()
-		p.expect('+')
-		im := p.parseNumber()
-		p.expect(')')
-		x = Const{cmplx{re.val.(*big.Rat), im.val.(*big.Rat)}}
-		typ = Complex128.Underlying
-	case scanner.Char:
-		// TODO: x = ...
-		p.next()
-	case scanner.String:
-		// string_lit
-		x = MakeConst(token.STRING, p.lit)
-		p.next()
-		typ = String.Underlying
-	default:
-		p.errorf("expected literal got %s", scanner.TokenString(p.tok))
-	}
-	if obj.Type == nil {
-		obj.Type = typ
-	}
-	obj.Data = x
-}
-
-// TypeDecl = "type" ExportedName Type .
-//
-func (p *gcParser) parseTypeDecl() {
-	p.expectKeyword("type")
-	pkg, name := p.parseExportedName()
-	obj := p.declare(pkg.Data.(*ast.Scope), ast.Typ, name)
-
-	// The type object may have been imported before and thus already
-	// have a type associated with it. We still need to parse the type
-	// structure, but throw it away if the object already has a type.
-	// This ensures that all imports refer to the same type object for
-	// a given type declaration.
-	typ := p.parseType()
-
-	if name := obj.Type.(*Name); name.Underlying == nil {
-		assert(Underlying(typ) == typ)
-		name.Underlying = typ
-	}
-}
-
-// VarDecl = "var" ExportedName Type .
-//
-func (p *gcParser) parseVarDecl() {
-	p.expectKeyword("var")
-	pkg, name := p.parseExportedName()
-	obj := p.declare(pkg.Data.(*ast.Scope), ast.Var, name)
-	obj.Type = p.parseType()
-}
-
-// FuncBody = "{" ... "}" .
-//
-func (p *gcParser) parseFuncBody() {
-	p.expect('{')
-	for i := 1; i > 0; p.next() {
-		switch p.tok {
-		case '{':
-			i++
-		case '}':
-			i--
-		}
-	}
-}
-
-// FuncDecl = "func" ExportedName Signature [ FuncBody ] .
-//
-func (p *gcParser) parseFuncDecl() {
-	// "func" already consumed
-	pkg, name := p.parseExportedName()
-	obj := p.declare(pkg.Data.(*ast.Scope), ast.Fun, name)
-	obj.Type = p.parseSignature()
-	if p.tok == '{' {
-		p.parseFuncBody()
-	}
-}
-
-// MethodDecl = "func" Receiver Name Signature .
-// Receiver   = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" [ FuncBody ].
-//
-func (p *gcParser) parseMethodDecl() {
-	// "func" already consumed
-	p.expect('(')
-	p.parseParameter() // receiver
-	p.expect(')')
-	p.parseName() // unexported method names in imports are qualified with their package.
-	p.parseSignature()
-	if p.tok == '{' {
-		p.parseFuncBody()
-	}
-}
-
-// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
-//
-func (p *gcParser) parseDecl() {
-	switch p.lit {
-	case "import":
-		p.parseImportDecl()
-	case "const":
-		p.parseConstDecl()
-	case "type":
-		p.parseTypeDecl()
-	case "var":
-		p.parseVarDecl()
-	case "func":
-		p.next() // look ahead
-		if p.tok == '(' {
-			p.parseMethodDecl()
-		} else {
-			p.parseFuncDecl()
-		}
-	}
-	p.expect('\n')
-}
-
-// ----------------------------------------------------------------------------
-// Export
-
-// Export        = PackageClause { Decl } "$$" .
-// PackageClause = "package" identifier [ "safe" ] "\n" .
-//
-func (p *gcParser) parseExport() *ast.Object {
-	p.expectKeyword("package")
-	name := p.expect(scanner.Ident)
-	if p.tok != '\n' {
-		// A package is safe if it was compiled with the -u flag,
-		// which disables the unsafe package.
-		// TODO(gri) remember "safe" package
-		p.expectKeyword("safe")
-	}
-	p.expect('\n')
-
-	assert(p.imports[p.id] == nil)
-	pkg := ast.NewObj(ast.Pkg, name)
-	pkg.Data = ast.NewScope(nil)
-	p.imports[p.id] = pkg
-
-	for p.tok != '$' && p.tok != scanner.EOF {
-		p.parseDecl()
-	}
-
-	if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
-		// don't call next()/expect() since reading past the
-		// export data may cause scanner errors (e.g. NUL chars)
-		p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
-	}
-
-	if n := p.scanner.ErrorCount; n != 0 {
-		p.errorf("expected no scanner errors, got %d", n)
-	}
-
-	return pkg
-}
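
GcImport satisfies the ast.Importer signature, so it can be handed directly to ast.NewPackage (as check_test.go does) or called on its own. A short sketch against an installed package (import path hypothetical, not part of this commit; assumes a contemporaneous Go tool layout):

package main

import (
	"fmt"
	"go/ast"

	"example.org/go/types" // hypothetical import path for the pkg/ package in this commit
)

func main() {
	imports := make(map[string]*ast.Object)
	pkg, err := types.GcImport(imports, "fmt")
	if err != nil {
		panic(err)
	}
	scope := pkg.Data.(*ast.Scope) // package scope populated by the gc export data parser
	fmt.Printf("package %s: %d exported objects\n", pkg.Name, len(scope.Objects))
}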

File pkg/gcimporter_test.go

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package types
-
-import (
-	"go/ast"
-	"go/build"
-	"io/ioutil"
-	"os"
-	"os/exec"
-	"path/filepath"
-	"runtime"
-	"strings"
-	"testing"
-	"time"
-)
-
-var gcPath string // Go compiler path
-
-func init() {
-	// determine compiler
-	var gc string
-	switch runtime.GOARCH {
-	case "386":
-		gc = "8g"
-	case "amd64":
-		gc = "6g"
-	case "arm":
-		gc = "5g"
-	default:
-		gcPath = "unknown-GOARCH-compiler"
-		return
-	}
-	gcPath = filepath.Join(build.ToolDir, gc)
-}
-
-func compile(t *testing.T, dirname, filename string) {
-	cmd := exec.Command(gcPath, filename)
-	cmd.Dir = dirname
-	out, err := cmd.CombinedOutput()
-	if err != nil {
-		t.Errorf("%s %s failed: %s", gcPath, filename, err)
-		return
-	}
-	t.Logf("%s", string(out))
-}
-
-// Use the same global imports map for all tests. The effect is
-// as if all tested packages were imported into a single package.
-var imports = make(map[string]*ast.Object)
-
-func testPath(t *testing.T, path string) bool {
-	_, err := GcImport(imports, path)
-	if err != nil {
-		t.Errorf("testPath(%s): %s", path, err)
-		return false
-	}
-	return true
-}
-
-const maxTime = 3 * time.Second
-
-func testDir(t *testing.T, dir string, endTime time.Time) (nimports int) {
-	dirname := filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH, dir)
-	list, err := ioutil.ReadDir(dirname)
-	if err != nil {
-		t.Errorf("testDir(%s): %s", dirname, err)
-	}
-	for _, f := range list {
-		if time.Now().After(endTime) {
-			t.Log("testing time used up")
-			return
-		}
-		switch {
-		case !f.IsDir():
-			// try extensions
-			for _, ext := range pkgExts {
-				if strings.HasSuffix(f.Name(), ext) {
-					name := f.Name()[0 : len(f.Name())-len(ext)] // remove extension
-					if testPath(t, filepath.Join(dir, name)) {
-						nimports++
-					}
-				}
-			}
-		case f.IsDir():
-			nimports += testDir(t, filepath.Join(dir, f.Name()), endTime)
-		}
-	}
-	return
-}
-
-func TestGcImport(t *testing.T) {
-	// On cross-compile builds, the path will not exist.
-	// Need to use GOHOSTOS, which is not available.
-	if _, err := os.Stat(gcPath); err != nil {
-		t.Logf("skipping test: %v", err)
-		return
-	}
-
-	compile(t, "testdata", "exports.go")
-
-	nimports := 0
-	if testPath(t, "./testdata/exports") {
-		nimports++
-	}
-	nimports += testDir(t, "", time.Now().Add(maxTime)) // installed packages
-	t.Logf("tested %d imports", nimports)
-}

File pkg/testdata/exports.go

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is used to generate an object file which
-// serves as test file for gcimporter_test.go.
-
-package exports
-
-import (
-	"go/ast"
-)
-
-const (
-	C0 int = 0
-	C1     = 3.14159265
-	C2     = 2.718281828i
-	C3     = -123.456e-789
-	C4     = +123.456E+789
-	C5     = 1234i
-	C6     = "foo\n"
-	C7     = `bar\n`
-)
-
-type (
-	T1  int
-	T2  [10]int
-	T3  []int
-	T4  *int
-	T5  chan int
-	T6a chan<- int
-	T6b chan (<-chan int)
-	T6c chan<- (chan int)
-	T7  <-chan *ast.File
-	T8  struct{}
-	T9  struct {
-		a    int
-		b, c float32
-		d    []string `go:"tag"`
-	}
-	T10 struct {
-		T8
-		T9
-		_ *T10
-	}
-	T11 map[int]string
-	T12 interface{}
-	T13 interface {
-		m1()
-		m2(int) float32
-	}
-	T14 interface {
-		T12
-		T13
-		m3(x ...struct{}) []T9
-	}
-	T15 func()
-	T16 func(int)
-	T17 func(x int)
-	T18 func() float32
-	T19 func() (x float32)
-	T20 func(...interface{})
-	T21 struct{ next *T21 }
-	T22 struct{ link *T23 }
-	T23 struct{ link *T22 }
-	T24 *T24
-	T25 *T26
-	T26 *T27
-	T27 *T25
-	T28 func(T28) T28
-)
-
-var (
-	V0 int
-	V1 = -991.0
-)
-
-func F1()         {}
-func F2(x int)    {}
-func F3() int     { return 0 }
-func F4() float32 { return 0 }
-func F5(a, b, c int, u, v, w struct{ x, y T1 }, more ...interface{}) (p, q, r chan<- T10)
-
-func (p *T1) M1()

File pkg/testdata/test0.src

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// type declarations
-
-package test0
-
-import "unsafe"
-
-const pi = 3.1415
-
-type (
-	N undeclared /* ERROR "undeclared" */
-	B bool
-	I int32
-	A [10]P
-	T struct {
-		x, y P
-	}
-	P *T
-	R (*R)
-	F func(A) I
-	Y interface {
-		f(A) I
-	}
-	S [](((P)))
-	M map[I]F
-	C chan<- I
-)
-
-
-type (
-	p1 pi /* ERROR "not a package" */ .foo
-	p2 unsafe.Pointer
-)
-
-
-type (
-	Pi pi /* ERROR "not a type" */
-
-	a /* DISABLED "illegal cycle" */ a
-	a /* ERROR "redeclared" */ int
-
-	// where the cycle error appears depends on the
-	// order in which declarations are processed
-	// (which depends on the order in which a map
-	// is iterated through)
-	b c
-	c /* DISABLED "illegal cycle" */ d
-	d e
-	e b
-
-	t *t
-
-	U V
-	V *W
-	W U
-
-	P1 *S2
-	P2 P1
-
-	S0 struct {
-	}
-	S1 struct {
-		a, b, c int
-		u, v, a /* ERROR "redeclared" */ float32
-	}
-	S2 struct {
-		U // anonymous field
-		// TODO(gri) recognize double-declaration below
-		// U /* ERROR "redeclared" */ int
-	}
-	S3 struct {
-		x S2
-	}
-	S4/* DISABLED "illegal cycle" */ struct {
-		S4
-	}
-	S5 struct {
-		S6
-	}
-	S6 /* DISABLED "illegal cycle" */ struct {
-		field S7
-	}
-	S7 struct {
-		S5
-	}
-
-	L1 []L1
-	L2 []int
-
-	A1 [10]int
-	A2 /* DISABLED "illegal cycle" */ [10]A2
-	A3 /* DISABLED "illegal cycle" */ [10]struct {
-		x A4
-	}
-	A4 [10]A3
-
-	F1 func()
-	F2 func(x, y, z float32)
-	F3 func(x, y, x /* ERROR "redeclared" */ float32)
-	F4 func() (x, y, x /* ERROR "redeclared" */ float32)
-	F5 func(x int) (x /* ERROR "redeclared" */ float32)
-	F6 func(x ...int)
-
-	I1 interface{}
-	I2 interface {
-		m1()
-	}
-	I3 interface {
-		m1()
-		m1 /* ERROR "redeclared" */ ()
-	}
-	I4 interface {
-		m1(x, y, x /* ERROR "redeclared" */ float32)
-		m2() (x, y, x /* ERROR "redeclared" */ float32)
-		m3(x int) (x /* ERROR "redeclared" */ float32)
-	}
-	I5 interface {
-		m1(I5)
-	}
-	I6 interface {
-		S0 /* ERROR "non-interface" */
-	}
-	I7 interface {
-		I1
-		I1
-	}
-	I8 /* DISABLED "illegal cycle" */ interface {
-		I8
-	}
-	I9 /* DISABLED "illegal cycle" */ interface {
-		I10
-	}
-	I10 interface {
-		I11
-	}
-	I11 interface {
-		I9
-	}
-
-	C1 chan int
-	C2 <-chan int
-	C3 chan<- C3
-	C4 chan C5
-	C5 chan C6
-	C6 chan C4
-
-	M1 map[Last]string
-	M2 map[string]M2
-
-	Last int
-)

File pkg/types.go

-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package types declares the types used to represent Go types
-// (UNDER CONSTRUCTION). ANY AND ALL PARTS MAY CHANGE.
-//
-package types
-
-import (
-	"go/ast"
-	"sort"
-)
-
-// All types implement the Type interface.
-type Type interface {
-	isType()
-}
-
-// All concrete types embed ImplementsType which
-// ensures that all types implement the Type interface.
-type ImplementsType struct{}
-
-func (t *ImplementsType) isType() {}
-
-// A Bad type is a non-nil placeholder type when we don't know a type.
-type Bad struct {
-	ImplementsType
-	Msg string // for better error reporting/debugging
-}
-
-// A Basic represents a (unnamed) basic type.
-type Basic struct {
-	ImplementsType
-	// TODO(gri) need a field specifying the exact basic type
-}
-
-// An Array represents an array type [Len]Elt.
-type Array struct {
-	ImplementsType
-	Len uint64
-	Elt Type
-}
-
-// A Slice represents a slice type []Elt.
-type Slice struct {
-	ImplementsType
-	Elt Type
-}
-
-// A Struct represents a struct type struct{...}.
-// Anonymous fields are represented by objects with empty names.
-type Struct struct {
-	ImplementsType
-	Fields ObjList  // struct fields; or nil
-	Tags   []string // corresponding tags; or nil
-	// TODO(gri) This type needs some rethinking:
-	// - at the moment anonymous fields are marked with "" object names,
-	//   and their names have to be reconstructed
-	// - there is no scope for fast lookup (but the parser creates one)
-}
-
-// A Pointer represents a pointer type *Base.
-type Pointer struct {
-	ImplementsType
-	Base Type
-}
-
-// A Func represents a function type func(...) (...).
-// Unnamed parameters are represented by objects with empty names.
-type Func struct {
-	ImplementsType
-	Recv       *ast.Object // nil if not a method
-	Params     ObjList     // (incoming) parameters from left to right; or nil
-	Results    ObjList     // (outgoing) results from left to right; or nil
-	IsVariadic bool        // true if the last parameter's type is of the form ...T
-}
-
-// An Interface represents an interface type interface{...}.
-type Interface struct {
-	ImplementsType
-	Methods ObjList // interface methods sorted by name; or nil