// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pointer

// This file defines the constraint generation phase.

// TODO(adonovan): move the constraint definitions and the store() etc
// functions which add them (and are also used by the solver) into a
// new file, constraints.go.

import (
	"fmt"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/ssa"
)

var (
	tEface     = types.NewInterface(nil, nil).Complete()
	tInvalid   = types.Typ[types.Invalid]
	tUnsafePtr = types.Typ[types.UnsafePointer]
)

// ---------- Node creation ----------

// nextNode returns the index of the next unused node.
func (a *analysis) nextNode() nodeid {
	return nodeid(len(a.nodes))
}

// addNodes creates nodes for all scalar elements in type typ, and
// returns the id of the first one, or zero if the type was
// analytically uninteresting.
//
// comment explains the origin of the nodes, as a debugging aid.
//
func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
	id := a.nextNode()
	for _, fi := range a.flatten(typ) {
		a.addOneNode(fi.typ, comment, fi)
	}
	if id == a.nextNode() {
		return 0 // type contained no pointers
	}
	return id
}
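
// Illustrative sketch (hypothetical usage): because the nodes of a
// flattened type are allocated contiguously, a subfield's node can be
// reached by offset arithmetic on the id returned here, e.g.
//
//	id := a.addNodes(T, "example")      // first node of T's block
//	f1 := id + nodeid(a.offsetOf(T, 1)) // node of field #1
//
// This contiguity is exactly what valueOffsetNode, below, relies on.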

// addOneNode creates a single node with type typ, and returns its id.
//
// typ should generally be scalar (except for tagged.T nodes
// and struct/array identity nodes). Use addNodes for non-scalar types.
//
// comment explains the origin of the node, as a debugging aid.
// subelement indicates the subelement, e.g. ".a.b[*].c".
//
func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldInfo) nodeid {
	id := a.nextNode()
	a.nodes = append(a.nodes, &node{typ: typ, subelement: subelement, solve: new(solverState)})
	if a.log != nil {
		fmt.Fprintf(a.log, "\tcreate n%d %s for %s%s\n",
			id, typ, comment, subelement.path())
	}
	return id
}

// setValueNode associates node id with the value v.
// cgn identifies the context iff v is a local variable.
//
func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
	if cgn != nil {
		a.localval[v] = id
	} else {
		a.globalval[v] = id
	}
	if a.log != nil {
		fmt.Fprintf(a.log, "\tval[%s] = n%d (%T)\n", v.Name(), id, v)
	}

	// Due to context-sensitivity, we may encounter the same Value
	// in many contexts. We merge them to a canonical node, since
	// that's what all clients want.

	// Record the (v, id) relation if the client has queried pts(v).
	if _, ok := a.config.Queries[v]; ok {
		t := v.Type()
		ptr, ok := a.result.Queries[v]
		if !ok {
			// First time? Create the canonical query node.
			ptr = Pointer{a, a.addNodes(t, "query")}
			a.result.Queries[v] = ptr
		}
		a.copy(ptr.n, id, a.sizeof(t))
	}

	// Record the (*v, id) relation if the client has queried pts(*v).
	if _, ok := a.config.IndirectQueries[v]; ok {
		t := v.Type()
		ptr, ok := a.result.IndirectQueries[v]
		if !ok {
			// First time? Create the canonical indirect query node.
			ptr = Pointer{a, a.addNodes(v.Type(), "query.indirect")}
			a.result.IndirectQueries[v] = ptr
		}
		a.genLoad(cgn, ptr.n, v, 0, a.sizeof(t))
	}

	for _, query := range a.config.extendedQueries[v] {
		t, nid := a.evalExtendedQuery(v.Type().Underlying(), id, query.ops)

		if query.ptr.a == nil {
			query.ptr.a = a
			query.ptr.n = a.addNodes(t, "query.extended")
		}
		a.copy(query.ptr.n, nid, a.sizeof(t))
	}
}

// endObject marks the end of a sequence of calls to addNodes denoting
// a single object allocation.
//
// obj is the start node of the object, from a prior call to nextNode.
// Its size, flags and optional data will be updated.
//
func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object {
	// Ensure object is non-empty by padding;
	// the pad will be the object node.
	size := uint32(a.nextNode() - obj)
	if size == 0 {
		a.addOneNode(tInvalid, "padding", nil)
	}
	objNode := a.nodes[obj]
	o := &object{
		size: size, // excludes padding
		cgn:  cgn,
		data: data,
	}
	objNode.obj = o

	return o
}

// makeFunctionObject creates and returns a new function object
// (contour) for fn, and returns the id of its first node. It also
// enqueues fn for subsequent constraint generation.
//
// For a context-sensitive contour, callersite identifies the sole
// callsite; for shared contours, callersite is nil.
//
func (a *analysis) makeFunctionObject(fn *ssa.Function, callersite *callsite) nodeid {
	if a.log != nil {
		fmt.Fprintf(a.log, "\t---- makeFunctionObject %s\n", fn)
	}

	// obj is the function object (identity, params, results).
	obj := a.nextNode()
	cgn := a.makeCGNode(fn, obj, callersite)
	sig := fn.Signature
	a.addOneNode(sig, "func.cgnode", nil) // (scalar with Signature type)
	if recv := sig.Recv(); recv != nil {
		a.addNodes(recv.Type(), "func.recv")
	}
	a.addNodes(sig.Params(), "func.params")
	a.addNodes(sig.Results(), "func.results")
	a.endObject(obj, cgn, fn).flags |= otFunction

	if a.log != nil {
		fmt.Fprintf(a.log, "\t----\n")
	}

	// Queue it up for constraint processing.
	a.genq = append(a.genq, cgn)

	return obj
}

// makeTagged creates a tagged object of type typ.
func (a *analysis) makeTagged(typ types.Type, cgn *cgnode, data interface{}) nodeid {
	obj := a.addOneNode(typ, "tagged.T", nil) // NB: type may be non-scalar!
	a.addNodes(typ, "tagged.v")
	a.endObject(obj, cgn, data).flags |= otTagged
	return obj
}

// makeRtype returns the canonical tagged object of type *rtype whose
// payload points to the sole rtype object for T.
//
// TODO(adonovan): move to reflect.go; it's part of the solver really.
//
func (a *analysis) makeRtype(T types.Type) nodeid {
	if v := a.rtypes.At(T); v != nil {
		return v.(nodeid)
	}

	// Create the object for the reflect.rtype itself, which is
	// ordinarily a large struct but here a single node will do.
	obj := a.nextNode()
	a.addOneNode(T, "reflect.rtype", nil)
	a.endObject(obj, nil, T)

	id := a.makeTagged(a.reflectRtypePtr, nil, T)
	a.nodes[id+1].typ = T // trick (each *rtype tagged object is a singleton)
	a.addressOf(a.reflectRtypePtr, id+1, obj)

	a.rtypes.Set(T, id)
	return id
}

// rtypeTaggedValue returns the type of the *reflect.rtype-tagged object obj.
func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
	tDyn, t, _ := a.taggedValue(obj)
	if tDyn != a.reflectRtypePtr {
		panic(fmt.Sprintf("not a *reflect.rtype-tagged object: obj=n%d tag=%v payload=n%d", obj, tDyn, t))
	}
	return a.nodes[t].typ
}

// valueNode returns the id of the value node for v, creating it (and
// the association) as needed. It may return zero for uninteresting
// values containing no pointers.
//
func (a *analysis) valueNode(v ssa.Value) nodeid {
	// Value nodes for locals are created en masse by genFunc.
	if id, ok := a.localval[v]; ok {
		return id
	}

	// Value nodes for globals are created on demand.
	id, ok := a.globalval[v]
	if !ok {
		var comment string
		if a.log != nil {
			comment = v.String()
		}
		id = a.addNodes(v.Type(), comment)
		if obj := a.objectNode(nil, v); obj != 0 {
			a.addressOf(v.Type(), id, obj)
		}
		a.setValueNode(v, id, nil)
	}
	return id
}

// valueOffsetNode ascertains the node for tuple/struct value v,
// then returns the node for its subfield #index.
//
func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
	id := a.valueNode(v)
	if id == 0 {
		panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v))
	}
	return id + nodeid(a.offsetOf(v.Type(), index))
}

// isTaggedObject reports whether object obj is a tagged object.
func (a *analysis) isTaggedObject(obj nodeid) bool {
	return a.nodes[obj].obj.flags&otTagged != 0
}

// taggedValue returns the dynamic type tag, the (first node of the)
// payload, and the indirect flag of the tagged object starting at id.
// Panic ensues if !isTaggedObject(id).
//
func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect bool) {
	n := a.nodes[obj]
	flags := n.obj.flags
	if flags&otTagged == 0 {
		panic(fmt.Sprintf("not a tagged object: n%d", obj))
	}
	return n.typ, obj + 1, flags&otIndirect != 0
}
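
// Illustrative layout (inferred from makeTagged above): a tagged object
// occupies consecutive nodes,
//
//	obj+0  tag node, whose type records the dynamic type T
//	obj+1… payload, the flattened value of type T
//
// so the (tDyn, v) pair returned here is simply (nodes[obj].typ, obj+1).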

// funcParams returns the first node of the params (P) block of the
// function whose object node (obj.flags&otFunction) is id.
//
func (a *analysis) funcParams(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcParams(n%d): not a function object block", id))
	}
	return id + 1
}

// funcResults returns the first node of the results (R) block of the
// function whose object node (obj.flags&otFunction) is id.
//
func (a *analysis) funcResults(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcResults(n%d): not a function object block", id))
	}
	sig := n.typ.(*types.Signature)
	id += 1 + nodeid(a.sizeof(sig.Params()))
	if sig.Recv() != nil {
		id += nodeid(a.sizeof(sig.Recv().Type()))
	}
	return id
}
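
// Worked example (hypothetical signature; assumes each pointer flattens
// to a single node): for a function object built by makeFunctionObject for
//
//	func (r *T) m(x, y *int) *int
//
// the block is [identity, r, x, y, result], so funcParams(id) = id+1
// and funcResults(id) = id + 1 + 1 (recv) + 2 (params) = id+4.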

// ---------- Constraint creation ----------

// copy creates a constraint of the form dst = src.
// sizeof is the width (in logical fields) of the copied type.
//
func (a *analysis) copy(dst, src nodeid, sizeof uint32) {
	if src == dst || sizeof == 0 {
		return // trivial
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed copy dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&copyConstraint{dst, src})
		src++
		dst++
	}
}
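
// Illustrative expansion (just the loop above, unrolled):
// a.copy(dst, src, 3) emits three unit constraints,
//
//	dst = src,  dst+1 = src+1,  dst+2 = src+2
//
// one per logical field, matching the contiguous node layout that
// addNodes produces for a flattened type.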

// addressOf creates a constraint of the form id = &obj.
// T is the type of the address.
func (a *analysis) addressOf(T types.Type, id, obj nodeid) {
	if id == 0 {
		panic("addressOf: zero id")
	}
	if obj == 0 {
		panic("addressOf: zero obj")
	}
	if a.shouldTrack(T) {
		a.addConstraint(&addrConstraint{id, obj})
	}
}

// load creates a load constraint of the form dst = src[offset].
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the loaded type.
//
func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
	if dst == 0 {
		return // load of non-pointerlike value
	}
	if src == 0 && dst == 0 {
		return // non-pointerlike operation
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed load dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&loadConstraint{offset, dst, src})
		offset++
		dst++
	}
}

// store creates a store constraint of the form dst[offset] = src.
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the stored type.
//
func (a *analysis) store(dst, src nodeid, offset uint32, sizeof uint32) {
	if src == 0 {
		return // store of non-pointerlike value
	}
	if src == 0 && dst == 0 {
		return // non-pointerlike operation
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed store dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&storeConstraint{offset, dst, src})
		offset++
		src++
	}
}

// offsetAddr creates an offsetAddr constraint of the form dst = &src.#offset.
// offset is the field offset in logical fields.
// T is the type of the address.
//
func (a *analysis) offsetAddr(T types.Type, dst, src nodeid, offset uint32) {
	if !a.shouldTrack(T) {
		return
	}
	if offset == 0 {
		// Simplify  dst = &src->f0
		//       to  dst = src
		// (NB: this optimisation is defeated by the identity
		// field prepended to struct and array objects.)
		a.copy(dst, src, 1)
	} else {
		a.addConstraint(&offsetAddrConstraint{offset, dst, src})
	}
}

// typeAssert creates a typeFilter or untag constraint of the form dst = src.(T):
// typeFilter for an interface, untag for a concrete type.
// The exact flag is specified as for untagConstraint.
//
func (a *analysis) typeAssert(T types.Type, dst, src nodeid, exact bool) {
	if isInterface(T) {
		a.addConstraint(&typeFilterConstraint{T, dst, src})
	} else {
		a.addConstraint(&untagConstraint{T, dst, src, exact})
	}
}

// addConstraint adds c to the constraint set.
func (a *analysis) addConstraint(c constraint) {
	a.constraints = append(a.constraints, c)
	if a.log != nil {
		fmt.Fprintf(a.log, "\t%s\n", c)
	}
}

// copyElems generates load/store constraints for *dst = *src,
// where src and dst are slices or *arrays.
//
func (a *analysis) copyElems(cgn *cgnode, typ types.Type, dst, src ssa.Value) {
	tmp := a.addNodes(typ, "copy")
	sz := a.sizeof(typ)
	a.genLoad(cgn, tmp, src, 1, sz)
	a.genStore(cgn, dst, tmp, 1, sz)
}

// ---------- Constraint generation ----------

// genConv generates constraints for the conversion operation conv.
func (a *analysis) genConv(conv *ssa.Convert, cgn *cgnode) {
	res := a.valueNode(conv)
	if res == 0 {
		return // result is non-pointerlike
	}

	tSrc := conv.X.Type()
	tDst := conv.Type()

	switch utSrc := tSrc.Underlying().(type) {
	case *types.Slice:
		// []byte/[]rune -> string?
		return

	case *types.Pointer:
		// *T -> unsafe.Pointer?
		if tDst.Underlying() == tUnsafePtr {
			return // we don't model unsafe aliasing (unsound)
		}

	case *types.Basic:
		switch tDst.Underlying().(type) {
		case *types.Pointer:
			// Treat unsafe.Pointer->*T conversions like
			// new(T) and create an unaliased object.
			if utSrc == tUnsafePtr {
				obj := a.addNodes(mustDeref(tDst), "unsafe.Pointer conversion")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Slice:
			// string -> []byte/[]rune (or named aliases)?
			if utSrc.Info()&types.IsString != 0 {
				obj := a.addNodes(sliceToArray(tDst), "convert")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Basic:
			// All basic-to-basic type conversions are no-ops.
			// This includes uintptr<->unsafe.Pointer conversions,
			// which we (unsoundly) ignore.
			return
		}
	}

	panic(fmt.Sprintf("illegal *ssa.Convert %s -> %s: %s", tSrc, tDst, conv.Parent()))
}

// genAppend generates constraints for a call to append.
func (a *analysis) genAppend(instr *ssa.Call, cgn *cgnode) {
	// Consider z = append(x, y).  y is optional.
	// This may allocate a new [1]T array; call its object w.
	// We get the following constraints:
	//	z = x
	//	z = &w
	//	*z = *y
	x := instr.Call.Args[0]

	z := instr
	a.copy(a.valueNode(z), a.valueNode(x), 1) // z = x

	if len(instr.Call.Args) == 1 {
		return // no allocation for z = append(x) or _ = append(x).
	}

	// TODO(adonovan): test append([]byte, ...string) []byte.

	y := instr.Call.Args[1]
	tArray := sliceToArray(instr.Call.Args[0].Type())

	w := a.nextNode()
	a.addNodes(tArray, "append")
	a.endObject(w, cgn, instr)

	a.copyElems(cgn, tArray.Elem(), z, y)        // *z = *y
	a.addressOf(instr.Type(), a.valueNode(z), w) // z = &w
}

// genBuiltinCall generates constraints for a call to a built-in.
func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
	call := instr.Common()
	switch call.Value.(*ssa.Builtin).Name() {
	case "append":
		// Safe cast: append cannot appear in a go or defer statement.
		a.genAppend(instr.(*ssa.Call), cgn)

	case "copy":
		tElem := call.Args[0].Type().Underlying().(*types.Slice).Elem()
		a.copyElems(cgn, tElem, call.Args[0], call.Args[1])

	case "panic":
		a.copy(a.panicNode, a.valueNode(call.Args[0]), 1)

	case "recover":
		if v := instr.Value(); v != nil {
			a.copy(a.valueNode(v), a.panicNode, 1)
		}

	case "print":
		// In the tests, the probe might be the sole reference
		// to its arg, so make sure we create nodes for it.
		if len(call.Args) > 0 {
			a.valueNode(call.Args[0])
		}

	case "ssa:wrapnilchk":
		a.copy(a.valueNode(instr.Value()), a.valueNode(call.Args[0]), 1)

	default:
		// No-ops: close len cap real imag complex print println delete.
	}
}

// shouldUseContext defines the context-sensitivity policy. It
// returns true if we should analyse all static calls to fn anew.
//
// Obviously this interface rather limits how much freedom we have to
// choose a policy. The current policy, rather arbitrarily, is true
// for intrinsics and accessor methods (actually: short, single-block,
// call-free functions). This is just a starting point.
//
func (a *analysis) shouldUseContext(fn *ssa.Function) bool {
	if a.findIntrinsic(fn) != nil {
		return true // treat intrinsics context-sensitively
	}
	if len(fn.Blocks) != 1 {
		return false // too expensive
	}
	blk := fn.Blocks[0]
	if len(blk.Instrs) > 10 {
		return false // too expensive
	}
	if fn.Synthetic != "" && (fn.Pkg == nil || fn != fn.Pkg.Func("init")) {
		return true // treat synthetic wrappers context-sensitively
	}
	for _, instr := range blk.Instrs {
		switch instr := instr.(type) {
		case ssa.CallInstruction:
			// Disallow function calls (except to built-ins)
			// because of the danger of unbounded recursion.
			if _, ok := instr.Common().Value.(*ssa.Builtin); !ok {
				return false
			}
		}
	}
	return true
}
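
// Illustrative note (hypothetical function): a short accessor such as
//
//	func (t *T) Name() string { return t.name }
//
// is single-block, call-free and well under 10 instructions, so each
// static call to it gets a fresh contour; anything larger shares one.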

// genStaticCall generates constraints for a statically dispatched function call.
func (a *analysis) genStaticCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	fn := call.StaticCallee()

	// Special cases for inlined intrinsics.
	switch fn {
	case a.runtimeSetFinalizer:
		// Inline SetFinalizer so the call appears direct.
		site.targets = a.addOneNode(tInvalid, "SetFinalizer.targets", nil)
		a.addConstraint(&runtimeSetFinalizerConstraint{
			targets: site.targets,
			x:       a.valueNode(call.Args[0]),
			f:       a.valueNode(call.Args[1]),
		})
		return

	case a.reflectValueCall:
		// Inline (reflect.Value).Call so the call appears direct.
		dotdotdot := false
		ret := reflectCallImpl(a, caller, site, a.valueNode(call.Args[0]), a.valueNode(call.Args[1]), dotdotdot)
		if result != 0 {
			a.addressOf(fn.Signature.Results().At(0).Type(), result, ret)
		}
		return
	}

	// Ascertain the context (contour/cgnode) for a particular call.
	var obj nodeid
	if a.shouldUseContext(fn) {
		obj = a.makeFunctionObject(fn, site) // new contour
	} else {
		obj = a.objectNode(nil, fn) // shared contour
	}
	a.callEdge(caller, site, obj)

	sig := call.Signature()

	// Copy receiver, if any.
	params := a.funcParams(obj)
	args := call.Args
	if sig.Recv() != nil {
		sz := a.sizeof(sig.Recv().Type())
		a.copy(params, a.valueNode(args[0]), sz)
		params += nodeid(sz)
		args = args[1:]
	}

	// Copy actual parameters into formal params block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal results block to actual result.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genDynamicCall generates constraints for a dynamic function call.
func (a *analysis) genDynamicCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// pts(targets) will be the set of possible call targets.
	site.targets = a.valueNode(call.Value)

	// We add dynamic closure rules that store the arguments into
	// the P-block and load the results from the R-block of each
	// function discovered in pts(targets).

	sig := call.Signature()
	var offset uint32 = 1 // P/R block starts at offset 1
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.genStore(caller, call.Value, a.valueNode(arg), offset, sz)
		offset += sz
	}
	if result != 0 {
		a.genLoad(caller, result, call.Value, offset, a.sizeof(sig.Results()))
	}
}
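
// Illustrative sketch (hypothetical call; assumes a pointer occupies
// one logical field): for
//
//	var p func(x *int) *int
//	r := p(q)
//
// this emits store(p, q, offset=1, size=1) and load(r, p, offset=2,
// size=1): the argument flows into each target's P-block at offset 1,
// and the result flows out of the R-block just after the params.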

// genInvoke generates constraints for a dynamic method invocation.
func (a *analysis) genInvoke(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	if call.Value.Type() == a.reflectType {
		a.genInvokeReflectType(caller, site, call, result)
		return
	}

	sig := call.Signature()

	// Allocate a contiguous targets/params/results block for this call.
	block := a.nextNode()
	// pts(targets) will be the set of possible call targets
	site.targets = a.addOneNode(sig, "invoke.targets", nil)
	p := a.addNodes(sig.Params(), "invoke.params")
	r := a.addNodes(sig.Results(), "invoke.results")

	// Copy the actual parameters into the call's params block.
	for i, n := 0, sig.Params().Len(); i < n; i++ {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(p, a.valueNode(call.Args[i]), sz)
		p += nodeid(sz)
	}
	// Copy the call's results block to the actual results.
	if result != 0 {
		a.copy(result, r, a.sizeof(sig.Results()))
	}

	// We add a dynamic invoke constraint that will connect the
	// caller's and the callee's P/R blocks for each discovered
	// call target.
	a.addConstraint(&invokeConstraint{call.Method, a.valueNode(call.Value), block})
}
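
// Illustrative layout (inferred from the allocations above): the block
// is [targets, P..., R...]. As the solver discovers each concrete
// method in pts(call.Value), the invoke constraint wires that method's
// own P/R block to this per-callsite block.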

// genInvokeReflectType is a specialization of genInvoke where the
// receiver type is a reflect.Type, under the assumption that there
// can be at most one implementation of this interface, *reflect.rtype.
//
// (Though this may appear to be an instance of a pattern---method
// calls on interfaces known to have exactly one implementation---in
// practice it occurs rarely, so we special case for reflect.Type.)
//
// In effect we treat this:
//	var rt reflect.Type = ...
//	rt.F()
// as this:
//	rt.(*reflect.rtype).F()
//
func (a *analysis) genInvokeReflectType(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// Unpack receiver into rtype
	rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
	recv := a.valueNode(call.Value)
	a.typeAssert(a.reflectRtypePtr, rtype, recv, true)

	// Look up the concrete method.
	fn := a.prog.LookupMethod(a.reflectRtypePtr, call.Method.Pkg(), call.Method.Name())

	obj := a.makeFunctionObject(fn, site) // new contour for this call
	a.callEdge(caller, site, obj)

	// From now on, it's essentially a static call, but little is
	// gained by factoring together the code for both cases.

	sig := fn.Signature // concrete method
	targets := a.addOneNode(sig, "call.targets", nil)
	a.addressOf(sig, targets, obj) // (a singleton)

	// Copy receiver.
	params := a.funcParams(obj)
	a.copy(params, rtype, 1)
	params++

	// Copy actual parameters into formal P-block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal R-block to actual R-block.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genCall generates constraints for call instruction instr.
func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
	call := instr.Common()

	// Intrinsic implementations of built-in functions.
	if _, ok := call.Value.(*ssa.Builtin); ok {
		a.genBuiltinCall(instr, caller)
		return
	}

	var result nodeid
	if v := instr.Value(); v != nil {
		result = a.valueNode(v)
	}

	site := &callsite{instr: instr}
	if call.StaticCallee() != nil {
		a.genStaticCall(caller, site, call, result)
	} else if call.IsInvoke() {
		a.genInvoke(caller, site, call, result)
	} else {
		a.genDynamicCall(caller, site, call, result)
	}

	caller.sites = append(caller.sites, site)

	if a.log != nil {
		// TODO(adonovan): debug: improve log message.
		fmt.Fprintf(a.log, "\t%s to targets %s from %s\n", site, site.targets, caller)
	}
}

// objectNode returns the object to which v points, if known.
// In other words, if the points-to set of v is a singleton, it
// returns the sole label, zero otherwise.
//
// We exploit this information to make the generated constraints less
// dynamic. For example, a complex load constraint can be replaced by
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singleton points-to sets:
//	Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
// Others may be singletons depending on their operands:
//	FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice.
//
// Idempotent. Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g. IndexAddr(FieldAddr(Alloc)).
//
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
	switch v.(type) {
	case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
		// Global object.
		obj, ok := a.globalobj[v]
		if !ok {
			switch v := v.(type) {
			case *ssa.Global:
				obj = a.nextNode()
				a.addNodes(mustDeref(v.Type()), "global")
				a.endObject(obj, nil, v)

			case *ssa.Function:
				obj = a.makeFunctionObject(v, nil)

			case *ssa.Const:
				// not addressable

			case *ssa.FreeVar:
				// not addressable
			}

			if a.log != nil {
				fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
			}
			a.globalobj[v] = obj
		}
		return obj
	}

	// Local object.
	obj, ok := a.localobj[v]
	if !ok {
		switch v := v.(type) {
		case *ssa.Alloc:
			obj = a.nextNode()
			a.addNodes(mustDeref(v.Type()), "alloc")
			a.endObject(obj, cgn, v)

		case *ssa.MakeSlice:
			obj = a.nextNode()
			a.addNodes(sliceToArray(v.Type()), "makeslice")
			a.endObject(obj, cgn, v)

		case *ssa.MakeChan:
			obj = a.nextNode()
			a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
			a.endObject(obj, cgn, v)

		case *ssa.MakeMap:
			obj = a.nextNode()
			tmap := v.Type().Underlying().(*types.Map)
			a.addNodes(tmap.Key(), "makemap.key")
			elem := a.addNodes(tmap.Elem(), "makemap.value")

			// To update the value field, MapUpdate
			// generates store-with-offset constraints which
			// the presolver can't model, so we must mark
			// those nodes indirect.
			for id, end := elem, elem+nodeid(a.sizeof(tmap.Elem())); id < end; id++ {
				a.mapValues = append(a.mapValues, id)
			}
			a.endObject(obj, cgn, v)

		case *ssa.MakeInterface:
			tConc := v.X.Type()
			obj = a.makeTagged(tConc, cgn, v)

			// Copy the value into it, if nontrivial.
			if x := a.valueNode(v.X); x != 0 {
				a.copy(obj+1, x, a.sizeof(tConc))
			}

		case *ssa.FieldAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
			}

		case *ssa.IndexAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + 1
			}

		case *ssa.Slice:
			obj = a.objectNode(cgn, v.X)

		case *ssa.Convert:
			// TODO(adonovan): opt: handle these cases too:
			// - unsafe.Pointer->*T conversion acts like Alloc
			// - string->[]byte/[]rune conversion acts like MakeSlice
		}

		if a.log != nil {
			fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
		}
		a.localobj[v] = obj
	}
	return obj
}
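
// Illustrative example (hypothetical SSA values, pseudo-syntax): for
//
//	t0 = new S      // *ssa.Alloc: always a singleton object
//	t1 = &t0.f      // *ssa.FieldAddr
//
// objectNode(t1) recurses to the Alloc's object and returns it offset
// by f's position, so pts(t1) is known a priori to be a singleton.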

// genLoad generates constraints for result = *(ptr + offset).
func (a *analysis) genLoad(cgn *cgnode, result nodeid, ptr ssa.Value, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply loadConstraint.solve().
		a.copy(result, obj+nodeid(offset), sizeof)
	} else {
		a.load(result, a.valueNode(ptr), offset, sizeof)
	}
}

// genOffsetAddr generates constraints for a 'v=ptr.field' (FieldAddr)
// or 'v=ptr[*]' (IndexAddr) instruction v.
func (a *analysis) genOffsetAddr(cgn *cgnode, v ssa.Value, ptr nodeid, offset uint32) {
	dst := a.valueNode(v)
	if obj := a.objectNode(cgn, v); obj != 0 {
		// Pre-apply offsetAddrConstraint.solve().
		a.addressOf(v.Type(), dst, obj)
	} else {
		a.offsetAddr(v.Type(), dst, ptr, offset)
	}
}

// genStore generates constraints for *(ptr + offset) = val.
func (a *analysis) genStore(cgn *cgnode, ptr ssa.Value, val nodeid, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply storeConstraint.solve().
		a.copy(obj+nodeid(offset), val, sizeof)
	} else {
		a.store(a.valueNode(ptr), val, offset, sizeof)
	}
}
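
// Illustrative note: continuing the example above, a store *t1 = q
// where objectNode(t1) is known becomes copy(obj+offset, q, size)
// rather than a store constraint: the solver rule is pre-applied at
// generation time, leaving less dynamic work for the solver.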

// genInstr generates constraints for instruction instr in context cgn.
func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
	if a.log != nil {
		var prefix string
		if val, ok := instr.(ssa.Value); ok {
			prefix = val.Name() + " = "
		}
		fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
	}

	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op.

	case *ssa.UnOp:
		switch instr.Op {
		case token.ARROW: // <-x
			// We can ignore instr.CommaOk because the node we're
			// altering is always at zero offset relative to instr
			tElem := instr.X.Type().Underlying().(*types.Chan).Elem()
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(tElem))

		case token.MUL: // *x
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))

		default:
			// NOT, SUB, XOR: no-op.
		}

	case *ssa.BinOp:
		// All no-ops.

	case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
		a.genCall(cgn, instr)

	case *ssa.ChangeType:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.Convert:
		a.genConv(instr, cgn)

	case *ssa.Extract:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.Tuple, instr.Index),
			a.sizeof(instr.Type()))

	case *ssa.FieldAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
			a.offsetOf(mustDeref(instr.X.Type()), instr.Field))

	case *ssa.IndexAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)

	case *ssa.Field:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.X, instr.Field),
			a.sizeof(instr.Type()))

	case *ssa.Index:
		a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))

	case *ssa.Select:
		recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
		for _, st := range instr.States {
			elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
			switch st.Dir {
			case types.RecvOnly:
				a.genLoad(cgn, recv, st.Chan, 0, elemSize)
				recv += nodeid(elemSize)

			case types.SendOnly:
				a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
			}
		}

	case *ssa.Return:
		results := a.funcResults(cgn.obj)
		for _, r := range instr.Results {
			sz := a.sizeof(r.Type())
			a.copy(results, a.valueNode(r), sz)
			results += nodeid(sz)
		}

	case *ssa.Send:
		a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))

	case *ssa.Store:
		a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))

	case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
		v := instr.(ssa.Value)
		a.addressOf(v.Type(), a.valueNode(v), a.objectNode(cgn, v))

	case *ssa.ChangeInterface:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.TypeAssert:
		a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)

	case *ssa.Slice:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.If, *ssa.Jump:
		// no-op.

	case *ssa.Phi:
		sz := a.sizeof(instr.Type())
		for _, e := range instr.Edges {
			a.copy(a.valueNode(instr), a.valueNode(e), sz)
		}

	case *ssa.MakeClosure:
		fn := instr.Fn.(*ssa.Function)
		a.copy(a.valueNode(instr), a.valueNode(fn), 1)
		// Free variables are treated like global variables.
		for i, b := range instr.Bindings {
			a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
		}

	case *ssa.RunDefers:
		// The analysis is flow insensitive, so we just "call"
		// defers as we encounter them.

	case *ssa.Range:
		// Do nothing.  Next{Iter: *ssa.Range} handles this case.

	case *ssa.Next:
		if !instr.IsString { // map
			// Assumes that Next is always directly applied to a Range result.
			theMap := instr.Iter.(*ssa.Range).X
			tMap := theMap.Type().Underlying().(*types.Map)

			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())

			// The k/v components of the Next tuple may each be invalid.
			tTuple := instr.Type().(*types.Tuple)

			// Load from the map's (k,v) into the tuple's (ok, k, v).
			osrc := uint32(0) // offset within map object
			odst := uint32(1) // offset within tuple (initially just after 'ok bool')
			sz := uint32(0)   // amount to copy

			// Is key valid?
			if tTuple.At(1).Type() != tInvalid {
				sz += ksize
			} else {
				odst += ksize
				osrc += ksize
			}
			// Is value valid?
			if tTuple.At(2).Type() != tInvalid {
				sz += vsize
			}

			a.genLoad(cgn, a.valueNode(instr)+nodeid(odst), theMap, osrc, sz)
		}

	case *ssa.Lookup:
		if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
			// CommaOk can be ignored: field 0 is a no-op.
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())
			a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
		}

	case *ssa.MapUpdate:
		tmap := instr.Map.Type().Underlying().(*types.Map)
		ksize := a.sizeof(tmap.Key())
		vsize := a.sizeof(tmap.Elem())
		a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
		a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)

	case *ssa.Panic:
		a.copy(a.panicNode, a.valueNode(instr.X), 1)

	default:
		panic(fmt.Sprintf("unimplemented: %T", instr))
	}
}

func (a *analysis) makeCGNode(fn *ssa.Function, obj nodeid, callersite *callsite) *cgnode {
	cgn := &cgnode{fn: fn, obj: obj, callersite: callersite}
	a.cgnodes = append(a.cgnodes, cgn)
	return cgn
}

// genRootCalls generates the synthetic root of the callgraph and the
// initial calls from it to the analysis scope, such as main, a test
// or a library.
//
func (a *analysis) genRootCalls() *cgnode {
	r := a.prog.NewFunction("<root>", new(types.Signature), "root of callgraph")
	root := a.makeCGNode(r, 0, nil)

	// TODO(adonovan): make an ssa utility to construct an actual
	// root function so we don't need to special-case site-less
	// call edges.

	// For each main package, call main.init(), main.main().
	for _, mainPkg := range a.config.Mains {
		main := mainPkg.Func("main")
		if main == nil {
			panic(fmt.Sprintf("%s has no main function", mainPkg))
		}

		targets := a.addOneNode(main.Signature, "root.targets", nil)
		site := &callsite{targets: targets}
		root.sites = append(root.sites, site)
		for _, fn := range [2]*ssa.Function{mainPkg.Func("init"), main} {
			if a.log != nil {
				fmt.Fprintf(a.log, "\troot call to %s:\n", fn)
			}
			a.copy(targets, a.valueNode(fn), 1)
		}
	}

	return root
}

// genFunc generates constraints for function fn.
func (a *analysis) genFunc(cgn *cgnode) {
	fn := cgn.fn

	impl := a.findIntrinsic(fn)

	if a.log != nil {
		fmt.Fprintf(a.log, "\n\n==== Generating constraints for %s, %s\n", cgn, cgn.contour())

		// Hack: don't display body if intrinsic.
		if impl != nil {
			fn2 := *cgn.fn // copy
			fn2.Locals = nil
			fn2.Blocks = nil
			fn2.WriteTo(a.log)
		} else {
			cgn.fn.WriteTo(a.log)
		}
	}

	if impl != nil {
		impl(a, cgn)
		return
	}

	if fn.Blocks == nil {
		// External function with no intrinsic treatment.
		// We'll warn about calls to such functions at the end.
		return
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "; Creating nodes for local values")
	}

	a.localval = make(map[ssa.Value]nodeid)
	a.localobj = make(map[ssa.Value]nodeid)

	// The value nodes for the params are in the func object block.
	params := a.funcParams(cgn.obj)
	for _, p := range fn.Params {
		a.setValueNode(p, params, cgn)
		params += nodeid(a.sizeof(p.Type()))
	}

	// Free variables have global cardinality:
	// the outer function sets them with MakeClosure;
	// the inner function accesses them with FreeVar.
	//
	// TODO(adonovan): treat free vars context-sensitively.

	// Create value nodes for all value instructions
	// since SSA may contain forward references.
	var space [10]*ssa.Value
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *ssa.Range:
				// do nothing: it has a funky type,
				// and *ssa.Next does all the work.

			case ssa.Value:
				var comment string
				if a.log != nil {
					comment = instr.Name()
				}
				id := a.addNodes(instr.Type(), comment)
				a.setValueNode(instr, id, cgn)
			}

			// Record all address-taken functions (for presolver).
			rands := instr.Operands(space[:0])
			if call, ok := instr.(ssa.CallInstruction); ok && !call.Common().IsInvoke() {
				// Skip CallCommon.Value in "call" mode.
				// TODO(adonovan): fix: relies on unspecified ordering.  Specify it.
				rands = rands[1:]
			}
			for _, rand := range rands {
				if atf, ok := (*rand).(*ssa.Function); ok {
					a.atFuncs[atf] = true
				}
			}
		}
	}

	// Generate constraints for instructions.
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			a.genInstr(cgn, instr)
		}
	}

	a.localval = nil
	a.localobj = nil
}

// genMethodsOf generates nodes and constraints for all methods of type T.
func (a *analysis) genMethodsOf(T types.Type) {
	itf := isInterface(T)

	// TODO(adonovan): can we skip this entirely if itf is true?
	// I think so, but the answer may depend on reflection.
	mset := a.prog.MethodSets.MethodSet(T)
	for i, n := 0, mset.Len(); i < n; i++ {
		m := a.prog.MethodValue(mset.At(i))
		a.valueNode(m)

		if !itf {
			// Methods of concrete types are address-taken functions.
			a.atFuncs[m] = true
		}
	}
}

// generate generates offline constraints for the entire program.
func (a *analysis) generate() {
	start("Constraint generation")
	if a.log != nil {
		fmt.Fprintln(a.log, "==== Generating constraints")
	}

	// Create a dummy node since we use the nodeid 0 for
	// non-pointerlike variables.
	a.addNodes(tInvalid, "(zero)")

	// Create the global node for panic values.
	a.panicNode = a.addNodes(tEface, "panic")

	// Create nodes and constraints for all methods of reflect.rtype.
	// (Shared contours are used by dynamic calls to reflect.Type
	// methods---typically just String().)
	if rtype := a.reflectRtypePtr; rtype != nil {
		a.genMethodsOf(rtype)
	}

	root := a.genRootCalls()

	if a.config.BuildCallGraph {
		a.result.CallGraph = callgraph.New(root.fn)
	}

	// Create nodes and constraints for all methods of all types
	// that are dynamically accessible via reflection or interfaces.
	for _, T := range a.prog.RuntimeTypes() {
		a.genMethodsOf(T)
	}

	// Generate constraints for functions as they become reachable
	// from the roots.  (No constraints are generated for functions
	// that are dead in this analysis scope.)
	for len(a.genq) > 0 {
		cgn := a.genq[0]
		a.genq = a.genq[1:]
		a.genFunc(cgn)
	}

	// The runtime magically allocates os.Args; so should we.
	if os := a.prog.ImportedPackage("os"); os != nil {
		// In effect:  os.Args = new([1]string)[:]
		T := types.NewSlice(types.Typ[types.String])
		obj := a.addNodes(sliceToArray(T), "<command-line args>")
		a.endObject(obj, nil, "<command-line args>")
		a.addressOf(T, a.objectNode(nil, os.Var("Args")), obj)
	}

	// Discard generation state, to avoid confusion after node renumbering.
	a.panicNode = 0
	a.globalval = nil
	a.localval = nil
	a.localobj = nil

	stop("Constraint generation")
}