interpreter.go raw

   1  package interp
   2  
   3  import (
   4  	"errors"
   5  	"fmt"
   6  	"math"
   7  	"os"
   8  	"strconv"
   9  	"strings"
  10  	"time"
  11  
  12  	"tinygo.org/x/go-llvm"
  13  )
  14  
  15  func (r *runner) run(fn *function, params []value, parentMem *memoryView, indent string) (value, memoryView, *Error) {
  16  	mem := memoryView{r: r, parent: parentMem}
  17  	locals := make([]value, len(fn.locals))
  18  	r.callsExecuted++
  19  
  20  	// Parameters are considered a kind of local values.
  21  	for i, param := range params {
  22  		locals[i] = param
  23  	}
  24  
  25  	// Track what blocks have run instructions at runtime.
  26  	// This is used to prevent unrolling.
  27  	var runtimeBlocks map[int]struct{}
  28  
  29  	// Start with the first basic block and the first instruction.
  30  	// Branch instructions may modify both bb and instIndex when branching.
  31  	bb := fn.blocks[0]
  32  	currentBB := 0
  33  	lastBB := -1 // last basic block is undefined, only defined after a branch
  34  	var operands []value
  35  	startRTInsts := len(mem.instructions)
  36  	for instIndex := 0; instIndex < len(bb.instructions); instIndex++ {
  37  		if instIndex == 0 {
  38  			// This is the start of a new basic block.
  39  			if len(mem.instructions) != startRTInsts {
  40  				if _, ok := runtimeBlocks[lastBB]; ok {
  41  					// This loop has been unrolled.
  42  					// Avoid doing this, as it can result in a large amount of extra machine code.
  43  					// This currently uses the branch from the last block, as there is no available information to give a better location.
  44  					lastBBInsts := fn.blocks[lastBB].instructions
  45  					return nil, mem, r.errorAt(lastBBInsts[len(lastBBInsts)-1], errLoopUnrolled)
  46  				}
  47  
  48  				// Flag the last block as having run stuff at runtime.
  49  				if runtimeBlocks == nil {
  50  					runtimeBlocks = make(map[int]struct{})
  51  				}
  52  				runtimeBlocks[lastBB] = struct{}{}
  53  
  54  				// Reset the block-start runtime instructions counter.
  55  				startRTInsts = len(mem.instructions)
  56  			}
  57  
  58  			// There may be PHI nodes that need to be resolved. Resolve all PHI
  59  			// nodes before continuing with regular instructions.
  60  			// PHI nodes need to be treated specially because they can have a
  61  			// mutual dependency:
  62  			//   for.loop:
  63  			//     %a = phi i8 [ 1, %entry ], [ %b, %for.loop ]
  64  			//     %b = phi i8 [ 3, %entry ], [ %a, %for.loop ]
  65  			// If these PHI nodes are processed like a regular instruction, %a
  66  			// and %b are both 3 on the second iteration of the loop because %b
  67  			// loads the value of %a from the second iteration, while it should
  68  			// load the value from the previous iteration. The correct behavior
  69  			// is that these two values swap each others place on each
  70  			// iteration.
  71  			var phiValues []value
  72  			var phiIndices []int
  73  			for _, inst := range bb.phiNodes {
  74  				var result value
  75  				for i := 0; i < len(inst.operands); i += 2 {
  76  					if int(inst.operands[i].(literalValue).value.(uint32)) == lastBB {
  77  						incoming := inst.operands[i+1]
  78  						if local, ok := incoming.(localValue); ok {
  79  							result = locals[fn.locals[local.value]]
  80  						} else {
  81  							result = incoming
  82  						}
  83  						break
  84  					}
  85  				}
  86  				if r.debug {
  87  					fmt.Fprintln(os.Stderr, indent+"phi", inst.operands, "->", result)
  88  				}
  89  				if result == nil {
  90  					panic("could not find PHI input")
  91  				}
  92  				phiValues = append(phiValues, result)
  93  				phiIndices = append(phiIndices, inst.localIndex)
  94  			}
  95  			for i, value := range phiValues {
  96  				locals[phiIndices[i]] = value
  97  			}
  98  		}
  99  
 100  		inst := bb.instructions[instIndex]
 101  		operands = operands[:0]
 102  		isRuntimeInst := false
 103  		if inst.opcode != llvm.PHI {
 104  			for _, v := range inst.operands {
 105  				if v, ok := v.(localValue); ok {
 106  					index, ok := fn.locals[v.value]
 107  					if !ok {
 108  						// This is a localValue that is not local to the
 109  						// function. An example would be an inline assembly call
 110  						// operand.
 111  						isRuntimeInst = true
 112  						break
 113  					}
 114  					localVal := locals[index]
 115  					if localVal == nil {
 116  						// Trying to read a function-local value before it is
 117  						// set.
 118  						return nil, mem, r.errorAt(inst, errors.New("interp: local not defined"))
 119  					} else {
 120  						operands = append(operands, localVal)
 121  						if _, ok := localVal.(localValue); ok {
 122  							// The function-local value is still just a
 123  							// localValue (which can't be interpreted at compile
 124  							// time). Not sure whether this ever happens in
 125  							// practice.
 126  							isRuntimeInst = true
 127  							break
 128  						}
 129  						continue
 130  					}
 131  				}
 132  				operands = append(operands, v)
 133  			}
 134  		}
 135  		if isRuntimeInst {
 136  			err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 137  			if err != nil {
 138  				return nil, mem, err
 139  			}
 140  			continue
 141  		}
 142  		switch inst.opcode {
 143  		case llvm.Ret:
 144  			if time.Since(r.start) > r.timeout {
 145  				// Running for more than the allowed timeout; This shouldn't happen, but it does.
 146  				// See moxie/issues/2124
 147  				return nil, mem, r.errorAt(fn.blocks[0].instructions[0], fmt.Errorf("interp: running for more than %s, timing out (executed calls: %d)", r.timeout, r.callsExecuted))
 148  			}
 149  
 150  			if len(operands) != 0 {
 151  				if r.debug {
 152  					fmt.Fprintln(os.Stderr, indent+"ret", operands[0])
 153  				}
 154  				// Return instruction has a value to return.
 155  				return operands[0], mem, nil
 156  			}
 157  			if r.debug {
 158  				fmt.Fprintln(os.Stderr, indent+"ret")
 159  			}
 160  			// Return instruction doesn't return anything, it's just 'ret void'.
 161  			return nil, mem, nil
 162  		case llvm.Br:
 163  			switch len(operands) {
 164  			case 1:
 165  				// Unconditional branch: [nextBB]
 166  				lastBB = currentBB
 167  				currentBB = int(operands[0].(literalValue).value.(uint32))
 168  				bb = fn.blocks[currentBB]
 169  				instIndex = -1 // start at 0 the next cycle
 170  				if r.debug {
 171  					fmt.Fprintln(os.Stderr, indent+"br", operands, "->", currentBB)
 172  				}
 173  			case 3:
 174  				// Conditional branch: [cond, thenBB, elseBB]
 175  				lastBB = currentBB
 176  				switch operands[0].Uint(r) {
 177  				case 1: // true -> thenBB
 178  					currentBB = int(operands[1].(literalValue).value.(uint32))
 179  				case 0: // false -> elseBB
 180  					currentBB = int(operands[2].(literalValue).value.(uint32))
 181  				default:
 182  					panic("bool should be 0 or 1")
 183  				}
 184  				if r.debug {
 185  					fmt.Fprintln(os.Stderr, indent+"br", operands, "->", currentBB)
 186  				}
 187  				bb = fn.blocks[currentBB]
 188  				instIndex = -1 // start at 0 the next cycle
 189  			default:
 190  				panic("unknown operands length")
 191  			}
 192  		case llvm.Switch:
 193  			// Switch statement: [value, defaultLabel, case0, label0, case1, label1, ...]
 194  			value := operands[0].Uint(r)
 195  			targetLabel := operands[1].Uint(r) // default label
 196  			// Do a lazy switch by iterating over all cases.
 197  			for i := 2; i < len(operands); i += 2 {
 198  				if value == operands[i].Uint(r) {
 199  					targetLabel = operands[i+1].Uint(r)
 200  					break
 201  				}
 202  			}
 203  			lastBB = currentBB
 204  			currentBB = int(targetLabel)
 205  			bb = fn.blocks[currentBB]
 206  			instIndex = -1 // start at 0 the next cycle
 207  			if r.debug {
 208  				fmt.Fprintln(os.Stderr, indent+"switch", operands, "->", currentBB)
 209  			}
 210  		case llvm.Select:
 211  			// Select is much like a ternary operator: it picks a result from
 212  			// the second and third operand based on the boolean first operand.
 213  			var result value
 214  			switch operands[0].Uint(r) {
 215  			case 1:
 216  				result = operands[1]
 217  			case 0:
 218  				result = operands[2]
 219  			default:
 220  				panic("boolean must be 0 or 1")
 221  			}
 222  			locals[inst.localIndex] = result
 223  			if r.debug {
 224  				fmt.Fprintln(os.Stderr, indent+"select", operands, "->", result)
 225  			}
 226  		case llvm.Call:
 227  			// A call instruction can either be a regular call or a runtime intrinsic.
 228  			fnPtr, err := operands[0].asPointer(r)
 229  			if err != nil {
 230  				return nil, mem, r.errorAt(inst, err)
 231  			}
 232  			callFn := r.getFunction(fnPtr.llvmValue(&mem))
 233  			switch {
 234  			case callFn.name == "runtime.trackPointer":
 235  				// Allocas and such are created as globals, so don't need a
 236  				// runtime.trackPointer.
 237  				// Unless the object is allocated at runtime for example, in
 238  				// which case this call won't even get to this point but will
 239  				// already be emitted in initAll.
 240  				continue
 241  			case strings.HasPrefix(callFn.name, "runtime.print") || callFn.name == "runtime._panic" || callFn.name == "runtime.hashmapGet" || callFn.name == "runtime.hashmapInterfaceHash" ||
 242  				callFn.name == "os.runtime_args" || callFn.name == "internal/task.start" || callFn.name == "internal/task.Current" ||
 243  				callFn.name == "time.startTimer" || callFn.name == "time.stopTimer" || callFn.name == "time.resetTimer":
 244  				// These functions should be run at runtime. Specifically:
 245  				//   * Print and panic functions are best emitted directly without
 246  				//     interpreting them, otherwise we get a ton of putchar (etc.)
 247  				//     calls.
 248  				//   * runtime.hashmapGet tries to access the map value directly.
 249  				//     This is not possible as the map value is treated as a special
 250  				//     kind of object in this package.
 251  				//   * os.runtime_args reads globals that are initialized outside
 252  				//     the view of the interp package so it always needs to be run
 253  				//     at runtime.
  254  				//   * internal/task.start, internal/task.Current: start and read scheduler state,
 255  				//     which is modified elsewhere.
 256  				//   * Timer functions access runtime internal state which may
 257  				//     not be initialized.
 258  				err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 259  				if err != nil {
 260  					return nil, mem, err
 261  				}
 262  			case callFn.name == "internal/task.Pause":
 263  				// Task scheduling isn't possible at compile time.
 264  				return nil, mem, r.errorAt(inst, errUnsupportedRuntimeInst)
 265  			case callFn.name == "runtime.nanotime" && r.pkgName == "time":
 266  				// The time package contains a call to runtime.nanotime.
 267  				// This appears to be to work around a limitation in Windows
 268  				// Server 2008:
 269  				//   > Monotonic times are reported as offsets from startNano.
 270  				//   > We initialize startNano to runtimeNano() - 1 so that on systems where
 271  				//   > monotonic time resolution is fairly low (e.g. Windows 2008
 272  				//   > which appears to have a default resolution of 15ms),
 273  				//   > we avoid ever reporting a monotonic time of 0.
 274  				//   > (Callers may want to use 0 as "time not set".)
 275  				// Simply let runtime.nanotime return 0 in this case, which
 276  				// should be fine and avoids a call to runtime.nanotime. It
 277  				// means that monotonic time in the time package is counted from
 278  				// time.Time{}.Sub(1), which should be fine.
 279  				locals[inst.localIndex] = literalValue{uint64(0)}
 280  			case callFn.name == "runtime.alloc":
 281  				// Allocate heap memory. At compile time, this is instead done
 282  				// by creating a global variable.
 283  
 284  				// Get the requested memory size to be allocated.
 285  				size := operands[1].Uint(r)
 286  
 287  				// Get the object layout, if it is available.
 288  				llvmLayoutType := r.getLLVMTypeFromLayout(operands[2])
 289  
 290  				// Get the alignment of the memory to be allocated.
 291  				alignment := 0 // use default alignment if unset
 292  				alignAttr := inst.llvmInst.GetCallSiteEnumAttribute(0, llvm.AttributeKindID("align"))
 293  				if !alignAttr.IsNil() {
 294  					alignment = int(alignAttr.GetEnumValue())
 295  				}
 296  
 297  				// Create the object.
 298  				alloc := object{
 299  					globalName:     r.pkgName + "$alloc",
 300  					align:          alignment,
 301  					llvmLayoutType: llvmLayoutType,
 302  					buffer:         newRawValue(uint32(size)),
 303  					size:           uint32(size),
 304  				}
 305  				index := len(r.objects)
 306  				r.objects = append(r.objects, alloc)
 307  
 308  				// And create a pointer to this object, for working with it (so
 309  				// that stores to it copy it, etc).
 310  				ptr := newPointerValue(r, index, 0)
 311  				if r.debug {
 312  					fmt.Fprintln(os.Stderr, indent+"runtime.alloc:", size, "->", ptr)
 313  				}
 314  				locals[inst.localIndex] = ptr
 315  			case callFn.name == "runtime.sliceCopy":
 316  				// sliceCopy implements the built-in copy function for slices.
 317  				// It is implemented here so that it can be used even if the
 318  				// runtime implementation is not available. Doing it this way
 319  				// may also be faster.
 320  				// Code:
 321  				// func sliceCopy(dst, src unsafe.Pointer, dstLen, srcLen uintptr, elemSize uintptr) int {
 322  				//     n := srcLen
 323  				//     if n > dstLen {
 324  				//         n = dstLen
 325  				//     }
 326  				//     memmove(dst, src, n*elemSize)
 327  				//     return int(n)
 328  				// }
 329  				dstLen := operands[3].Uint(r)
 330  				srcLen := operands[4].Uint(r)
 331  				elemSize := operands[5].Uint(r)
 332  				n := srcLen
 333  				if n > dstLen {
 334  					n = dstLen
 335  				}
 336  				if r.debug {
 337  					fmt.Fprintln(os.Stderr, indent+"copy:", operands[1], operands[2], n)
 338  				}
 339  				if n != 0 {
 340  					// Only try to copy bytes when there are any bytes to copy.
 341  					// This is not just an optimization. If one of the slices
 342  					// (or both) are nil, the asPointer method call will fail
 343  					// even though copying a nil slice is allowed.
 344  					dst, err := operands[1].asPointer(r)
 345  					if err != nil {
 346  						return nil, mem, r.errorAt(inst, err)
 347  					}
 348  					src, err := operands[2].asPointer(r)
 349  					if err != nil {
 350  						return nil, mem, r.errorAt(inst, err)
 351  					}
 352  					if mem.hasExternalStore(src) || mem.hasExternalLoadOrStore(dst) {
 353  						// These are the same checks as there are on llvm.Load
 354  						// and llvm.Store in the interpreter. Copying is
 355  						// essentially loading from the source array and storing
 356  						// to the destination array, hence why we need to do the
 357  						// same checks here.
 358  						// This fixes the following bug:
 359  						// https://moxie/issues/3890
 360  						err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 361  						if err != nil {
 362  							return nil, mem, err
 363  						}
 364  						continue
 365  					}
 366  					nBytes := uint32(n * elemSize)
 367  					srcObj := mem.get(src.index())
 368  					dstObj := mem.getWritable(dst.index())
 369  					if srcObj.buffer == nil || dstObj.buffer == nil {
 370  						// If the buffer is nil, it means the slice is external.
 371  						// This can happen for example when copying data out of
 372  						// a //go:embed slice, which is not available at interp
 373  						// time.
 374  						// See: https://moxie/issues/4895
 375  						err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 376  						if err != nil {
 377  							return nil, mem, err
 378  						}
 379  						continue
 380  					}
 381  					dstBuf := dstObj.buffer.asRawValue(r)
 382  					srcBuf := srcObj.buffer.asRawValue(r)
 383  					copy(dstBuf.buf[dst.offset():dst.offset()+nBytes], srcBuf.buf[src.offset():])
 384  					dstObj.buffer = dstBuf
 385  					mem.put(dst.index(), dstObj)
 386  				}
 387  				locals[inst.localIndex] = makeLiteralInt(n, inst.llvmInst.Type().IntTypeWidth())
 388  			case strings.HasPrefix(callFn.name, "llvm.memcpy.p0") || strings.HasPrefix(callFn.name, "llvm.memmove.p0"):
 389  				// Copy a block of memory from one pointer to another.
 390  				dst, err := operands[1].asPointer(r)
 391  				if err != nil {
 392  					return nil, mem, r.errorAt(inst, err)
 393  				}
 394  				src, err := operands[2].asPointer(r)
 395  				if err != nil {
 396  					return nil, mem, r.errorAt(inst, err)
 397  				}
 398  				nBytes := uint32(operands[3].Uint(r))
 399  				dstObj := mem.getWritable(dst.index())
 400  				dstBuf := dstObj.buffer.asRawValue(r)
 401  				if mem.get(src.index()).buffer == nil {
 402  					// Looks like the source buffer is not defined.
 403  					// This can happen with //extern or //go:embed.
 404  					return nil, mem, r.errorAt(inst, errUnsupportedRuntimeInst)
 405  				}
 406  				srcBuf := mem.get(src.index()).buffer.asRawValue(r)
 407  				copy(dstBuf.buf[dst.offset():dst.offset()+nBytes], srcBuf.buf[src.offset():])
 408  				dstObj.buffer = dstBuf
 409  				mem.put(dst.index(), dstObj)
 410  			case callFn.name == "runtime.typeAssert":
 411  				// This function must be implemented manually as it is normally
 412  				// implemented by the interface lowering pass.
 413  				if r.debug {
 414  					fmt.Fprintln(os.Stderr, indent+"typeassert:", operands[1:])
 415  				}
 416  				assertedType, err := operands[2].toLLVMValue(inst.llvmInst.Operand(1).Type(), &mem)
 417  				if err != nil {
 418  					return nil, mem, r.errorAt(inst, err)
 419  				}
 420  				actualType, err := operands[1].toLLVMValue(inst.llvmInst.Operand(0).Type(), &mem)
 421  				if err != nil {
 422  					return nil, mem, r.errorAt(inst, err)
 423  				}
 424  				if !actualType.IsAConstantInt().IsNil() && actualType.ZExtValue() == 0 {
 425  					locals[inst.localIndex] = literalValue{uint8(0)}
 426  					break
 427  				}
 428  				// Strip pointer casts (bitcast, getelementptr).
 429  				for !actualType.IsAConstantExpr().IsNil() {
 430  					opcode := actualType.Opcode()
 431  					if opcode != llvm.GetElementPtr && opcode != llvm.BitCast {
 432  						break
 433  					}
 434  					actualType = actualType.Operand(0)
 435  				}
 436  				if strings.TrimPrefix(actualType.Name(), "reflect/types.type:") == strings.TrimPrefix(assertedType.Name(), "reflect/types.typeid:") {
 437  					locals[inst.localIndex] = literalValue{uint8(1)}
 438  				} else {
 439  					locals[inst.localIndex] = literalValue{uint8(0)}
 440  				}
 441  			case callFn.name == "__moxie_interp_raise_test_error":
 442  				// Special function that will trigger an error.
 443  				// This is used to test error reporting.
 444  				return nil, mem, r.errorAt(inst, errors.New("test error"))
 445  			case strings.HasSuffix(callFn.name, ".$typeassert"):
 446  				if r.debug {
 447  					fmt.Fprintln(os.Stderr, indent+"interface assert:", operands[1:])
 448  				}
 449  
 450  				// Load various values for the interface implements check below.
 451  				typecodePtr, err := operands[1].asPointer(r)
 452  				if err != nil {
 453  					return nil, mem, r.errorAt(inst, err)
 454  				}
  455  				// typecodePtr always points to the numMethod field in the type
 456  				// description struct. The methodSet, when present, comes right
 457  				// before the numMethod field (the compiler doesn't generate
 458  				// method sets for concrete types without methods).
 459  				// Considering that the compiler doesn't emit interface type
  460  				// asserts for interfaces with no methods (as they always succeed)
 461  				// then if the offset is zero, this assert must always fail.
 462  				if typecodePtr.offset() == 0 {
 463  					locals[inst.localIndex] = literalValue{uint8(0)}
 464  					break
 465  				}
 466  				typecodePtrOffset, err := typecodePtr.addOffset(-int64(r.pointerSize))
 467  				if err != nil {
 468  					return nil, mem, r.errorAt(inst, err)
 469  				}
 470  				methodSetPtr, err := mem.load(typecodePtrOffset, r.pointerSize).asPointer(r)
 471  				if err != nil {
 472  					return nil, mem, r.errorAt(inst, err)
 473  				}
 474  				methodSet := mem.get(methodSetPtr.index()).llvmGlobal.Initializer()
 475  				numMethods := int(r.builder.CreateExtractValue(methodSet, 0, "").ZExtValue())
 476  				llvmFn := inst.llvmInst.CalledValue()
 477  				methodSetAttr := llvmFn.GetStringAttributeAtIndex(-1, "moxie-methods")
 478  				methodSetString := methodSetAttr.GetStringValue()
 479  
 480  				// Make a set of all the methods on the concrete type, for
 481  				// easier checking in the next step.
 482  				concreteTypeMethods := map[string]struct{}{}
 483  				for i := 0; i < numMethods; i++ {
 484  					methodInfo := r.builder.CreateExtractValue(methodSet, 1, "")
 485  					name := r.builder.CreateExtractValue(methodInfo, i, "").Name()
 486  					concreteTypeMethods[name] = struct{}{}
 487  				}
 488  
 489  				// Check whether all interface methods are also in the list
 490  				// of defined methods calculated above. This is the interface
 491  				// assert itself.
 492  				assertOk := uint8(1) // i1 true
 493  				for _, name := range strings.Split(methodSetString, "; ") {
 494  					if _, ok := concreteTypeMethods[name]; !ok {
 495  						// There is a method on the interface that is not
 496  						// implemented by the type. The assertion will fail.
 497  						assertOk = 0 // i1 false
 498  						break
 499  					}
 500  				}
 501  				// If assertOk is still 1, the assertion succeeded.
 502  				locals[inst.localIndex] = literalValue{assertOk}
 503  			case strings.HasSuffix(callFn.name, "$invoke"):
 504  				// This thunk is the interface method dispatcher: it is called
 505  				// with all regular parameters and a type code. It will then
 506  				// call the concrete method for it.
 507  				if r.debug {
 508  					fmt.Fprintln(os.Stderr, indent+"invoke method:", operands[1:])
 509  				}
 510  
 511  				// Load the type code and method set of the interface value.
 512  				typecodePtr, err := operands[len(operands)-2].asPointer(r)
 513  				if err != nil {
 514  					return nil, mem, r.errorAt(inst, err)
 515  				}
 516  				typecodePtrOffset, err := typecodePtr.addOffset(-int64(r.pointerSize))
 517  				if err != nil {
 518  					return nil, mem, r.errorAt(inst, err)
 519  				}
 520  				methodSetPtr, err := mem.load(typecodePtrOffset, r.pointerSize).asPointer(r)
 521  				if err != nil {
 522  					return nil, mem, r.errorAt(inst, err)
 523  				}
 524  				methodSet := mem.get(methodSetPtr.index()).llvmGlobal.Initializer()
 525  
 526  				// We don't need to load the interface method set.
 527  
 528  				// Load the signature of the to-be-called function.
 529  				llvmFn := inst.llvmInst.CalledValue()
 530  				invokeAttr := llvmFn.GetStringAttributeAtIndex(-1, "moxie-invoke")
 531  				invokeName := invokeAttr.GetStringValue()
 532  				signature := r.mod.NamedGlobal(invokeName)
 533  
 534  				// Iterate through all methods, looking for the one method that
 535  				// should be returned.
 536  				numMethods := int(r.builder.CreateExtractValue(methodSet, 0, "").ZExtValue())
 537  				var method llvm.Value
 538  				for i := 0; i < numMethods; i++ {
 539  					methodSignatureAgg := r.builder.CreateExtractValue(methodSet, 1, "")
 540  					methodSignature := r.builder.CreateExtractValue(methodSignatureAgg, i, "")
 541  					if methodSignature == signature {
 542  						methodAgg := r.builder.CreateExtractValue(methodSet, 2, "")
 543  						method = r.builder.CreateExtractValue(methodAgg, i, "")
 544  					}
 545  				}
 546  				if method.IsNil() {
 547  					return nil, mem, r.errorAt(inst, errors.New("could not find method: "+invokeName))
 548  				}
 549  
 550  				// Change the to-be-called function to the underlying method to
 551  				// be called and fall through to the default case.
 552  				callFn = r.getFunction(method)
 553  				fallthrough
 554  			default:
 555  				if len(callFn.blocks) == 0 {
 556  					// Call to a function declaration without a definition
 557  					// available.
 558  					err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 559  					if err != nil {
 560  						return nil, mem, err
 561  					}
 562  					continue
 563  				}
 564  				// Call a function with a definition available. Run it as usual,
 565  				// possibly trying to recover from it if it failed to execute.
 566  				if r.debug {
 567  					argStrings := make([]string, len(operands)-1)
 568  					for i, v := range operands[1:] {
 569  						argStrings[i] = v.String()
 570  					}
 571  					fmt.Fprintln(os.Stderr, indent+"call:", callFn.name+"("+strings.Join(argStrings, ", ")+")")
 572  				}
 573  				retval, callMem, callErr := r.run(callFn, operands[1:], &mem, indent+"    ")
 574  				if callErr != nil {
 575  					if isRecoverableError(callErr.Err) {
 576  						// This error can be recovered by doing the call at
 577  						// runtime instead of at compile time. But we need to
 578  						// revert any changes made by the call first.
 579  						if r.debug {
 580  							fmt.Fprintln(os.Stderr, indent+"!! revert because of error:", callErr.Error())
 581  						}
 582  						callMem.revert()
 583  						err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 584  						if err != nil {
 585  							return nil, mem, err
 586  						}
 587  						continue
 588  					}
 589  					// Add to the traceback, so that error handling code can see
 590  					// how this function got called.
 591  					callErr.Traceback = append(callErr.Traceback, ErrorLine{
 592  						Pos:  getPosition(inst.llvmInst),
 593  						Inst: inst.llvmInst.String(),
 594  					})
 595  					return nil, mem, callErr
 596  				}
 597  				locals[inst.localIndex] = retval
 598  				mem.extend(callMem)
 599  			}
 600  		case llvm.Load:
 601  			// Load instruction, loading some data from the topmost memory view.
 602  			ptr, err := operands[0].asPointer(r)
 603  			if err != nil {
 604  				return nil, mem, r.errorAt(inst, err)
 605  			}
 606  			size := operands[1].(literalValue).value.(uint64)
 607  			if inst.llvmInst.IsVolatile() || inst.llvmInst.Ordering() != llvm.AtomicOrderingNotAtomic || mem.hasExternalStore(ptr) {
 608  				// If there could be an external store (for example, because a
 609  				// pointer to the object was passed to a function that could not
 610  				// be interpreted at compile time) then the load must be done at
 611  				// runtime.
 612  				err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 613  				if err != nil {
 614  					return nil, mem, err
 615  				}
 616  				continue
 617  			}
 618  			result := mem.load(ptr, uint32(size))
 619  			if result == nil {
 620  				err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 621  				if err != nil {
 622  					return nil, mem, err
 623  				}
 624  				continue
 625  			}
 626  			if r.debug {
 627  				fmt.Fprintln(os.Stderr, indent+"load:", ptr, "->", result)
 628  			}
 629  			locals[inst.localIndex] = result
 630  		case llvm.Store:
 631  			// Store instruction. Create a new object in the memory view and
 632  			// store to that, to make it possible to roll back this store.
 633  			ptr, err := operands[1].asPointer(r)
 634  			if err != nil {
 635  				return nil, mem, r.errorAt(inst, err)
 636  			}
 637  			if inst.llvmInst.IsVolatile() || inst.llvmInst.Ordering() != llvm.AtomicOrderingNotAtomic || mem.hasExternalLoadOrStore(ptr) {
 638  				err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 639  				if err != nil {
 640  					return nil, mem, err
 641  				}
 642  				continue
 643  			}
 644  			val := operands[0]
 645  			if r.debug {
 646  				fmt.Fprintln(os.Stderr, indent+"store:", val, ptr)
 647  			}
 648  			ok := mem.store(val, ptr)
 649  			if !ok {
 650  				// Could not store the value, do it at runtime.
 651  				err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 652  				if err != nil {
 653  					return nil, mem, err
 654  				}
 655  			}
 656  		case llvm.Alloca:
 657  			// Alloca normally allocates some stack memory. In the interpreter,
 658  			// it allocates a global instead.
 659  			// This can likely be optimized, as all it really needs is an alloca
 660  			// in the initAll function and creating a global is wasteful for
 661  			// this purpose.
 662  
 663  			// Create the new object.
 664  			size := operands[0].(literalValue).value.(uint64)
 665  			alloca := object{
 666  				llvmType:   inst.llvmInst.AllocatedType(),
 667  				globalName: r.pkgName + "$alloca",
 668  				buffer:     newRawValue(uint32(size)),
 669  				size:       uint32(size),
 670  				align:      inst.llvmInst.Alignment(),
 671  			}
 672  			index := len(r.objects)
 673  			r.objects = append(r.objects, alloca)
 674  
 675  			// Create a pointer to this object (an alloca produces a pointer).
 676  			ptr := newPointerValue(r, index, 0)
 677  			if r.debug {
 678  				fmt.Fprintln(os.Stderr, indent+"alloca:", operands, "->", ptr)
 679  			}
 680  			locals[inst.localIndex] = ptr
 681  		case llvm.GetElementPtr:
 682  			// GetElementPtr does pointer arithmetic, changing the offset of the
 683  			// pointer into the underlying object.
 684  			var offset int64
 685  			for i := 1; i < len(operands); i += 2 {
 686  				index := operands[i].Int(r)
 687  				elementSize := operands[i+1].Int(r)
 688  				if elementSize < 0 {
 689  					// This is a struct field.
 690  					offset += index
 691  				} else {
 692  					// This is a normal GEP, probably an array index.
 693  					offset += elementSize * index
 694  				}
 695  			}
 696  			ptr, err := operands[0].asPointer(r)
 697  			if err != nil {
 698  				if err != errIntegerAsPointer {
 699  					return nil, mem, r.errorAt(inst, err)
 700  				}
 701  				// GEP on fixed pointer value (for example, memory-mapped I/O).
 702  				ptrValue := operands[0].Uint(r) + uint64(offset)
 703  				locals[inst.localIndex] = makeLiteralInt(ptrValue, int(operands[0].len(r)*8))
 704  				continue
 705  			}
 706  			ptr, err = ptr.addOffset(int64(offset))
 707  			if err != nil {
 708  				return nil, mem, r.errorAt(inst, err)
 709  			}
 710  			locals[inst.localIndex] = ptr
 711  			if r.debug {
 712  				fmt.Fprintln(os.Stderr, indent+"gep:", operands, "->", ptr)
 713  			}
 714  		case llvm.BitCast, llvm.IntToPtr, llvm.PtrToInt:
 715  			// Various bitcast-like instructions that all keep the same bits
 716  			// while changing the LLVM type.
 717  			// Because interp doesn't preserve the type, these operations are
 718  			// identity operations.
 719  			if r.debug {
 720  				fmt.Fprintln(os.Stderr, indent+instructionNameMap[inst.opcode]+":", operands[0])
 721  			}
 722  			locals[inst.localIndex] = operands[0]
 723  		case llvm.ExtractValue:
 724  			agg := operands[0].asRawValue(r)
 725  			offset := operands[1].(literalValue).value.(uint64)
 726  			size := operands[2].(literalValue).value.(uint64)
 727  			elt := rawValue{
 728  				buf: agg.buf[offset : offset+size],
 729  			}
 730  			if r.debug {
 731  				fmt.Fprintln(os.Stderr, indent+"extractvalue:", operands, "->", elt)
 732  			}
 733  			locals[inst.localIndex] = elt
 734  		case llvm.InsertValue:
 735  			agg := operands[0].asRawValue(r)
 736  			elt := operands[1].asRawValue(r)
 737  			offset := int(operands[2].(literalValue).value.(uint64))
 738  			newagg := newRawValue(uint32(len(agg.buf)))
 739  			copy(newagg.buf, agg.buf)
 740  			copy(newagg.buf[offset:], elt.buf)
 741  			if r.debug {
 742  				fmt.Fprintln(os.Stderr, indent+"insertvalue:", operands, "->", newagg)
 743  			}
 744  			locals[inst.localIndex] = newagg
 745  		case llvm.ICmp:
 746  			predicate := llvm.IntPredicate(operands[2].(literalValue).value.(uint8))
 747  			lhs := operands[0]
 748  			rhs := operands[1]
 749  			result := r.interpretICmp(lhs, rhs, predicate)
 750  			if result {
 751  				locals[inst.localIndex] = literalValue{uint8(1)}
 752  			} else {
 753  				locals[inst.localIndex] = literalValue{uint8(0)}
 754  			}
 755  			if r.debug {
 756  				fmt.Fprintln(os.Stderr, indent+"icmp:", operands[0], intPredicateString(predicate), operands[1], "->", result)
 757  			}
 758  		case llvm.FCmp:
 759  			predicate := llvm.FloatPredicate(operands[2].(literalValue).value.(uint8))
 760  			var result bool
 761  			var lhs, rhs float64
 762  			switch operands[0].len(r) {
 763  			case 8:
 764  				lhs = math.Float64frombits(operands[0].Uint(r))
 765  				rhs = math.Float64frombits(operands[1].Uint(r))
 766  			case 4:
 767  				lhs = float64(math.Float32frombits(uint32(operands[0].Uint(r))))
 768  				rhs = float64(math.Float32frombits(uint32(operands[1].Uint(r))))
 769  			default:
 770  				panic("unknown float type")
 771  			}
 772  			switch predicate {
 773  			case llvm.FloatOEQ:
 774  				result = lhs == rhs
 775  			case llvm.FloatUNE:
 776  				result = lhs != rhs
 777  			case llvm.FloatOGT:
 778  				result = lhs > rhs
 779  			case llvm.FloatOGE:
 780  				result = lhs >= rhs
 781  			case llvm.FloatOLT:
 782  				result = lhs < rhs
 783  			case llvm.FloatOLE:
 784  				result = lhs <= rhs
 785  			default:
 786  				return nil, mem, r.errorAt(inst, errors.New("interp: unsupported fcmp"))
 787  			}
 788  			if result {
 789  				locals[inst.localIndex] = literalValue{uint8(1)}
 790  			} else {
 791  				locals[inst.localIndex] = literalValue{uint8(0)}
 792  			}
 793  			if r.debug {
 794  				fmt.Fprintln(os.Stderr, indent+"fcmp:", operands[0], predicate, operands[1], "->", result)
 795  			}
 796  		case llvm.Add, llvm.Sub, llvm.Mul, llvm.UDiv, llvm.SDiv, llvm.URem, llvm.SRem, llvm.Shl, llvm.LShr, llvm.AShr, llvm.And, llvm.Or, llvm.Xor:
 797  			// Integer binary operations.
 798  			lhs := operands[0]
 799  			rhs := operands[1]
 800  			lhsPtr, err := lhs.asPointer(r)
 801  			if err == nil {
 802  				// The lhs is a pointer. This sometimes happens for particular
 803  				// pointer tricks.
 804  				if inst.opcode == llvm.Add {
 805  					// This likely means this is part of a
 806  					// unsafe.Pointer(uintptr(ptr) + offset) pattern.
 807  					lhsPtr, err = lhsPtr.addOffset(int64(rhs.Uint(r)))
 808  					if err != nil {
 809  						return nil, mem, r.errorAt(inst, err)
 810  					}
 811  					locals[inst.localIndex] = lhsPtr
 812  				} else if inst.opcode == llvm.Xor && rhs.Uint(r) == 0 {
 813  					// Special workaround for strings.noescape, see
 814  					// src/strings/builder.go in the Go source tree. This is
 815  					// the identity operator, so we can return the input.
 816  					locals[inst.localIndex] = lhs
 817  				} else if inst.opcode == llvm.And && rhs.Uint(r) < 8 {
 818  					// This is probably part of a pattern to get the lower bits
 819  					// of a pointer for pointer tagging, like this:
 820  					//     uintptr(unsafe.Pointer(t)) & 0b11
 821  					// We can actually support this easily by ANDing with the
 822  					// pointer offset.
 823  					result := uint64(lhsPtr.offset()) & rhs.Uint(r)
 824  					locals[inst.localIndex] = makeLiteralInt(result, int(lhs.len(r)*8))
 825  				} else {
 826  					// Catch-all for weird operations that should just be done
 827  					// at runtime.
 828  					err := r.runAtRuntime(fn, inst, locals, &mem, indent)
 829  					if err != nil {
 830  						return nil, mem, err
 831  					}
 832  				}
 833  				continue
 834  			}
 835  			var result uint64
 836  			switch inst.opcode {
 837  			case llvm.Add:
 838  				result = lhs.Uint(r) + rhs.Uint(r)
 839  			case llvm.Sub:
 840  				result = lhs.Uint(r) - rhs.Uint(r)
 841  			case llvm.Mul:
 842  				result = lhs.Uint(r) * rhs.Uint(r)
 843  			case llvm.UDiv:
 844  				result = lhs.Uint(r) / rhs.Uint(r)
 845  			case llvm.SDiv:
 846  				result = uint64(lhs.Int(r) / rhs.Int(r))
 847  			case llvm.URem:
 848  				result = lhs.Uint(r) % rhs.Uint(r)
 849  			case llvm.SRem:
 850  				result = uint64(lhs.Int(r) % rhs.Int(r))
 851  			case llvm.Shl:
 852  				result = lhs.Uint(r) << rhs.Uint(r)
 853  			case llvm.LShr:
 854  				result = lhs.Uint(r) >> rhs.Uint(r)
 855  			case llvm.AShr:
 856  				result = uint64(lhs.Int(r) >> rhs.Uint(r))
 857  			case llvm.And:
 858  				result = lhs.Uint(r) & rhs.Uint(r)
 859  			case llvm.Or:
 860  				result = lhs.Uint(r) | rhs.Uint(r)
 861  			case llvm.Xor:
 862  				result = lhs.Uint(r) ^ rhs.Uint(r)
 863  			default:
 864  				panic("unreachable")
 865  			}
 866  			locals[inst.localIndex] = makeLiteralInt(result, int(lhs.len(r)*8))
 867  			if r.debug {
 868  				fmt.Fprintln(os.Stderr, indent+instructionNameMap[inst.opcode]+":", lhs, rhs, "->", result)
 869  			}
 870  		case llvm.SExt, llvm.ZExt, llvm.Trunc:
 871  			// Change the size of an integer to a larger or smaller bit width.
 872  			// We make use of the fact that the Uint() function already
 873  			// zero-extends the value and that Int() already sign-extends the
 874  			// value, so we only need to truncate it to the appropriate bit
 875  			// width. This means we can implement sext, zext and trunc in the
 876  			// same way, by first {zero,sign}extending all the way up to uint64
 877  			// and then truncating it as necessary.
 878  			var value uint64
 879  			if inst.opcode == llvm.SExt {
 880  				value = uint64(operands[0].Int(r))
 881  			} else {
 882  				value = operands[0].Uint(r)
 883  			}
 884  			bitwidth := operands[1].Uint(r)
 885  			if r.debug {
 886  				fmt.Fprintln(os.Stderr, indent+instructionNameMap[inst.opcode]+":", value, bitwidth)
 887  			}
 888  			locals[inst.localIndex] = makeLiteralInt(value, int(bitwidth))
 889  		case llvm.SIToFP, llvm.UIToFP:
 890  			var value float64
 891  			switch inst.opcode {
 892  			case llvm.SIToFP:
 893  				value = float64(operands[0].Int(r))
 894  			case llvm.UIToFP:
 895  				value = float64(operands[0].Uint(r))
 896  			}
 897  			bitwidth := operands[1].Uint(r)
 898  			if r.debug {
 899  				fmt.Fprintln(os.Stderr, indent+instructionNameMap[inst.opcode]+":", value, bitwidth)
 900  			}
 901  			switch bitwidth {
 902  			case 64:
 903  				locals[inst.localIndex] = literalValue{math.Float64bits(value)}
 904  			case 32:
 905  				locals[inst.localIndex] = literalValue{math.Float32bits(float32(value))}
 906  			default:
 907  				panic("unknown integer size in sitofp/uitofp")
 908  			}
 909  		default:
 910  			if r.debug {
 911  				fmt.Fprintln(os.Stderr, indent+inst.String())
 912  			}
 913  			return nil, mem, r.errorAt(inst, errUnsupportedInst)
 914  		}
 915  	}
 916  	return nil, mem, r.errorAt(bb.instructions[len(bb.instructions)-1], errors.New("interp: reached end of basic block without terminator"))
 917  }
 918  
 919  // Interpret an icmp instruction. Doesn't have side effects, only returns the
 920  // output of the comparison.
 921  func (r *runner) interpretICmp(lhs, rhs value, predicate llvm.IntPredicate) bool {
 922  	switch predicate {
 923  	case llvm.IntEQ, llvm.IntNE:
 924  		var result bool
 925  		lhsPointer, lhsErr := lhs.asPointer(r)
 926  		rhsPointer, rhsErr := rhs.asPointer(r)
 927  		if (lhsErr == nil) != (rhsErr == nil) {
 928  			// Fast path: only one is a pointer, so they can't be equal.
 929  			result = false
 930  		} else if lhsErr == nil {
 931  			// Both must be nil, so both are pointers.
 932  			// Compare them directly.
 933  			result = lhsPointer.equal(rhsPointer)
 934  		} else {
 935  			// Fall back to generic comparison.
 936  			result = lhs.asRawValue(r).equal(rhs.asRawValue(r))
 937  		}
 938  		if predicate == llvm.IntNE {
 939  			result = !result
 940  		}
 941  		return result
 942  	case llvm.IntUGT:
 943  		return lhs.Uint(r) > rhs.Uint(r)
 944  	case llvm.IntUGE:
 945  		return lhs.Uint(r) >= rhs.Uint(r)
 946  	case llvm.IntULT:
 947  		return lhs.Uint(r) < rhs.Uint(r)
 948  	case llvm.IntULE:
 949  		return lhs.Uint(r) <= rhs.Uint(r)
 950  	case llvm.IntSGT:
 951  		return lhs.Int(r) > rhs.Int(r)
 952  	case llvm.IntSGE:
 953  		return lhs.Int(r) >= rhs.Int(r)
 954  	case llvm.IntSLT:
 955  		return lhs.Int(r) < rhs.Int(r)
 956  	case llvm.IntSLE:
 957  		return lhs.Int(r) <= rhs.Int(r)
 958  	default:
 959  		// _should_ be unreachable, until LLVM adds new icmp operands (unlikely)
 960  		panic("interp: unsupported icmp")
 961  	}
 962  }
 963  
// runAtRuntime emits the given instruction as real LLVM IR through r.builder
// instead of interpreting it. This is the fallback path for instructions that
// cannot be evaluated at interp time. Any interpreted local operands are first
// converted back to LLVM values; the emitted instruction is recorded in
// mem.instructions and its result is stored in locals so later interpreted
// instructions can refer to it.
func (r *runner) runAtRuntime(fn *function, inst instruction, locals []value, mem *memoryView, indent string) *Error {
	numOperands := inst.llvmInst.OperandsCount()
	operands := make([]llvm.Value, numOperands)
	for i := 0; i < numOperands; i++ {
		operand := inst.llvmInst.Operand(i)
		if !operand.IsAInstruction().IsNil() || !operand.IsAArgument().IsNil() {
			// The operand is a local (instruction result or function
			// parameter): materialize the interpreted value back into an
			// LLVM value of the expected type.
			var err error
			operand, err = locals[fn.locals[operand]].toLLVMValue(operand.Type(), mem)
			if err != nil {
				return r.errorAt(inst, err)
			}
		}
		operands[i] = operand
	}
	if r.debug {
		fmt.Fprintln(os.Stderr, indent+inst.String())
	}
	var result llvm.Value
	switch inst.opcode {
	case llvm.Call:
		// The called function is the last operand; the rest are arguments.
		llvmFn := operands[len(operands)-1]
		args := operands[:len(operands)-1]
		for _, op := range operands {
			if op.Type().TypeKind() == llvm.PointerTypeKind {
				// A pointer escaping into a runtime call may be written to
				// by the callee, so mark it as externally stored.
				err := mem.markExternalStore(op)
				if err != nil {
					return r.errorAt(inst, err)
				}
			}
		}
		result = r.builder.CreateCall(inst.llvmInst.CalledFunctionType(), llvmFn, args, inst.name)
	case llvm.Load:
		// The loaded-from memory is now observed at runtime.
		err := mem.markExternalLoad(operands[0])
		if err != nil {
			return r.errorAt(inst, err)
		}
		result = r.builder.CreateLoad(inst.llvmInst.Type(), operands[0], inst.name)
		// Preserve volatility and atomic ordering from the original load.
		if inst.llvmInst.IsVolatile() {
			result.SetVolatile(true)
		}
		if ordering := inst.llvmInst.Ordering(); ordering != llvm.AtomicOrderingNotAtomic {
			result.SetOrdering(ordering)
		}
	case llvm.Store:
		// The stored-to memory is now modified at runtime.
		err := mem.markExternalStore(operands[1])
		if err != nil {
			return r.errorAt(inst, err)
		}
		result = r.builder.CreateStore(operands[0], operands[1])
		// Preserve volatility and atomic ordering from the original store.
		if inst.llvmInst.IsVolatile() {
			result.SetVolatile(true)
		}
		if ordering := inst.llvmInst.Ordering(); ordering != llvm.AtomicOrderingNotAtomic {
			result.SetOrdering(ordering)
		}
	case llvm.BitCast:
		result = r.builder.CreateBitCast(operands[0], inst.llvmInst.Type(), inst.name)
	case llvm.ExtractValue:
		indices := inst.llvmInst.Indices()
		// Note: the Go LLVM API doesn't support multiple indices, so simulate
		// this operation with some extra extractvalue instructions. Hopefully
		// this is optimized to a single instruction.
		agg := operands[0]
		for i := 0; i < len(indices)-1; i++ {
			agg = r.builder.CreateExtractValue(agg, int(indices[i]), inst.name+".agg")
			mem.instructions = append(mem.instructions, agg)
		}
		result = r.builder.CreateExtractValue(agg, int(indices[len(indices)-1]), inst.name)
	case llvm.InsertValue:
		indices := inst.llvmInst.Indices()
		// Similar to extractvalue, we're working around a limitation in the Go
		// LLVM API here by splitting the insertvalue into multiple instructions
		// if there is more than one operand.
		// First drill down to the innermost aggregate, keeping each level.
		agg := operands[0]
		aggregates := []llvm.Value{agg}
		for i := 0; i < len(indices)-1; i++ {
			agg = r.builder.CreateExtractValue(agg, int(indices[i]), inst.name+".agg"+strconv.Itoa(i))
			aggregates = append(aggregates, agg)
			mem.instructions = append(mem.instructions, agg)
		}
		// Then rebuild outward, inserting the new element at each level.
		result = operands[1]
		for i := len(indices) - 1; i >= 0; i-- {
			agg := aggregates[i]
			result = r.builder.CreateInsertValue(agg, result, int(indices[i]), inst.name+".insertvalue"+strconv.Itoa(i))
			if i != 0 { // don't add last result to mem.instructions as it will be done at the end already
				mem.instructions = append(mem.instructions, result)
			}
		}

	case llvm.Add:
		result = r.builder.CreateAdd(operands[0], operands[1], inst.name)
	case llvm.Sub:
		result = r.builder.CreateSub(operands[0], operands[1], inst.name)
	case llvm.Mul:
		result = r.builder.CreateMul(operands[0], operands[1], inst.name)
	case llvm.UDiv:
		result = r.builder.CreateUDiv(operands[0], operands[1], inst.name)
	case llvm.SDiv:
		result = r.builder.CreateSDiv(operands[0], operands[1], inst.name)
	case llvm.URem:
		result = r.builder.CreateURem(operands[0], operands[1], inst.name)
	case llvm.SRem:
		result = r.builder.CreateSRem(operands[0], operands[1], inst.name)
	case llvm.ZExt:
		result = r.builder.CreateZExt(operands[0], inst.llvmInst.Type(), inst.name)
	default:
		return r.errorAt(inst, errUnsupportedRuntimeInst)
	}
	// Record the emitted instruction and make its result available to later
	// interpreted instructions as a local value.
	locals[inst.localIndex] = localValue{result}
	mem.instructions = append(mem.instructions, result)
	return nil
}
1076  
1077  func intPredicateString(predicate llvm.IntPredicate) string {
1078  	switch predicate {
1079  	case llvm.IntEQ:
1080  		return "eq"
1081  	case llvm.IntNE:
1082  		return "ne"
1083  	case llvm.IntUGT:
1084  		return "ugt"
1085  	case llvm.IntUGE:
1086  		return "uge"
1087  	case llvm.IntULT:
1088  		return "ult"
1089  	case llvm.IntULE:
1090  		return "ule"
1091  	case llvm.IntSGT:
1092  		return "sgt"
1093  	case llvm.IntSGE:
1094  		return "sge"
1095  	case llvm.IntSLT:
1096  		return "slt"
1097  	case llvm.IntSLE:
1098  		return "sle"
1099  	default:
1100  		return "cmp?"
1101  	}
1102  }
1103