...

Source file src/github.com/cockroachdb/apd/v3/bigint.go

Documentation: github.com/cockroachdb/apd/v3

     1  // Copyright 2022 The Cockroach Authors.
     2  //
     3  // Licensed under the Apache License, Version 2.0 (the "License");
     4  // you may not use this file except in compliance with the License.
     5  // You may obtain a copy of the License at
     6  //
     7  //     http://www.apache.org/licenses/LICENSE-2.0
     8  //
     9  // Unless required by applicable law or agreed to in writing, software
    10  // distributed under the License is distributed on an "AS IS" BASIS,
    11  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    12  // implied. See the License for the specific language governing
    13  // permissions and limitations under the License.
    14  
    15  package apd
    16  
    17  import (
    18  	"fmt"
    19  	"math/big"
    20  	"math/bits"
    21  	"math/rand"
    22  	"strconv"
    23  	"unsafe"
    24  )
    25  
    26  // The inlineWords capacity is set to accommodate any value that would fit in a
    27  // 128-bit integer (i.e. values with an absolute value up to 2^128 - 1).
    28  const inlineWords = 128 / bits.UintSize
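        // As an illustrative note: on a 64-bit platform bits.UintSize is 64, so
        // inlineWords is 128/64 = 2 words; on a 32-bit platform it is 128/32 = 4
        // words. Either way the inline array spans exactly 128 bits.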
    29  
    30  // BigInt is a wrapper around big.Int. It minimizes memory allocation by using
    31  // an inline array to back the big.Int's variable-length "nat" slice when the
    32  // integer's value is sufficiently small.
    33  // The zero value is ready to use.
    34  type BigInt struct {
    35  	// A wrapped big.Int. Only set to the BigInt's value when the value exceeds
    36  	// what is representable in the _inline array.
    37  	//
    38  	// When the BigInt's value is still small enough to use the _inline array,
    39  	// this field doubles as the integer's negative flag. See negSentinel.
    40  	//
    41  	// Methods should access this field through inner.
    42  	_inner *big.Int
    43  
    44  	// The inlined backing array used for short-lived, stack-allocated big.Int
    45  	// structs during arithmetic when the value is small.
    46  	//
    47  	// Each BigInt maintains (through big.Int) an internal reference to a
    48  	// variable-length integer value, which is represented by a []big.Word. The
    49  	// _inline field and the inner and updateInner methods combine to allow
    50  	// BigInt to inline this variable-length integer array within the BigInt
    51  	// struct when its value is sufficiently small. In the inner method, we
    52  	// point a temporary big.Int's nat slice at this _inline array. big.Int will
    53  	// avoid re-allocating this array until it is provided with a value that
    54  	// exceeds the initial capacity. Later in updateInner, we detect whether the
    55  	// array has been re-allocated. If so, we switch to using _inner. If
    56  	// not, we continue to use this array.
    57  	_inline [inlineWords]big.Word
    58  }
    59  
    60  // NewBigInt allocates and returns a new BigInt set to x.
    61  //
    62  // NOTE: BigInt jumps through hoops to avoid escaping to the heap. As such, most
    63  // users of BigInt should not need this function. They should instead declare a
    64  // zero-valued BigInt directly on the stack and interact with references to this
    65  // stack-allocated value. Recall that the zero-valued BigInt is ready to use.
    66  func NewBigInt(x int64) *BigInt {
    67  	return new(BigInt).SetInt64(x)
    68  }
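        // A minimal usage sketch contrasting the two styles (illustrative only):
        //
        //	var x BigInt      // zero value, ready to use, can stay on the stack
        //	x.SetInt64(7)
        //
        //	y := NewBigInt(7) // heap-allocated; only needed when the *BigInt
        //	                  // must outlive the current stack frame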
    69  
    70  // negSentinel is assigned to BigInt._inner as a "sentinel" flag to indicate
    71  // that a BigInt is negative ((big.Int).Sign() < 0) but its absolute value is
    72  // still small enough to represent in the _inline array.
    73  var negSentinel = new(big.Int)
    74  
    75  // isInline returns whether the BigInt stores its value in its _inline array.
    76  //gcassert:inline
    77  func (z *BigInt) isInline() bool {
    78  	return z._inner == nil || z._inner == negSentinel
    79  }
    80  
    81  // The memory representation of big.Int. Used for unsafe modification below.
    82  type intStruct struct {
    83  	neg bool
    84  	abs []big.Word
    85  }
    86  
    87  // noescape hides a pointer from escape analysis. noescape is the identity
    88  // function but escape analysis doesn't think the output depends on the input.
    89  // noescape is inlined and currently compiles down to zero instructions.
    90  //
    91  // USE CAREFULLY!
    92  //
    93  // This was copied from strings.Builder, which in turn copied it from the
    94  // runtime.
    95  // For more, see issues #23382 and #7921 in github.com/golang/go.
    96  //go:nosplit
    97  //go:nocheckptr
    98  func noescape(p unsafe.Pointer) unsafe.Pointer {
    99  	x := uintptr(p)
   100  	//lint:ignore SA4016 intentional no-op to hide pointer from escape analysis.
   101  	return unsafe.Pointer(x ^ 0)
   102  }
   103  
   104  // inner returns the BigInt's current value as a *big.Int.
   105  //
   106  // NOTE: this was carefully written to permit function inlining. Modify with
   107  // care.
   108  //gcassert:inline
   109  func (z *BigInt) inner(tmp *big.Int) *big.Int {
   110  	// Point the big.Int at the inline array. When doing so, use noescape to
   111  	// avoid forcing the BigInt to escape to the heap. Go's escape analysis
   112  	// struggles with self-referential pointers, and it can't prove that we
   113  	// only assign _inner to a heap-allocated object (which must not contain
   114  	// pointers that reference the stack or the GC explodes) if the big.Int's
   115  	// backing array has been re-allocated onto the heap first.
   116  	//
   117  	// NOTE: SetBits sets the neg field to false, so this must come before the
   118  	// negSentinel handling.
   119  	tmp.SetBits((*[inlineWords]big.Word)(noescape(unsafe.Pointer(&z._inline[0])))[:])
   120  
   121  	if z._inner != nil {
   122  		if z._inner != negSentinel {
   123  			// The variable-length big.Int reference is set.
   124  			return z._inner
   125  		}
   126  
   127  		// This is the negative sentinel, which indicates that the integer is
   128  		// negative but still stored inline. Update the big.Int accordingly. We
   129  		// use unsafe because (*big.Int).Neg is too complex and prevents this
   130  		// method from being inlined.
   131  		(*intStruct)(unsafe.Pointer(tmp)).neg = true
   132  	}
   133  	return tmp
   134  }
   135  
   136  // innerOrNil is like inner, but returns a nil *big.Int if the receiver is nil.
   137  // NOTE: this is not inlined.
   138  func (z *BigInt) innerOrNil(tmp *big.Int) *big.Int {
   139  	if z == nil {
   140  		return nil
   141  	}
   142  	return z.inner(tmp)
   143  }
   144  
   145  // innerOrAlias is like inner, but returns the provided *big.Int if the receiver
   146  // and the other *BigInt argument reference the same object.
   147  // NOTE: this is not inlined.
   148  func (z *BigInt) innerOrAlias(tmp *big.Int, a *BigInt, ai *big.Int) *big.Int {
   149  	if a == z {
   150  		return ai
   151  	}
   152  	return z.inner(tmp)
   153  }
   154  
   155  // innerOrNilOrAlias is like inner, but with the added semantics specified for
   156  // both innerOrNil and innerOrAlias.
   157  // NOTE: this is not inlined.
   158  func (z *BigInt) innerOrNilOrAlias(tmp *big.Int, a *BigInt, ai *big.Int) *big.Int {
   159  	if z == nil {
   160  		return nil
   161  	} else if z == a {
   162  		return ai
   163  	}
   164  	return z.inner(tmp)
   165  }
   166  
   167  // updateInner updates the BigInt's current value with the provided *big.Int.
   168  //
   169  // NOTE: this was carefully written to permit function inlining. Modify with
   170  // care.
   171  //gcassert:inline
   172  func (z *BigInt) updateInner(src *big.Int) {
   173  	if z._inner == src {
   174  		return
   175  	}
   176  
   177  	bits := src.Bits()
   178  	bitsLen := len(bits)
   179  	if bitsLen > 0 && &z._inline[0] != &bits[0] {
   180  		// The big.Int re-allocated its backing array during arithmetic because
   181  		// the value grew beyond what could fit in the _inline array. Switch to
   182  		// a heap-allocated, variable-length big.Int and store that in _inner.
   183  		// From now on, all arithmetic will use this big.Int directly.
   184  		//
   185  		// Allocate a new big.Int and perform a shallow-copy of the argument to
   186  		// prevent it from escaping off the stack.
   187  		z._inner = new(big.Int)
   188  		*z._inner = *src
   189  	} else {
   190  		// Zero out all words beyond the end of the big.Int's current Word
   191  		// slice. big.Int arithmetic can sometimes leave these words "dirty".
   192  		// They would cause issues when the _inline array is injected into the
   193  		// next big.Int if not cleared.
   194  		for bitsLen < len(z._inline) {
   195  			z._inline[bitsLen] = 0
   196  			bitsLen++
   197  		}
   198  
   199  		// Set or unset the negative sentinel, according to the argument's sign.
   200  		// We use unsafe because (*big.Int).Sign is too complex and prevents
   201  		// this method from being inlined.
   202  		if (*intStruct)(unsafe.Pointer(src)).neg {
   203  			z._inner = negSentinel
   204  		} else {
   205  			z._inner = nil
   206  		}
   207  	}
   208  }
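        // Taken together, inner and updateInner give the wrapper methods below a
        // fixed shape. A minimal sketch of that pattern:
        //
        //	var tmp big.Int     // stack-allocated scratch header
        //	zi := z.inner(&tmp) // zi's word slice aliases z._inline while inline
        //	zi.Add(zi, zi)      // arithmetic may re-allocate zi's backing array
        //	z.updateInner(zi)   // spills to z._inner only if the array moved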
   209  
   210  const wordsInUint64 = 64 / bits.UintSize
   211  
   212  func init() {
   213  	if inlineWords < wordsInUint64 {
   214  		panic("inline array must be at least 64 bits in size")
   215  	}
   216  }
   217  
   218  // innerAsUint64 returns the BigInt's current absolute value as a uint64 and a
   219  // flag indicating whether the value is negative. If the value is not stored
   220  // inline or if it cannot fit in a uint64, false is returned.
   221  //
   222  // NOTE: this was carefully written to permit function inlining. Modify with
   223  // care.
   224  //gcassert:inline
   225  func (z *BigInt) innerAsUint64() (val uint64, neg bool, ok bool) {
   226  	if !z.isInline() {
   227  		// The value is not stored inline.
   228  		return 0, false, false
   229  	}
   230  	if wordsInUint64 == 1 && inlineWords == 2 {
   231  		// Manually unrolled loop for current inlineWords setting.
   232  		if z._inline[1] != 0 {
   233  			// The value cannot fit in a uint64.
   234  			return 0, false, false
   235  		}
   236  	} else {
   237  		// Fallback for other values of inlineWords.
   238  		for i := wordsInUint64; i < len(z._inline); i++ {
   239  			if z._inline[i] != 0 {
   240  				// The value can not fit in a uint64.
   241  				return 0, false, false
   242  			}
   243  		}
   244  	}
   245  
   246  	val = uint64(z._inline[0])
   247  	if wordsInUint64 == 2 {
   248  		// From big.low64.
   249  		val = uint64(z._inline[1])<<32 | val
   250  	}
   251  	neg = z._inner == negSentinel
   252  	return val, neg, true
   253  }
   254  
   255  // updateInnerFromUint64 updates the BigInt's current value with the provided
   256  // absolute value and sign.
   257  //
   258  // NOTE: this was carefully written to permit function inlining. Modify with
   259  // care.
   260  //gcassert:inline
   261  func (z *BigInt) updateInnerFromUint64(val uint64, neg bool) {
   262  	// Set the inline value.
   263  	z._inline[0] = big.Word(val)
   264  	if wordsInUint64 == 2 {
   265  		// From (big.nat).setUint64.
   266  		z._inline[1] = big.Word(val >> 32)
   267  	}
   268  
   269  	// Clear out all other words in the inline array.
   270  	if wordsInUint64 == 1 && inlineWords == 2 {
   271  		// Manually unrolled loop for current inlineWords setting.
   272  		z._inline[1] = 0
   273  	} else {
   274  		// Fallback for other values of inlineWords.
   275  		for i := wordsInUint64; i < len(z._inline); i++ {
   276  			z._inline[i] = 0
   277  		}
   278  	}
   279  
   280  	// Set or unset the negative sentinel.
   281  	if neg {
   282  		z._inner = negSentinel
   283  	} else {
   284  		z._inner = nil
   285  	}
   286  }
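        // On 32-bit platforms (wordsInUint64 == 2) a uint64 straddles two inline
        // words. For example, val = 1<<32 | 5 is stored as _inline[0] = 5 and
        // _inline[1] = 1, and innerAsUint64 reassembles it as
        // uint64(_inline[1])<<32 | uint64(_inline[0]). On 64-bit platforms the
        // value occupies _inline[0] alone.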
   287  
   288  const (
   289  	bigIntSize     = unsafe.Sizeof(BigInt{})
   290  	mathBigIntSize = unsafe.Sizeof(big.Int{})
   291  	mathWordSize   = unsafe.Sizeof(big.Word(0))
   292  )
   293  
   294  // Size returns the total memory footprint of z in bytes.
   295  func (z *BigInt) Size() uintptr {
   296  	if z.isInline() {
   297  		return bigIntSize
   298  	}
   299  	return bigIntSize + mathBigIntSize + uintptr(cap(z._inner.Bits()))*mathWordSize
   300  }
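        // As an illustrative data point, assuming a 64-bit platform (8-byte
        // pointers and 8-byte big.Words): an inline BigInt reports 8 + 2*8 = 24
        // bytes, while a spilled value adds 32 bytes for the heap big.Int header
        // plus 8 bytes per word of backing-array capacity.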
   301  
   302  ///////////////////////////////////////////////////////////////////////////////
   303  //                    inline arithmetic for small values                     //
   304  ///////////////////////////////////////////////////////////////////////////////
   305  
   306  //gcassert:inline
   307  func addInline(xVal, yVal uint64, xNeg, yNeg bool) (zVal uint64, zNeg, ok bool) {
   308  	if xNeg == yNeg {
   309  		sum, carry := bits.Add64(xVal, yVal, 0)
   310  		overflow := carry != 0
   311  		return sum, xNeg, !overflow
   312  	}
   313  
   314  	diff, borrow := bits.Sub64(xVal, yVal, 0)
   315  	if borrow != 0 { // underflow
   316  		xNeg = !xNeg
   317  		diff = yVal - xVal
   318  	}
   319  	if diff == 0 {
   320  		xNeg = false
   321  	}
   322  	return diff, xNeg, true
   323  }
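        // Two worked cases (illustrative):
        //
        //	addInline(3, 5, false, true)               // 3 + (-5): the subtraction
        //	                                           // borrows, so the sign flips
        //	                                           // and the result is (2, neg),
        //	                                           // i.e. -2, ok = true
        //	addInline(math.MaxUint64, 1, false, false) // carries out of 64 bits,
        //	                                           // so ok = false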
   324  
   325  //gcassert:inline
   326  func mulInline(xVal, yVal uint64, xNeg, yNeg bool) (zVal uint64, zNeg, ok bool) {
   327  	hi, lo := bits.Mul64(xVal, yVal)
   328  	neg := xNeg != yNeg
   329  	overflow := hi != 0
   330  	return lo, neg, !overflow
   331  }
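        // Similarly (illustrative): mulInline(1<<32, 1<<32, false, false) produces
        // a 65-bit product, so hi != 0 and ok = false, deferring to the big.Int
        // slow path.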
   332  
   333  //gcassert:inline
   334  func quoInline(xVal, yVal uint64, xNeg, yNeg bool) (quoVal uint64, quoNeg, ok bool) {
   335  	if yVal == 0 { // divide by 0
   336  		return 0, false, false
   337  	}
   338  	quo := xVal / yVal
   339  	neg := xNeg != yNeg
   340  	return quo, neg, true
   341  }
   342  
   343  //gcassert:inline
   344  func remInline(xVal, yVal uint64, xNeg, yNeg bool) (remVal uint64, remNeg, ok bool) {
   345  	if yVal == 0 { // divide by 0
   346  		return 0, false, false
   347  	}
   348  	rem := xVal % yVal
   349  	return rem, xNeg, true
   350  }
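        // Like (big.Int).Quo and (big.Int).Rem, these helpers implement truncated
        // division: the quotient is negative iff the operand signs differ, and the
        // remainder takes the dividend's sign. Worked case (illustrative), for
        // x = -7, y = 2: quoInline yields (3, neg) = -3 and remInline yields
        // (1, neg) = -1, so -7 == 2*(-3) + (-1).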
   351  
   352  ///////////////////////////////////////////////////////////////////////////////
   353  //                        big.Int API wrapper methods                        //
   354  ///////////////////////////////////////////////////////////////////////////////
   355  
   356  // Abs calls (big.Int).Abs.
   357  func (z *BigInt) Abs(x *BigInt) *BigInt {
   358  	if x.isInline() {
   359  		z._inline = x._inline
   360  		z._inner = nil // !negSentinel
   361  		return z
   362  	}
   363  	var tmp1, tmp2 big.Int //gcassert:noescape
   364  	zi := z.inner(&tmp1)
   365  	zi.Abs(x.inner(&tmp2))
   366  	z.updateInner(zi)
   367  	return z
   368  }
   369  
   370  // Add calls (big.Int).Add.
   371  func (z *BigInt) Add(x, y *BigInt) *BigInt {
   372  	if xVal, xNeg, ok := x.innerAsUint64(); ok {
   373  		if yVal, yNeg, ok := y.innerAsUint64(); ok {
   374  			if zVal, zNeg, ok := addInline(xVal, yVal, xNeg, yNeg); ok {
   375  				z.updateInnerFromUint64(zVal, zNeg)
   376  				return z
   377  			}
   378  		}
   379  	}
   380  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   381  	zi := z.inner(&tmp1)
   382  	zi.Add(x.inner(&tmp2), y.inner(&tmp3))
   383  	z.updateInner(zi)
   384  	return z
   385  }
   386  
   387  // And calls (big.Int).And.
   388  func (z *BigInt) And(x, y *BigInt) *BigInt {
   389  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   390  	zi := z.inner(&tmp1)
   391  	zi.And(x.inner(&tmp2), y.inner(&tmp3))
   392  	z.updateInner(zi)
   393  	return z
   394  }
   395  
   396  // AndNot calls (big.Int).AndNot.
   397  func (z *BigInt) AndNot(x, y *BigInt) *BigInt {
   398  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   399  	zi := z.inner(&tmp1)
   400  	zi.AndNot(x.inner(&tmp2), y.inner(&tmp3))
   401  	z.updateInner(zi)
   402  	return z
   403  }
   404  
   405  // Append calls (big.Int).Append.
   406  func (z *BigInt) Append(buf []byte, base int) []byte {
   407  	if z == nil {
   408  		// Fast-path that avoids innerOrNil, allowing inner to be inlined.
   409  		return append(buf, "<nil>"...)
   410  	}
   411  	if zVal, zNeg, ok := z.innerAsUint64(); ok {
   412  		// Check if the base is supported by strconv.AppendUint.
   413  		if base >= 2 && base <= 36 {
   414  			if zNeg {
   415  				buf = append(buf, '-')
   416  			}
   417  			return strconv.AppendUint(buf, zVal, base)
   418  		}
   419  	}
   420  	var tmp1 big.Int //gcassert:noescape
   421  	return z.inner(&tmp1).Append(buf, base)
   422  }
   423  
   424  // Binomial calls (big.Int).Binomial.
   425  func (z *BigInt) Binomial(n, k int64) *BigInt {
   426  	var tmp1 big.Int //gcassert:noescape
   427  	zi := z.inner(&tmp1)
   428  	zi.Binomial(n, k)
   429  	z.updateInner(zi)
   430  	return z
   431  }
   432  
   433  // Bit calls (big.Int).Bit.
   434  func (z *BigInt) Bit(i int) uint {
   435  	if i == 0 && z.isInline() {
   436  		// Optimization for common case: odd/even test of z.
   437  		return uint(z._inline[0] & 1)
   438  	}
   439  	var tmp1 big.Int //gcassert:noescape
   440  	return z.inner(&tmp1).Bit(i)
   441  }
   442  
   443  // BitLen calls (big.Int).BitLen.
   444  func (z *BigInt) BitLen() int {
   445  	if z.isInline() {
   446  		// Find largest non-zero inline word.
   447  		for i := len(z._inline) - 1; i >= 0; i-- {
   448  			if z._inline[i] != 0 {
   449  				return i*bits.UintSize + bits.Len(uint(z._inline[i]))
   450  			}
   451  		}
   452  		return 0
   453  	}
   454  	var tmp1 big.Int //gcassert:noescape
   455  	return z.inner(&tmp1).BitLen()
   456  }
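        // Worked case (illustrative): with _inline = {0b101, 0}, the loop finds the
        // highest non-zero word at i = 0 and returns 0*bits.UintSize +
        // bits.Len(0b101) = 3.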
   457  
   458  // Bits calls (big.Int).Bits.
   459  func (z *BigInt) Bits() []big.Word {
   460  	var tmp1 big.Int //gcassert:noescape
   461  	return z.inner(&tmp1).Bits()
   462  }
   463  
   464  // Bytes calls (big.Int).Bytes.
   465  func (z *BigInt) Bytes() []byte {
   466  	var tmp1 big.Int //gcassert:noescape
   467  	return z.inner(&tmp1).Bytes()
   468  }
   469  
   470  // Cmp calls (big.Int).Cmp.
   471  func (z *BigInt) Cmp(y *BigInt) (r int) {
   472  	if zVal, zNeg, ok := z.innerAsUint64(); ok {
   473  		if yVal, yNeg, ok := y.innerAsUint64(); ok {
   474  			switch {
   475  			case zNeg == yNeg:
   476  				switch {
   477  				case zVal < yVal:
   478  					r = -1
   479  				case zVal > yVal:
   480  					r = 1
   481  				}
   482  				if zNeg {
   483  					r = -r
   484  				}
   485  			case zNeg:
   486  				r = -1
   487  			default:
   488  				r = 1
   489  			}
   490  			return r
   491  		}
   492  	}
   493  	var tmp1, tmp2 big.Int //gcassert:noescape
   494  	return z.inner(&tmp1).Cmp(y.inner(&tmp2))
   495  }
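        // Worked case (illustrative): comparing -3 with -5 takes the zNeg == yNeg
        // branch with zVal = 3 < yVal = 5, so r is first set to -1 and then
        // negated to 1, reflecting that -3 > -5.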
   496  
   497  // CmpAbs calls (big.Int).CmpAbs.
   498  func (z *BigInt) CmpAbs(y *BigInt) (r int) {
   499  	if zVal, _, ok := z.innerAsUint64(); ok {
   500  		if yVal, _, ok := y.innerAsUint64(); ok {
   501  			switch {
   502  			case zVal < yVal:
   503  				r = -1
   504  			case zVal > yVal:
   505  				r = 1
   506  			}
   507  			return r
   508  		}
   509  	}
   510  	var tmp1, tmp2 big.Int //gcassert:noescape
   511  	return z.inner(&tmp1).CmpAbs(y.inner(&tmp2))
   512  }
   513  
   514  // Div calls (big.Int).Div.
   515  func (z *BigInt) Div(x, y *BigInt) *BigInt {
   516  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   517  	zi := z.inner(&tmp1)
   518  	zi.Div(x.inner(&tmp2), y.inner(&tmp3))
   519  	z.updateInner(zi)
   520  	return z
   521  }
   522  
   523  // DivMod calls (big.Int).DivMod.
   524  func (z *BigInt) DivMod(x, y, m *BigInt) (*BigInt, *BigInt) {
   525  	var tmp1, tmp2, tmp3, tmp4 big.Int //gcassert:noescape
   526  	zi := z.inner(&tmp1)
   527  	mi := m.inner(&tmp2)
   528  	// NOTE: innerOrAlias for the y param because (big.Int).DivMod needs to
   529  	// detect when y is aliased to the receiver.
   530  	zi.DivMod(x.inner(&tmp3), y.innerOrAlias(&tmp4, z, zi), mi)
   531  	z.updateInner(zi)
   532  	m.updateInner(mi)
   533  	return z, m
   534  }
   535  
   536  // Exp calls (big.Int).Exp.
   537  func (z *BigInt) Exp(x, y, m *BigInt) *BigInt {
   538  	var tmp1, tmp2, tmp3, tmp4 big.Int //gcassert:noescape
   539  	zi := z.inner(&tmp1)
   540  	if zi.Exp(x.inner(&tmp2), y.inner(&tmp3), m.innerOrNil(&tmp4)) == nil {
   541  		return nil
   542  	}
   543  	z.updateInner(zi)
   544  	return z
   545  }
   546  
   547  // Format calls (big.Int).Format.
   548  func (z *BigInt) Format(s fmt.State, ch rune) {
   549  	var tmp1 big.Int //gcassert:noescape
   550  	z.innerOrNil(&tmp1).Format(s, ch)
   551  }
   552  
   553  // GCD calls (big.Int).GCD.
   554  func (z *BigInt) GCD(x, y, a, b *BigInt) *BigInt {
   555  	var tmp1, tmp2, tmp3, tmp4, tmp5 big.Int //gcassert:noescape
   556  	zi := z.inner(&tmp1)
   557  	ai := a.inner(&tmp2)
   558  	bi := b.inner(&tmp3)
   559  	xi := x.innerOrNil(&tmp4)
   560  	// NOTE: innerOrNilOrAlias for the y param because (big.Int).GCD needs to
   561  	// detect when y is aliased to b. See "avoid aliasing b" in lehmerGCD.
   562  	yi := y.innerOrNilOrAlias(&tmp5, b, bi)
   563  	zi.GCD(xi, yi, ai, bi)
   564  	z.updateInner(zi)
   565  	if xi != nil {
   566  		x.updateInner(xi)
   567  	}
   568  	if yi != nil {
   569  		y.updateInner(yi)
   570  	}
   571  	return z
   572  }
   573  
   574  // GobEncode calls (big.Int).GobEncode.
   575  func (z *BigInt) GobEncode() ([]byte, error) {
   576  	var tmp1 big.Int //gcassert:noescape
   577  	return z.innerOrNil(&tmp1).GobEncode()
   578  }
   579  
   580  // GobDecode calls (big.Int).GobDecode.
   581  func (z *BigInt) GobDecode(buf []byte) error {
   582  	var tmp1 big.Int //gcassert:noescape
   583  	zi := z.inner(&tmp1)
   584  	if err := zi.GobDecode(buf); err != nil {
   585  		return err
   586  	}
   587  	z.updateInner(zi)
   588  	return nil
   589  }
   590  
   591  // Int64 calls (big.Int).Int64.
   592  func (z *BigInt) Int64() int64 {
   593  	if zVal, zNeg, ok := z.innerAsUint64(); ok {
   594  		// The unchecked cast from uint64 to int64 looks unsafe, but it is
   595  		// allowed and is identical to the logic in (big.Int).Int64. Per the
   596  		// method's contract:
   597  		// > If z cannot be represented in an int64, the result is undefined.
   598  		zi := int64(zVal)
   599  		if zNeg {
   600  			zi = -zi
   601  		}
   602  		return zi
   603  	}
   604  	var tmp1 big.Int //gcassert:noescape
   605  	return z.inner(&tmp1).Int64()
   606  }
   607  
   608  // IsInt64 calls (big.Int).IsInt64.
   609  func (z *BigInt) IsInt64() bool {
   610  	if zVal, zNeg, ok := z.innerAsUint64(); ok {
   611  		// From (big.Int).IsInt64.
   612  		zi := int64(zVal)
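        		// zi >= 0 means zVal < 1<<63, which fits in an int64 with either
        		// sign. The only other representable value is math.MinInt64
        		// (zNeg with zVal == 1<<63), caught by zi == -zi, since negating
        		// MinInt64 overflows back to itself.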
   613  		return zi >= 0 || zNeg && zi == -zi
   614  	}
   615  	var tmp1 big.Int //gcassert:noescape
   616  	return z.inner(&tmp1).IsInt64()
   617  }
   618  
   619  // IsUint64 calls (big.Int).IsUint64.
   620  func (z *BigInt) IsUint64() bool {
   621  	if _, zNeg, ok := z.innerAsUint64(); ok {
   622  		return !zNeg
   623  	}
   624  	var tmp1 big.Int //gcassert:noescape
   625  	return z.inner(&tmp1).IsUint64()
   626  }
   627  
   628  // Lsh calls (big.Int).Lsh.
   629  func (z *BigInt) Lsh(x *BigInt, n uint) *BigInt {
   630  	var tmp1, tmp2 big.Int //gcassert:noescape
   631  	zi := z.inner(&tmp1)
   632  	zi.Lsh(x.inner(&tmp2), n)
   633  	z.updateInner(zi)
   634  	return z
   635  }
   636  
   637  // MarshalJSON calls (big.Int).MarshalJSON.
   638  func (z *BigInt) MarshalJSON() ([]byte, error) {
   639  	var tmp1 big.Int //gcassert:noescape
   640  	return z.innerOrNil(&tmp1).MarshalJSON()
   641  }
   642  
   643  // MarshalText calls (big.Int).MarshalText.
   644  func (z *BigInt) MarshalText() (text []byte, err error) {
   645  	var tmp1 big.Int //gcassert:noescape
   646  	return z.innerOrNil(&tmp1).MarshalText()
   647  }
   648  
   649  // Mod calls (big.Int).Mod.
   650  func (z *BigInt) Mod(x, y *BigInt) *BigInt {
   651  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   652  	zi := z.inner(&tmp1)
   653  	// NOTE: innerOrAlias for the y param because (big.Int).Mod needs to detect
   654  	// when y is aliased to the receiver.
   655  	zi.Mod(x.inner(&tmp2), y.innerOrAlias(&tmp3, z, zi))
   656  	z.updateInner(zi)
   657  	return z
   658  }
   659  
   660  // ModInverse calls (big.Int).ModInverse.
   661  func (z *BigInt) ModInverse(g, n *BigInt) *BigInt {
   662  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   663  	zi := z.inner(&tmp1)
   664  	if zi.ModInverse(g.inner(&tmp2), n.inner(&tmp3)) == nil {
   665  		return nil
   666  	}
   667  	z.updateInner(zi)
   668  	return z
   669  }
   670  
   671  // ModSqrt calls (big.Int).ModSqrt.
   672  func (z *BigInt) ModSqrt(x, p *BigInt) *BigInt {
   673  	var tmp1, tmp2 big.Int //gcassert:noescape
   674  	var tmp3 big.Int       // escapes because of https://github.com/golang/go/pull/50527.
   675  	zi := z.inner(&tmp1)
   676  	if zi.ModSqrt(x.inner(&tmp2), p.inner(&tmp3)) == nil {
   677  		return nil
   678  	}
   679  	z.updateInner(zi)
   680  	return z
   681  }
   682  
   683  // Mul calls (big.Int).Mul.
   684  func (z *BigInt) Mul(x, y *BigInt) *BigInt {
   685  	if xVal, xNeg, ok := x.innerAsUint64(); ok {
   686  		if yVal, yNeg, ok := y.innerAsUint64(); ok {
   687  			if zVal, zNeg, ok := mulInline(xVal, yVal, xNeg, yNeg); ok {
   688  				z.updateInnerFromUint64(zVal, zNeg)
   689  				return z
   690  			}
   691  		}
   692  	}
   693  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   694  	zi := z.inner(&tmp1)
   695  	zi.Mul(x.inner(&tmp2), y.inner(&tmp3))
   696  	z.updateInner(zi)
   697  	return z
   698  }
   699  
   700  // MulRange calls (big.Int).MulRange.
   701  func (z *BigInt) MulRange(x, y int64) *BigInt {
   702  	var tmp1 big.Int //gcassert:noescape
   703  	zi := z.inner(&tmp1)
   704  	zi.MulRange(x, y)
   705  	z.updateInner(zi)
   706  	return z
   707  }
   708  
   709  // Neg calls (big.Int).Neg.
   710  func (z *BigInt) Neg(x *BigInt) *BigInt {
   711  	if x.isInline() {
   712  		z._inline = x._inline
   713  		if x._inner == negSentinel {
   714  			z._inner = nil
   715  		} else {
   716  			z._inner = negSentinel
   717  		}
   718  		return z
   719  	}
   720  	var tmp1, tmp2 big.Int //gcassert:noescape
   721  	zi := z.inner(&tmp1)
   722  	zi.Neg(x.inner(&tmp2))
   723  	z.updateInner(zi)
   724  	return z
   725  }
   726  
   727  // Not calls (big.Int).Not.
   728  func (z *BigInt) Not(x *BigInt) *BigInt {
   729  	var tmp1, tmp2 big.Int //gcassert:noescape
   730  	zi := z.inner(&tmp1)
   731  	zi.Not(x.inner(&tmp2))
   732  	z.updateInner(zi)
   733  	return z
   734  }
   735  
   736  // Or calls (big.Int).Or.
   737  func (z *BigInt) Or(x, y *BigInt) *BigInt {
   738  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   739  	zi := z.inner(&tmp1)
   740  	zi.Or(x.inner(&tmp2), y.inner(&tmp3))
   741  	z.updateInner(zi)
   742  	return z
   743  }
   744  
   745  // ProbablyPrime calls (big.Int).ProbablyPrime.
   746  func (z *BigInt) ProbablyPrime(n int) bool {
   747  	var tmp1 big.Int //gcassert:noescape
   748  	return z.inner(&tmp1).ProbablyPrime(n)
   749  }
   750  
   751  // Quo calls (big.Int).Quo.
   752  func (z *BigInt) Quo(x, y *BigInt) *BigInt {
   753  	if xVal, xNeg, ok := x.innerAsUint64(); ok {
   754  		if yVal, yNeg, ok := y.innerAsUint64(); ok {
   755  			if quoVal, quoNeg, ok := quoInline(xVal, yVal, xNeg, yNeg); ok {
   756  				z.updateInnerFromUint64(quoVal, quoNeg)
   757  				return z
   758  			}
   759  		}
   760  	}
   761  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   762  	zi := z.inner(&tmp1)
   763  	zi.Quo(x.inner(&tmp2), y.inner(&tmp3))
   764  	z.updateInner(zi)
   765  	return z
   766  }
   767  
   768  // QuoRem calls (big.Int).QuoRem.
   769  func (z *BigInt) QuoRem(x, y, r *BigInt) (*BigInt, *BigInt) {
   770  	if xVal, xNeg, ok := x.innerAsUint64(); ok {
   771  		if yVal, yNeg, ok := y.innerAsUint64(); ok {
   772  			if quoVal, quoNeg, ok := quoInline(xVal, yVal, xNeg, yNeg); ok {
   773  				if remVal, remNeg, ok := remInline(xVal, yVal, xNeg, yNeg); ok {
   774  					z.updateInnerFromUint64(quoVal, quoNeg)
   775  					r.updateInnerFromUint64(remVal, remNeg)
   776  					return z, r
   777  				}
   778  			}
   779  		}
   780  	}
   781  	var tmp1, tmp2, tmp3, tmp4 big.Int //gcassert:noescape
   782  	zi := z.inner(&tmp1)
   783  	ri := r.inner(&tmp2)
   784  	zi.QuoRem(x.inner(&tmp3), y.inner(&tmp4), ri)
   785  	z.updateInner(zi)
   786  	r.updateInner(ri)
   787  	return z, r
   788  }
   789  
   790  // Rand calls (big.Int).Rand.
   791  func (z *BigInt) Rand(rnd *rand.Rand, n *BigInt) *BigInt {
   792  	var tmp1, tmp2 big.Int //gcassert:noescape
   793  	zi := z.inner(&tmp1)
   794  	zi.Rand(rnd, n.inner(&tmp2))
   795  	z.updateInner(zi)
   796  	return z
   797  }
   798  
   799  // Rem calls (big.Int).Rem.
   800  func (z *BigInt) Rem(x, y *BigInt) *BigInt {
   801  	if xVal, xNeg, ok := x.innerAsUint64(); ok {
   802  		if yVal, yNeg, ok := y.innerAsUint64(); ok {
   803  			if remVal, remNeg, ok := remInline(xVal, yVal, xNeg, yNeg); ok {
   804  				z.updateInnerFromUint64(remVal, remNeg)
   805  				return z
   806  			}
   807  		}
   808  	}
   809  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   810  	zi := z.inner(&tmp1)
   811  	zi.Rem(x.inner(&tmp2), y.inner(&tmp3))
   812  	z.updateInner(zi)
   813  	return z
   814  }
   815  
   816  // Rsh calls (big.Int).Rsh.
   817  func (z *BigInt) Rsh(x *BigInt, n uint) *BigInt {
   818  	var tmp1, tmp2 big.Int //gcassert:noescape
   819  	zi := z.inner(&tmp1)
   820  	zi.Rsh(x.inner(&tmp2), n)
   821  	z.updateInner(zi)
   822  	return z
   823  }
   824  
   825  // Scan calls (big.Int).Scan.
   826  func (z *BigInt) Scan(s fmt.ScanState, ch rune) error {
   827  	var tmp1 big.Int //gcassert:noescape
   828  	zi := z.inner(&tmp1)
   829  	if err := zi.Scan(s, ch); err != nil {
   830  		return err
   831  	}
   832  	z.updateInner(zi)
   833  	return nil
   834  }
   835  
   836  // Set calls (big.Int).Set.
   837  func (z *BigInt) Set(x *BigInt) *BigInt {
   838  	if x.isInline() {
   839  		*z = *x
   840  		return z
   841  	}
   842  	var tmp1, tmp2 big.Int //gcassert:noescape
   843  	zi := z.inner(&tmp1)
   844  	zi.Set(x.inner(&tmp2))
   845  	z.updateInner(zi)
   846  	return z
   847  }
   848  
   849  // SetBit calls (big.Int).SetBit.
   850  func (z *BigInt) SetBit(x *BigInt, i int, b uint) *BigInt {
   851  	var tmp1, tmp2 big.Int //gcassert:noescape
   852  	zi := z.inner(&tmp1)
   853  	zi.SetBit(x.inner(&tmp2), i, b)
   854  	z.updateInner(zi)
   855  	return z
   856  }
   857  
   858  // SetBits calls (big.Int).SetBits.
   859  func (z *BigInt) SetBits(abs []big.Word) *BigInt {
   860  	var tmp1 big.Int //gcassert:noescape
   861  	zi := z.inner(&tmp1)
   862  	zi.SetBits(abs)
   863  	z.updateInner(zi)
   864  	return z
   865  }
   866  
   867  // SetBytes calls (big.Int).SetBytes.
   868  func (z *BigInt) SetBytes(buf []byte) *BigInt {
   869  	var tmp1 big.Int //gcassert:noescape
   870  	zi := z.inner(&tmp1)
   871  	zi.SetBytes(buf)
   872  	z.updateInner(zi)
   873  	return z
   874  }
   875  
   876  // SetInt64 calls (big.Int).SetInt64.
   877  func (z *BigInt) SetInt64(x int64) *BigInt {
   878  	neg := false
   879  	if x < 0 {
   880  		neg = true
   881  		x = -x
   882  	}
   883  	z.updateInnerFromUint64(uint64(x), neg)
   884  	return z
   885  }
   886  
   887  // SetString calls (big.Int).SetString.
   888  func (z *BigInt) SetString(s string, base int) (*BigInt, bool) {
   889  	if i, err := strconv.ParseInt(s, base, 64); err == nil {
   890  		z.SetInt64(i)
   891  		return z, true
   892  	}
   893  	var tmp1 big.Int //gcassert:noescape
   894  	zi := z.inner(&tmp1)
   895  	if _, ok := zi.SetString(s, base); !ok {
   896  		return nil, false
   897  	}
   898  	z.updateInner(zi)
   899  	return z, true
   900  }
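        // A minimal usage sketch (illustrative values):
        //
        //	var x BigInt
        //	x.SetString("12345", 10) // fits in an int64: fast path via strconv
        //	x.SetString("340282366920938463463374607431768211455", 10)
        //	                         // 2^128 - 1: falls through to (big.Int).SetString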
   901  
   902  // SetUint64 calls (big.Int).SetUint64.
   903  func (z *BigInt) SetUint64(x uint64) *BigInt {
   904  	z.updateInnerFromUint64(x, false)
   905  	return z
   906  }
   907  
   908  // Sign calls (big.Int).Sign.
   909  func (z *BigInt) Sign() int {
   910  	if z._inner == nil {
   911  		if z._inline == [inlineWords]big.Word{} {
   912  			return 0
   913  		}
   914  		return 1
   915  	} else if z._inner == negSentinel {
   916  		return -1
   917  	}
   918  	return z._inner.Sign()
   919  }
   920  
   921  // Sqrt calls (big.Int).Sqrt.
   922  func (z *BigInt) Sqrt(x *BigInt) *BigInt {
   923  	var tmp1, tmp2 big.Int //gcassert:noescape
   924  	zi := z.inner(&tmp1)
   925  	zi.Sqrt(x.inner(&tmp2))
   926  	z.updateInner(zi)
   927  	return z
   928  }
   929  
   930  // String calls (big.Int).String.
   931  func (z *BigInt) String() string {
   932  	if z == nil {
   933  		// Fast-path that avoids innerOrNil, allowing inner to be inlined.
   934  		return "<nil>"
   935  	}
   936  	var tmp1 big.Int //gcassert:noescape
   937  	return z.inner(&tmp1).String()
   938  }
   939  
   940  // Sub calls (big.Int).Sub.
   941  func (z *BigInt) Sub(x, y *BigInt) *BigInt {
   942  	if xVal, xNeg, ok := x.innerAsUint64(); ok {
   943  		if yVal, yNeg, ok := y.innerAsUint64(); ok {
   944  			if zVal, zNeg, ok := addInline(xVal, yVal, xNeg, !yNeg); ok {
   945  				z.updateInnerFromUint64(zVal, zNeg)
   946  				return z
   947  			}
   948  		}
   949  	}
   950  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
   951  	zi := z.inner(&tmp1)
   952  	zi.Sub(x.inner(&tmp2), y.inner(&tmp3))
   953  	z.updateInner(zi)
   954  	return z
   955  }
   956  
   957  // Text calls (big.Int).Text.
   958  func (z *BigInt) Text(base int) string {
   959  	if z == nil {
   960  		// Fast-path that avoids innerOrNil, allowing inner to be inlined.
   961  		return "<nil>"
   962  	}
   963  	var tmp1 big.Int //gcassert:noescape
   964  	return z.inner(&tmp1).Text(base)
   965  }
   966  
   967  // TrailingZeroBits calls (big.Int).TrailingZeroBits.
   968  func (z *BigInt) TrailingZeroBits() uint {
   969  	var tmp1 big.Int //gcassert:noescape
   970  	return z.inner(&tmp1).TrailingZeroBits()
   971  }
   972  
   973  // Uint64 calls (big.Int).Uint64.
   974  func (z *BigInt) Uint64() uint64 {
   975  	if zVal, _, ok := z.innerAsUint64(); ok {
   976  		return zVal
   977  	}
   978  	var tmp1 big.Int //gcassert:noescape
   979  	return z.inner(&tmp1).Uint64()
   980  }
   981  
   982  // UnmarshalJSON calls (big.Int).UnmarshalJSON.
   983  func (z *BigInt) UnmarshalJSON(text []byte) error {
   984  	var tmp1 big.Int //gcassert:noescape
   985  	zi := z.inner(&tmp1)
   986  	if err := zi.UnmarshalJSON(text); err != nil {
   987  		return err
   988  	}
   989  	z.updateInner(zi)
   990  	return nil
   991  }
   992  
   993  // UnmarshalText calls (big.Int).UnmarshalText.
   994  func (z *BigInt) UnmarshalText(text []byte) error {
   995  	var tmp1 big.Int //gcassert:noescape
   996  	zi := z.inner(&tmp1)
   997  	if err := zi.UnmarshalText(text); err != nil {
   998  		return err
   999  	}
  1000  	z.updateInner(zi)
  1001  	return nil
  1002  }
  1003  
  1004  // Xor calls (big.Int).Xor.
  1005  func (z *BigInt) Xor(x, y *BigInt) *BigInt {
  1006  	var tmp1, tmp2, tmp3 big.Int //gcassert:noescape
  1007  	zi := z.inner(&tmp1)
  1008  	zi.Xor(x.inner(&tmp2), y.inner(&tmp3))
  1009  	z.updateInner(zi)
  1010  	return z
  1011  }
  1012  
  1013  ///////////////////////////////////////////////////////////////////////////////
  1014  //                     apd.BigInt / math/big.Int interop                     //
  1015  ///////////////////////////////////////////////////////////////////////////////
  1016  
  1017  // MathBigInt returns the math/big.Int representation of z.
  1018  func (z *BigInt) MathBigInt() *big.Int {
  1019  	var tmp1 big.Int //gcassert:noescape
  1020  	zi := z.inner(&tmp1)
  1021  	// NOTE: We can't return zi directly, because it may be pointing into z's
  1022  	// _inline array. That aliasing is hidden from escape analysis, so returning
  1023  	// zi would be unsafe: it would not force the receiver to escape and could
  1024  	// leave the return value pointing into stack memory.
  1025  	return new(big.Int).Set(zi)
  1026  }
  1027  
  1028  // SetMathBigInt sets z to x and returns z.
  1029  func (z *BigInt) SetMathBigInt(x *big.Int) *BigInt {
  1030  	var tmp1 big.Int //gcassert:noescape
  1031  	zi := z.inner(&tmp1)
  1032  	zi.Set(x)
  1033  	z.updateInner(zi)
  1034  	return z
  1035  }
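        // A round-trip interop sketch (illustrative):
        //
        //	var z BigInt
        //	z.SetInt64(42)
        //	bi := z.MathBigInt()      // independent copy; mutating bi leaves z alone
        //	bi.Add(bi, big.NewInt(1)) // bi is now 43, z is still 42
        //	z.SetMathBigInt(bi)       // z is now 43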
  1036  
