cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

udivmodti4.c (3531B)


// SPDX-License-Identifier: GPL-2.0
/* This has so very few changes over libgcc2's __udivmoddi4 it isn't funny.  */

#include <math-emu/soft-fp.h>

#undef count_leading_zeros
#define count_leading_zeros  __FP_CLZ

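/* _fp_udivmodti4() divides the two-word value n1:n0 (high word first) by
   the two-word value d1:d0, storing the quotient in q[1]:q[0] and the
   remainder in r[1]:r[0].  "ti4" refers to TImode, i.e. with 64-bit
   _FP_W_TYPE words this is a 128-bit unsigned division, built from the
   word-sized longlong.h-style primitives udiv_qrnnd(), umul_ppmm() and
   sub_ddmmss().  In the branch comments below, the upper-case letters mark
   the operand that is known to be the larger one in the comparison that
   selected the branch; e.g. "0q = nn / 0D" is the case where the divisor
   fits in one word and d0 > n1, so the quotient also fits in one word.  */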
void
_fp_udivmodti4(_FP_W_TYPE q[2], _FP_W_TYPE r[2],
	       _FP_W_TYPE n1, _FP_W_TYPE n0,
	       _FP_W_TYPE d1, _FP_W_TYPE d0)
{
  _FP_W_TYPE q0, q1, r0, r1;
  _FP_I_TYPE b, bm;

  if (d1 == 0)
    {
#if !UDIV_NEEDS_NORMALIZATION
      if (d0 > n1)
	{
	  /* 0q = nn / 0D */

	  udiv_qrnnd (q0, n0, n1, n0, d0);
	  q1 = 0;

	  /* Remainder in n0.  */
	}
      else
	{
	  /* qq = NN / 0d */

	  if (d0 == 0)
	    d0 = 1 / d0;	/* Divide intentionally by zero.  */

	  udiv_qrnnd (q1, n1, 0, n1, d0);
	  udiv_qrnnd (q0, n0, n1, n0, d0);

	  /* Remainder in n0.  */
	}

      r0 = n0;
      r1 = 0;

#else /* UDIV_NEEDS_NORMALIZATION */

      if (d0 > n1)
	{
	  /* 0q = nn / 0D */

	  count_leading_zeros (bm, d0);

	  if (bm != 0)
	    {
	      /* Normalize, i.e. make the most significant bit of the
		 denominator set.  */

	      d0 = d0 << bm;
	      n1 = (n1 << bm) | (n0 >> (_FP_W_TYPE_SIZE - bm));
	      n0 = n0 << bm;
	    }

	  udiv_qrnnd (q0, n0, n1, n0, d0);
	  q1 = 0;

	  /* Remainder in n0 >> bm.  */
	}
      else
	{
	  /* qq = NN / 0d */

	  if (d0 == 0)
	    d0 = 1 / d0;	/* Divide intentionally by zero.  */

	  count_leading_zeros (bm, d0);

	  if (bm == 0)
	    {
	      /* From (n1 >= d0) /\ (the most significant bit of d0 is set),
		 conclude (the most significant bit of n1 is set) /\ (the
		 leading quotient digit q1 = 1).

		 This special case is necessary, not an optimization.
		 (Shift counts of SI_TYPE_SIZE are undefined.)  */

	      n1 -= d0;
	      q1 = 1;
	    }
	  else
	    {
	      _FP_W_TYPE n2;

	      /* Normalize.  */

	      b = _FP_W_TYPE_SIZE - bm;

	      d0 = d0 << bm;
	      n2 = n1 >> b;
	      n1 = (n1 << bm) | (n0 >> b);
	      n0 = n0 << bm;

	      udiv_qrnnd (q1, n1, n2, n1, d0);
	    }

	  /* n1 != d0...  */

	  udiv_qrnnd (q0, n0, n1, n0, d0);

	  /* Remainder in n0 >> bm.  */
	}

      r0 = n0 >> bm;
      r1 = 0;
#endif /* UDIV_NEEDS_NORMALIZATION */
    }
  else
    {
      if (d1 > n1)
	{
	  /* 00 = nn / DD */

	  q0 = 0;
	  q1 = 0;

	  /* Remainder in n1n0.  */
	  r0 = n0;
	  r1 = n1;
	}
      else
	{
	  /* 0q = NN / dd */

	  count_leading_zeros (bm, d1);
	  if (bm == 0)
	    {
	      /* From (n1 >= d1) /\ (the most significant bit of d1 is set),
		 conclude (the most significant bit of n1 is set) /\ (the
		 quotient digit q0 = 0 or 1).

		 This special case is necessary, not an optimization.  */

	      /* The condition on the next line takes advantage of the fact
		 that n1 >= d1 (true due to program flow).  */
	      if (n1 > d1 || n0 >= d0)
		{
		  q0 = 1;
		  sub_ddmmss (n1, n0, n1, n0, d1, d0);
		}
	      else
		q0 = 0;

	      q1 = 0;

	      r0 = n0;
	      r1 = n1;
	    }
	  else
	    {
	      _FP_W_TYPE m1, m0, n2;

	      /* Normalize.  */

	      b = _FP_W_TYPE_SIZE - bm;

	      d1 = (d1 << bm) | (d0 >> b);
	      d0 = d0 << bm;
	      n2 = n1 >> b;
	      n1 = (n1 << bm) | (n0 >> b);
	      n0 = n0 << bm;

	      udiv_qrnnd (q0, n1, n2, n1, d1);
	      umul_ppmm (m1, m0, q0, d0);

	      if (m1 > n1 || (m1 == n1 && m0 > n0))
		{
		  q0--;
		  sub_ddmmss (m1, m0, m1, m0, d1, d0);
		}

	      q1 = 0;

	      /* Remainder in (n1n0 - m1m0) >> bm.  */
	      sub_ddmmss (n1, n0, n1, n0, m1, m0);
	      r0 = (n1 << b) | (n0 >> bm);
	      r1 = n1 >> bm;
	    }
	}
    }

  q[0] = q0; q[1] = q1;
  r[0] = r0; r[1] = r1;
}
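
For illustration only, the sketch below is not part of this file or of the kernel build: it mirrors the calling convention of _fp_udivmodti4 using the compiler's unsigned __int128 on a 64-bit target. The helper name udivmod128 and the example operands are hypothetical; the point is only to show how the dividend, divisor, quotient and remainder are split into high/low word pairs.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in with the same word-pair interface as
   _fp_udivmodti4(): n1/d1 are the high words, n0/d0 the low words. */
static void udivmod128(uint64_t q[2], uint64_t r[2],
		       uint64_t n1, uint64_t n0,
		       uint64_t d1, uint64_t d0)
{
	unsigned __int128 n = ((unsigned __int128)n1 << 64) | n0;
	unsigned __int128 d = ((unsigned __int128)d1 << 64) | d0;

	q[0] = (uint64_t)(n / d);
	q[1] = (uint64_t)((n / d) >> 64);
	r[0] = (uint64_t)(n % d);
	r[1] = (uint64_t)((n % d) >> 64);
}

int main(void)
{
	uint64_t q[2], r[2];

	/* (2^64 + 5) / 3: quotient 0x5555555555555557, remainder 0 */
	udivmod128(q, r, 1, 5, 0, 3);
	printf("q = %016llx%016llx  r = %016llx%016llx\n",
	       (unsigned long long)q[1], (unsigned long long)q[0],
	       (unsigned long long)r[1], (unsigned long long)r[0]);
	return 0;
}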